Commit 28c7236

gcf-owl-bot[bot] authored and parthea committed
feat: Add support for opt-in debug logging (#855)
* feat: Add support for opt-in debug logging
  fix: Fix typing issue with gRPC metadata when key ends in -bin
  chore: Update gapic-generator-python to v1.21.0

  PiperOrigin-RevId: 705285820
  Source-Link: googleapis/googleapis@f9b8b91
  Source-Link: googleapis/googleapis-gen@ca1e0a1
  Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiY2ExZTBhMWU0NzJkNmU2ZjVkZTg4M2E1Y2I1NDcyNGYxMTJjZTM0OCJ9

* 🦉 Updates from OwlBot post-processor
  See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md

---------

Co-authored-by: Owl Bot <gcf-owl-bot[bot]@users.noreply.github.com>
1 parent 9311fc5 commit 28c7236
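
This commit wires each generated client to google-api-core's opt-in client_logging module: when the module is importable, client_logging.initialize_logging() runs at client construction, and a structured DEBUG record describing the new client is emitted. A minimal sketch of opting in from user code follows; the GOOGLE_SDK_PYTHON_LOGGING_SCOPE environment variable belongs to google.api_core.client_logging rather than to this commit, so treat the exact variable name as an assumption.

import logging
import os

# Opt in before constructing the client; initialize_logging() reads this
# scope variable (name assumed from google.api_core.client_logging).
os.environ["GOOGLE_SDK_PYTHON_LOGGING_SCOPE"] = "google.cloud.bigquery_storage_v1"

# Plain stdlib logging works as well: the clients log through
# logging.getLogger(__name__), so enabling DEBUG is enough to surface
# the "Created client ..." records.
logging.basicConfig(level=logging.DEBUG)

from google.cloud.bigquery_storage_v1 import BigQueryReadClient

client = BigQueryReadClient()  # emits one DEBUG record describing the client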

File tree: 24 files changed, +1556 -286 lines


packages/google-cloud-bigquery-storage/.kokoro/docker/docs/requirements.txt

Lines changed: 6 additions & 36 deletions
@@ -32,41 +32,11 @@ platformdirs==4.3.6 \
     --hash=sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907 \
     --hash=sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb
     # via virtualenv
-tomli==2.2.1 \
-    --hash=sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6 \
-    --hash=sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd \
-    --hash=sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c \
-    --hash=sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b \
-    --hash=sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8 \
-    --hash=sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6 \
-    --hash=sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77 \
-    --hash=sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff \
-    --hash=sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea \
-    --hash=sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192 \
-    --hash=sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249 \
-    --hash=sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee \
-    --hash=sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4 \
-    --hash=sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98 \
-    --hash=sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8 \
-    --hash=sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4 \
-    --hash=sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281 \
-    --hash=sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744 \
-    --hash=sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69 \
-    --hash=sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13 \
-    --hash=sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140 \
-    --hash=sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e \
-    --hash=sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e \
-    --hash=sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc \
-    --hash=sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff \
-    --hash=sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec \
-    --hash=sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2 \
-    --hash=sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222 \
-    --hash=sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106 \
-    --hash=sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272 \
-    --hash=sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a \
-    --hash=sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7
+tomli==2.0.2 \
+    --hash=sha256:2ebe24485c53d303f690b0ec092806a085f07af5a5aa1464f3931eec36caaa38 \
+    --hash=sha256:d46d457a85337051c36524bc5349dd91b1877838e2979ac5ced3e710ed8a60ed
     # via nox
-virtualenv==20.28.0 \
-    --hash=sha256:23eae1b4516ecd610481eda647f3a7c09aea295055337331bb4e6892ecce47b0 \
-    --hash=sha256:2c9c3262bb8e7b87ea801d715fae4495e6032450c71d2309be9550e7364049aa
+virtualenv==20.27.1 \
+    --hash=sha256:142c6be10212543b32c6c45d3d3893dff89112cc588b7d0879ae5a1ec03a47ba \
+    --hash=sha256:f11f1b8a29525562925f745563bfd48b189450f61fb34c4f9cc79dd5aa32a1f4
     # via nox

packages/google-cloud-bigquery-storage/google/cloud/bigquery_storage_v1/services/big_query_read/async_client.py

Lines changed: 47 additions & 9 deletions
@@ -14,6 +14,7 @@
 # limitations under the License.
 #
 from collections import OrderedDict
+import logging as std_logging
 import re
 from typing import (
     AsyncIterable,
@@ -52,6 +53,15 @@
 from .transports.base import DEFAULT_CLIENT_INFO, BigQueryReadTransport
 from .transports.grpc_asyncio import BigQueryReadGrpcAsyncIOTransport
 
+try:
+    from google.api_core import client_logging  # type: ignore
+
+    CLIENT_LOGGING_SUPPORTED = True  # pragma: NO COVER
+except ImportError:  # pragma: NO COVER
+    CLIENT_LOGGING_SUPPORTED = False
+
+_LOGGER = std_logging.getLogger(__name__)
+
 
 class BigQueryReadAsyncClient:
     """BigQuery Read API.
@@ -260,6 +270,28 @@ def __init__(
             client_info=client_info,
         )
 
+        if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(
+            std_logging.DEBUG
+        ):  # pragma: NO COVER
+            _LOGGER.debug(
+                "Created client `google.cloud.bigquery.storage_v1.BigQueryReadAsyncClient`.",
+                extra={
+                    "serviceName": "google.cloud.bigquery.storage.v1.BigQueryRead",
+                    "universeDomain": getattr(
+                        self._client._transport._credentials, "universe_domain", ""
+                    ),
+                    "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}",
+                    "credentialsInfo": getattr(
+                        self.transport._credentials, "get_cred_info", lambda: None
+                    )(),
+                }
+                if hasattr(self._client._transport, "_credentials")
+                else {
+                    "serviceName": "google.cloud.bigquery.storage.v1.BigQueryRead",
+                    "credentialsType": None,
+                },
+            )
+
     async def create_read_session(
         self,
         request: Optional[Union[storage.CreateReadSessionRequest, dict]] = None,
@@ -269,7 +301,7 @@ async def create_read_session(
         max_stream_count: Optional[int] = None,
         retry: OptionalRetry = gapic_v1.method.DEFAULT,
         timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-        metadata: Sequence[Tuple[str, str]] = (),
+        metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
     ) -> stream.ReadSession:
         r"""Creates a new read session. A read session divides
         the contents of a BigQuery table into one or more
@@ -357,8 +389,10 @@ async def sample_create_read_session():
             retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
                 should be retried.
             timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.
 
         Returns:
             google.cloud.bigquery_storage_v1.types.ReadSession:
@@ -424,7 +458,7 @@ def read_rows(
         offset: Optional[int] = None,
         retry: OptionalRetry = gapic_v1.method.DEFAULT,
         timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-        metadata: Sequence[Tuple[str, str]] = (),
+        metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
     ) -> Awaitable[AsyncIterable[storage.ReadRowsResponse]]:
         r"""Reads rows from the stream in the format prescribed
         by the ReadSession. Each response contains one or more
@@ -483,8 +517,10 @@ async def sample_read_rows():
             retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
                 should be retried.
             timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.
 
         Returns:
             AsyncIterable[google.cloud.bigquery_storage_v1.types.ReadRowsResponse]:
@@ -548,7 +584,7 @@ async def split_read_stream(
         *,
         retry: OptionalRetry = gapic_v1.method.DEFAULT,
         timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-        metadata: Sequence[Tuple[str, str]] = (),
+        metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
     ) -> storage.SplitReadStreamResponse:
         r"""Splits a given ``ReadStream`` into two ``ReadStream`` objects.
         These ``ReadStream`` objects are referred to as the primary and
@@ -596,8 +632,10 @@ async def sample_split_read_stream():
             retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
                 should be retried.
             timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.
 
         Returns:
             google.cloud.bigquery_storage_v1.types.SplitReadStreamResponse:
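
The metadata annotation widened above is what lets binary-valued gRPC metadata pass type checking. A hedged sketch of a call that exercises it with the async client; the -bin key and the resource name are illustrative, not taken from this commit.

import asyncio

from google.cloud.bigquery_storage_v1 import types
from google.cloud.bigquery_storage_v1.services.big_query_read import (
    BigQueryReadAsyncClient,
)


async def main() -> None:
    client = BigQueryReadAsyncClient()

    # gRPC metadata values are normally str; keys ending in "-bin" must
    # carry bytes. Both pairs below satisfy the widened annotation
    # Sequence[Tuple[str, Union[str, bytes]]].
    metadata = [
        ("x-goog-request-params", "name=projects/my-project"),  # str value
        ("x-trace-context-bin", b"\x00\x01"),  # bytes value; key is hypothetical
    ]

    response = await client.split_read_stream(
        request=types.SplitReadStreamRequest(
            name="projects/my-project/locations/us/sessions/abc/streams/xyz",  # hypothetical
        ),
        metadata=metadata,
    )
    print(response)


asyncio.run(main())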

packages/google-cloud-bigquery-storage/google/cloud/bigquery_storage_v1/services/big_query_read/client.py

Lines changed: 52 additions & 9 deletions
@@ -14,6 +14,7 @@
 # limitations under the License.
 #
 from collections import OrderedDict
+import logging as std_logging
 import os
 import re
 from typing import (
@@ -49,6 +50,15 @@
 except AttributeError:  # pragma: NO COVER
     OptionalRetry = Union[retries.Retry, object, None]  # type: ignore
 
+try:
+    from google.api_core import client_logging  # type: ignore
+
+    CLIENT_LOGGING_SUPPORTED = True  # pragma: NO COVER
+except ImportError:  # pragma: NO COVER
+    CLIENT_LOGGING_SUPPORTED = False
+
+_LOGGER = std_logging.getLogger(__name__)
+
 from google.protobuf import timestamp_pb2  # type: ignore
 
 from google.cloud.bigquery_storage_v1.types import arrow, avro, storage, stream
@@ -625,6 +635,10 @@ def __init__(
         # Initialize the universe domain validation.
         self._is_universe_domain_valid = False
 
+        if CLIENT_LOGGING_SUPPORTED:  # pragma: NO COVER
+            # Setup logging.
+            client_logging.initialize_logging()
+
         api_key_value = getattr(self._client_options, "api_key", None)
         if api_key_value and credentials:
             raise ValueError(
@@ -687,6 +701,29 @@ def __init__(
             api_audience=self._client_options.api_audience,
         )
 
+        if "async" not in str(self._transport):
+            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(
+                std_logging.DEBUG
+            ):  # pragma: NO COVER
+                _LOGGER.debug(
+                    "Created client `google.cloud.bigquery.storage_v1.BigQueryReadClient`.",
+                    extra={
+                        "serviceName": "google.cloud.bigquery.storage.v1.BigQueryRead",
+                        "universeDomain": getattr(
+                            self._transport._credentials, "universe_domain", ""
+                        ),
+                        "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}",
+                        "credentialsInfo": getattr(
+                            self.transport._credentials, "get_cred_info", lambda: None
+                        )(),
+                    }
+                    if hasattr(self._transport, "_credentials")
+                    else {
+                        "serviceName": "google.cloud.bigquery.storage.v1.BigQueryRead",
+                        "credentialsType": None,
+                    },
+                )
+
     def create_read_session(
         self,
         request: Optional[Union[storage.CreateReadSessionRequest, dict]] = None,
@@ -696,7 +733,7 @@ def create_read_session(
         max_stream_count: Optional[int] = None,
         retry: OptionalRetry = gapic_v1.method.DEFAULT,
         timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-        metadata: Sequence[Tuple[str, str]] = (),
+        metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
     ) -> stream.ReadSession:
         r"""Creates a new read session. A read session divides
         the contents of a BigQuery table into one or more
@@ -784,8 +821,10 @@ def sample_create_read_session():
             retry (google.api_core.retry.Retry): Designation of what errors, if any,
                 should be retried.
             timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.
 
         Returns:
             google.cloud.bigquery_storage_v1.types.ReadSession:
@@ -848,7 +887,7 @@ def read_rows(
         offset: Optional[int] = None,
         retry: OptionalRetry = gapic_v1.method.DEFAULT,
         timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-        metadata: Sequence[Tuple[str, str]] = (),
+        metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
     ) -> Iterable[storage.ReadRowsResponse]:
         r"""Reads rows from the stream in the format prescribed
         by the ReadSession. Each response contains one or more
@@ -907,8 +946,10 @@ def sample_read_rows():
             retry (google.api_core.retry.Retry): Designation of what errors, if any,
                 should be retried.
             timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.
 
         Returns:
             Iterable[google.cloud.bigquery_storage_v1.types.ReadRowsResponse]:
@@ -969,7 +1010,7 @@ def split_read_stream(
         *,
         retry: OptionalRetry = gapic_v1.method.DEFAULT,
         timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-        metadata: Sequence[Tuple[str, str]] = (),
+        metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
     ) -> storage.SplitReadStreamResponse:
         r"""Splits a given ``ReadStream`` into two ``ReadStream`` objects.
         These ``ReadStream`` objects are referred to as the primary and
@@ -1017,8 +1058,10 @@ def sample_split_read_stream():
             retry (google.api_core.retry.Retry): Designation of what errors, if any,
                 should be retried.
             timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.
 
         Returns:
             google.cloud.bigquery_storage_v1.types.SplitReadStreamResponse:
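
The extra dict attached to the _LOGGER.debug(...) calls above puts serviceName, universeDomain, credentialsType, and credentialsInfo onto the LogRecord itself rather than into the message text. A minimal stdlib sketch of surfacing those fields; the formatter and handler wiring are illustrative, not part of the commit.

import logging


class ClientFieldsFormatter(logging.Formatter):
    """Append the structured client fields, when present, to each record."""

    CLIENT_FIELDS = ("serviceName", "universeDomain", "credentialsType", "credentialsInfo")

    def format(self, record: logging.LogRecord) -> str:
        base = super().format(record)
        fields = {
            name: getattr(record, name)
            for name in self.CLIENT_FIELDS
            if hasattr(record, name)
        }
        return f"{base} {fields}" if fields else base


handler = logging.StreamHandler()
handler.setFormatter(ClientFieldsFormatter("%(levelname)s %(name)s: %(message)s"))

# The generated clients log via logging.getLogger(__name__), so the
# package logger is the natural attachment point.
logger = logging.getLogger("google.cloud.bigquery_storage_v1")
logger.setLevel(logging.DEBUG)
logger.addHandler(handler)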
