@@ -183,13 +183,12 @@ replacements:
"ReadRowsStream",
"ReadSession",
count: 1
# Given that this file is mostly handwritten, we could omit the file completely when we migrate to librarian
# See `preserve_regex` in https://github.com/googleapis/librarian/blob/main/doc/language-onboarding.md#generate
# Given that this file is mostly handwritten, we could omit the file during code generation
# This will require a change to gapic-generator-python to provide the ability to omit files
- paths: [
packages/google-cloud-bigquery-storage/google/cloud/bigquery_storage_v1/__init__.py,
]
before: |
__version__ = package_version.__version__\n\n
from .services.big_query_read import BigQueryReadAsyncClient, BigQueryReadClient
from .services.big_query_write import BigQueryWriteAsyncClient, BigQueryWriteClient
from .types.arrow import ArrowRecordBatch, ArrowSchema, ArrowSerializationOptions
@@ -224,6 +223,19 @@ replacements:
WriteStreamView,
\)
from .types.table import TableFieldSchema, TableSchema\n
after: |
from google.cloud.bigquery_storage_v1 import client, types\n\n
class BigQueryReadClient(client.BigQueryReadClient):
__doc__ = client.BigQueryReadClient.__doc__\n\n
class BigQueryWriteClient(client.BigQueryWriteClient):
__doc__ = client.BigQueryWriteClient.__doc__\n\n
count: 1
# Given that this file is mostly handwritten, we could omit the file during code generation
# This will require a change to gapic-generator-python to provide the ability to omit files
- paths: [
packages/google-cloud-bigquery-storage/google/cloud/bigquery_storage_v1/__init__.py,
]
before: |
__all__ = \(
"BigQueryReadAsyncClient",
"BigQueryWriteAsyncClient",
@@ -265,12 +277,6 @@ replacements:
"WriteStreamView",
\)
after: |
__version__ = package_version.__version__\n
from google.cloud.bigquery_storage_v1 import client, types\n\n
class BigQueryReadClient(client.BigQueryReadClient):
__doc__ = client.BigQueryReadClient.__doc__\n\n
class BigQueryWriteClient(client.BigQueryWriteClient):
__doc__ = client.BigQueryWriteClient.__doc__\n\n
__all__ = (
# google.cloud.bigquery_storage_v1
"__version__",
@@ -280,14 +286,13 @@
"BigQueryWriteClient",
)
count: 1
# Given that this file is mostly handwritten, we could omit the file completely when we migrate to librarian
# See `preserve_regex` in https://github.com/googleapis/librarian/blob/main/doc/language-onboarding.md#generate
# Given that this file is mostly handwritten, we could omit the file during code generation
# This will require a change to gapic-generator-python to provide the ability to omit files
- paths: [
packages/google-cloud-bigquery-storage/google/cloud/bigquery_storage_v1alpha/__init__.py,
packages/google-cloud-bigquery-storage/google/cloud/bigquery_storage_v1beta/__init__.py,
]
before: |
__version__ = package_version.__version__\n\n
from .services.metastore_partition_service import \(
MetastorePartitionServiceAsyncClient,
MetastorePartitionServiceClient,
@@ -316,6 +321,16 @@ replacements:
StorageDescriptor,
StreamList,
\)\n
after: ""
count: 2
# Given that this file is mostly handwritten, we could omit the file during code generation
# This will require a change to gapic-generator-python to provide the ability to omit files
- paths: [
packages/google-cloud-bigquery-storage/google/cloud/bigquery_storage_v1alpha/__init__.py,
packages/google-cloud-bigquery-storage/google/cloud/bigquery_storage_v1beta/__init__.py,
]
before: |
\)\n
__all__ = \(
"MetastorePartitionServiceAsyncClient",
"BatchCreateMetastorePartitionsRequest",
@@ -341,15 +356,14 @@
"UpdateMetastorePartitionRequest",
\)
after: |
__version__ = package_version.__version__
)
count: 2
# Given that this file is mostly handwritten, we could omit the file completely when we migrate to librarian
# See `preserve_regex` in https://github.com/googleapis/librarian/blob/main/doc/language-onboarding.md#generate
# Given that this file is mostly handwritten, we could omit the file during code generation
# This will require a change to gapic-generator-python to provide the ability to omit files
- paths: [
packages/google-cloud-bigquery-storage/google/cloud/bigquery_storage_v1beta2/__init__.py,
]
before: |
__version__ = package_version.__version__\n\n
from .services.big_query_read import BigQueryReadAsyncClient, BigQueryReadClient
from .services.big_query_write import BigQueryWriteAsyncClient, BigQueryWriteClient
from .types.arrow import ArrowRecordBatch, ArrowSchema, ArrowSerializationOptions
@@ -377,6 +391,19 @@ replacements:
\)
from .types.stream import DataFormat, ReadSession, ReadStream, WriteStream
from .types.table import TableFieldSchema, TableSchema\n
after: |
from google.cloud.bigquery_storage_v1beta2 import client, types\n\n
class BigQueryReadClient(client.BigQueryReadClient):
__doc__ = client.BigQueryReadClient.__doc__\n\n
class BigQueryWriteClient(client.BigQueryWriteClient):
__doc__ = client.BigQueryWriteClient.__doc__\n\n
count: 1
# Given that this file is mostly handwritten, we could omit the file during code generation
# This will require a change to gapic-generator-python to provide the ability to omit files
- paths: [
packages/google-cloud-bigquery-storage/google/cloud/bigquery_storage_v1beta2/__init__.py,
]
before: |
__all__ = \(
"BigQueryReadAsyncClient",
"BigQueryWriteAsyncClient",
@@ -415,12 +442,6 @@ replacements:
"WriteStream",
\)
after: |
__version__ = package_version.__version__\n
from google.cloud.bigquery_storage_v1beta2 import client, types\n\n
class BigQueryReadClient(client.BigQueryReadClient):
__doc__ = client.BigQueryReadClient.__doc__\n\n
class BigQueryWriteClient(client.BigQueryWriteClient):
__doc__ = client.BigQueryWriteClient.__doc__\n\n
__all__ = (
# google.cloud.bigquery_storage_v1beta2
"__version__",
@@ -430,9 +451,6 @@
"BigQueryWriteClient",
)
count: 1
# Given that this file is mostly handwritten, we could omit the file completely when we migrate to librarian
# See `preserve_regex` in https://github.com/googleapis/librarian/blob/main/doc/language-onboarding.md#generate
# We could also consider updating the docs/index.rst template so that the majority of handwritten changes are not required
- paths: [
packages/google-cloud-bigquery-storage/testing/constraints-3.7.txt,
]
@@ -447,6 +465,8 @@ replacements:
pyarrow==0.15.0
google-auth==2.14.1
count: 1
# Given that this file is mostly handwritten, we could omit the file during code generation
# This will require a change to gapic-generator-python to provide the ability to omit files
- paths: [
packages/google-cloud-bigquery-storage/docs/index.rst,
]
2 changes: 1 addition & 1 deletion .librarian/state.yaml
@@ -1,4 +1,4 @@
image: us-central1-docker.pkg.dev/cloud-sdk-librarian-prod/images-prod/python-librarian-generator@sha256:d7caef319a25d618e20ba798b103434700bfd80015f525802d87621ca2528c90
image: us-central1-docker.pkg.dev/cloud-sdk-librarian-prod/images-prod/python-librarian-generator@sha256:b8058df4c45e9a6e07f6b4d65b458d0d059241dd34c814f151c8bf6b89211209
libraries:
- id: google-ads-admanager
version: 0.7.0
@@ -13,10 +13,21 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
import sys

import google.api_core as api_core

from google.cloud.bigquery_storage_v1 import gapic_version as package_version

__version__ = package_version.__version__

if sys.version_info >= (3, 8): # pragma: NO COVER
from importlib import metadata
else: # pragma: NO COVER
# TODO(https://github.com/googleapis/python-api-core/issues/835): Remove
# this code path once we drop support for Python 3.7
import importlib_metadata as metadata

from google.cloud.bigquery_storage_v1 import client, types


@@ -28,6 +39,100 @@ class BigQueryWriteClient(client.BigQueryWriteClient):
__doc__ = client.BigQueryWriteClient.__doc__


if hasattr(api_core, "check_python_version") and hasattr(
api_core, "check_dependency_versions"
): # pragma: NO COVER
api_core.check_python_version("google.cloud.bigquery_storage_v1") # type: ignore
api_core.check_dependency_versions("google.cloud.bigquery_storage_v1") # type: ignore
else: # pragma: NO COVER
# An older version of api_core is installed which does not define the
# functions above. We do equivalent checks manually.
try:
import sys
import warnings

_py_version_str = sys.version.split()[0]
_package_label = "google.cloud.bigquery_storage_v1"
Comment on lines +50 to +55


medium

The _package_label variable is defined inside the try block but is also used in the except block. If an exception occurs before _package_label is assigned (e.g., during an import), a NameError will be raised within the except block, which can hide the original error. To make the error handling more robust, it's better to define _package_label before the try block. This issue is present in other __init__.py files in this PR.

Suggested change

-    try:
-        import sys
-        import warnings
-        _py_version_str = sys.version.split()[0]
-        _package_label = "google.cloud.bigquery_storage_v1"
+    _package_label = "google.cloud.bigquery_storage_v1"
+    try:
+        import sys
+        import warnings
+        _py_version_str = sys.version.split()[0]
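
A minimal, hypothetical sketch of the failure mode described in this comment (the names are illustrative only, not taken from the PR): if the name referenced in the except handler was never bound, the handler itself raises NameError and the intended warning is never emitted.

# Demonstrates how an unbound name inside an except block masks the original error.
try:
    try:
        raise ImportError("original problem")  # stands in for a failing import
        _package_label = "google.cloud.bigquery_storage_v1"  # never reached
    except Exception:
        # _package_label was never bound, so this line raises NameError instead
        # of emitting the message about the ImportError we caught.
        print(f"version check skipped for {_package_label}")
except NameError as exc:
    print(f"propagated error: {exc!r}")  # NameError, not the ImportError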

if sys.version_info < (3, 9):
warnings.warn(
"You are using a non-supported Python version "
+ f"({_py_version_str}). Google will not post any further "
+ f"updates to {_package_label} supporting this Python version. "
+ "Please upgrade to the latest Python version, or at "
+ f"least to Python 3.9, and then update {_package_label}.",
FutureWarning,
)
if sys.version_info[:2] == (3, 9):
warnings.warn(
f"You are using a Python version ({_py_version_str}) "
+ f"which Google will stop supporting in {_package_label} in "
+ "January 2026. Please "
+ "upgrade to the latest Python version, or at "
+ "least to Python 3.10, before then, and "
+ f"then update {_package_label}.",
FutureWarning,
)

def parse_version_to_tuple(version_string: str):
"""Safely converts a semantic version string to a comparable tuple of integers.
Example: "4.25.8" -> (4, 25, 8)
Ignores non-numeric parts and handles common version formats.
Args:
version_string: Version string in the format "x.y.z" or "x.y.z<suffix>"
Returns:
Tuple of integers for the parsed version string.
"""
parts = []
for part in version_string.split("."):
try:
parts.append(int(part))
except ValueError:
# If it's a non-numeric part (e.g., '1.0.0b1' -> 'b1'), stop here.
# This is a simplification compared to 'packaging.parse_version', but sufficient
# for comparing strictly numeric semantic versions.
break
return tuple(parts)
Comment on lines +76 to +94


medium

The parse_version_to_tuple function is a simplification that doesn't follow PEP 440 ordering. Because parsing stops at the first non-numeric component, a version like "4.25.8b1" is truncated to (4, 25) and the pre-release marker is discarded, and a local version such as "4.25.8+cpu" is likewise truncated to (4, 25) and treated as older than 4.25.8 even though it satisfies the requirement. Consider using packaging.version.parse for a more robust version comparison. While this might add a dependency, packaging is a standard library for this purpose and is often available in environments with setuptools.
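
As a hedged illustration of the suggestion (this assumes the third-party packaging library is available; it is not a dependency added by this PR):

from packaging.version import parse

# packaging implements PEP 440 ordering: a pre-release sorts before the
# corresponding final release, and a local version still satisfies it.
assert parse("4.25.8b1") < parse("4.25.8")
assert parse("4.25.8+cpu") >= parse("4.25.8")

# By contrast, the simplified tuple parser above stops at the first
# non-numeric component, so both "4.25.8b1" and "4.25.8+cpu" are truncated
# to (4, 25) and compare as older than (4, 25, 8).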


def _get_version(dependency_name):
try:
version_string: str = metadata.version(dependency_name)
parsed_version = parse_version_to_tuple(version_string)
return (parsed_version, version_string)
except Exception:
# Catch exceptions from metadata.version() (e.g., PackageNotFoundError)
# or errors during parse_version_to_tuple
return (None, "--")

_dependency_package = "google.protobuf"
_next_supported_version = "4.25.8"
_next_supported_version_tuple = (4, 25, 8)
_recommendation = " (we recommend 6.x)"
(_version_used, _version_used_string) = _get_version(_dependency_package)
if _version_used and _version_used < _next_supported_version_tuple:
warnings.warn(
f"Package {_package_label} depends on "
+ f"{_dependency_package}, currently installed at version "
+ f"{_version_used_string}. Future updates to "
+ f"{_package_label} will require {_dependency_package} at "
+ f"version {_next_supported_version} or higher{_recommendation}."
+ " Please ensure "
+ "that either (a) your Python environment doesn't pin the "
+ f"version of {_dependency_package}, so that updates to "
+ f"{_package_label} can require the higher version, or "
+ "(b) you manually update your Python environment to use at "
+ f"least version {_next_supported_version} of "
+ f"{_dependency_package}.",
FutureWarning,
)
except Exception:
warnings.warn(
"Could not determine the version of Python "
+ "currently being used. To continue receiving "
+ "updates for {_package_label}, ensure you are "
+ "using a supported version of Python; see "
+ "https://devguide.python.org/versions/"
)

__all__ = (
# google.cloud.bigquery_storage_v1
"__version__",
@@ -150,6 +150,34 @@ def _get_default_mtls_endpoint(api_endpoint):
_DEFAULT_ENDPOINT_TEMPLATE = "bigquerystorage.{UNIVERSE_DOMAIN}"
_DEFAULT_UNIVERSE = "googleapis.com"

@staticmethod
def _use_client_cert_effective():
"""Returns whether client certificate should be used for mTLS if the
google-auth version supports should_use_client_cert automatic mTLS enablement.

Alternatively, read from the GOOGLE_API_USE_CLIENT_CERTIFICATE env var.

Returns:
bool: whether client certificate should be used for mTLS
Raises:
ValueError: (If using a version of google-auth without should_use_client_cert and
GOOGLE_API_USE_CLIENT_CERTIFICATE is set to an unexpected value.)
"""
# check if google-auth version supports should_use_client_cert for automatic mTLS enablement
if hasattr(mtls, "should_use_client_cert"): # pragma: NO COVER
return mtls.should_use_client_cert()
else: # pragma: NO COVER
# if unsupported, fallback to reading from env var
use_client_cert_str = os.getenv(
"GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"
).lower()
if use_client_cert_str not in ("true", "false"):
raise ValueError(
"Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be"
" either `true` or `false`"
)
return use_client_cert_str == "true"

@classmethod
def from_service_account_info(cls, info: dict, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
@@ -383,20 +411,16 @@ def get_mtls_endpoint_and_cert_source(
)
if client_options is None:
client_options = client_options_lib.ClientOptions()
use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
use_client_cert = BigQueryReadClient._use_client_cert_effective()
use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
if use_client_cert not in ("true", "false"):
raise ValueError(
"Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
)
if use_mtls_endpoint not in ("auto", "never", "always"):
raise MutualTLSChannelError(
"Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
)

# Figure out the client cert source to use.
client_cert_source = None
if use_client_cert == "true":
if use_client_cert:
if client_options.client_cert_source:
client_cert_source = client_options.client_cert_source
elif mtls.has_default_client_cert_source():
@@ -428,20 +452,14 @@ def _read_environment_variables():
google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT
is not any of ["auto", "never", "always"].
"""
use_client_cert = os.getenv(
"GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"
).lower()
use_client_cert = BigQueryReadClient._use_client_cert_effective()
use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower()
universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN")
if use_client_cert not in ("true", "false"):
raise ValueError(
"Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
)
if use_mtls_endpoint not in ("auto", "never", "always"):
raise MutualTLSChannelError(
"Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
)
return use_client_cert == "true", use_mtls_endpoint, universe_domain_env
return use_client_cert, use_mtls_endpoint, universe_domain_env

@staticmethod
def _get_client_cert_source(provided_cert_source, use_cert_flag):