109 changes: 92 additions & 17 deletions .generator/cli.py
@@ -23,6 +23,7 @@
import subprocess
import sys
import tempfile
import time
import yaml
from datetime import date, datetime
from functools import lru_cache
@@ -31,6 +32,40 @@
import build.util
import parse_googleapis_content

logging.basicConfig(stream=sys.stdout, level=logging.INFO)

import functools

PERF_LOGGING_ENABLED = os.environ.get("ENABLE_PERF_LOGS") == "1"

if PERF_LOGGING_ENABLED:
perf_logger = logging.getLogger("performance_metrics")
perf_logger.setLevel(logging.INFO)
perf_handler = logging.FileHandler("/tmp/performance_metrics.log", mode='w')
perf_formatter = logging.Formatter('%(asctime)s | %(message)s', datefmt='%H:%M:%S')
perf_handler.setFormatter(perf_formatter)
perf_logger.addHandler(perf_handler)
perf_logger.propagate = False

def track_time(func):
"""
Decorator. Usage: @track_time
If logging is OFF, it returns the original function (Zero Overhead).
If logging is ON, it wraps the function to measure execution time.
"""
if not PERF_LOGGING_ENABLED:
return func

@functools.wraps(func)
def wrapper(*args, **kwargs):
start_time = time.perf_counter()
try:
return func(*args, **kwargs)
finally:
duration = time.perf_counter() - start_time
perf_logger.info(f"{func.__name__:<30} | {duration:.4f} seconds")

return wrapper
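# Illustrative usage sketch (not part of this module): with ENABLE_PERF_LOGS=1 in
# the environment, each decorated call appends one timing line to
# /tmp/performance_metrics.log; with the variable unset, @track_time is a no-op.
#
#     @track_time
#     def _example_step():
#         time.sleep(0.1)
#
#     _example_step()
#     # /tmp/performance_metrics.log then contains a line such as:
#     # 12:00:00 | _example_step                  | 0.1001 seconds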

try:
import synthtool
@@ -323,8 +358,9 @@ def _get_library_id(request_data: Dict) -> str:
return library_id


@track_time
def _run_post_processor(output: str, library_id: str, is_mono_repo: bool):
"""Runs the synthtool post-processor on the output directory.
"""Runs the synthtool post-processor (templates) and Ruff formatter (lint/format).

Args:
output(str): Path to the directory in the container where code
@@ -334,25 +370,57 @@ def _run_post_processor(output: str, library_id: str, is_mono_repo: bool):
"""
os.chdir(output)
path_to_library = f"packages/{library_id}" if is_mono_repo else "."
logger.info("Running Python post-processor...")

# 1. Run Synthtool (Templates & Fixers only)
# Note: This relies on 'nox' being disabled in your environment (via run_fast.sh shim)
# to avoid the slow formatting step inside owlbot.
logger.info("Running Python post-processor (Templates & Fixers)...")
if SYNTHTOOL_INSTALLED:
if is_mono_repo:
python_mono_repo.owlbot_main(path_to_library)
else:
# Some repositories have customizations in `librarian.py`.
# If this file exists, run those customizations instead of `owlbot_main`
if Path(f"{output}/librarian.py").exists():
subprocess.run(["python3.14", f"{output}/librarian.py"])
try:
if is_mono_repo:
python_mono_repo.owlbot_main(path_to_library)
else:
python.owlbot_main()
else:
raise SYNTHTOOL_IMPORT_ERROR # pragma: NO COVER
# Handle custom librarian scripts if present
if Path(f"{output}/librarian.py").exists():
subprocess.run(["python3.14", f"{output}/librarian.py"])
else:
python.owlbot_main()
except Exception as e:
logger.warning(f"Synthtool warning (non-fatal): {e}")

# 2. Run Ruff (fast formatter & import sorter).
# This replaces both 'isort' and 'black' and runs in < 1 second.
# The Black-compatible settings live in .generator/ruff.toml; the path is
# resolved relative to this script so it works on any machine.
logger.info("Running Ruff (Fast Formatter)...")
ruff_config_path = (Path(__file__).parent / "ruff.toml").resolve()

if not ruff_config_path.exists():
logger.warning(f"⚠️ Could not find Ruff config at {ruff_config_path}. Using defaults.")

try:

# If there is no noxfile, run `isort`` and `black` on the output.
# This is required for proto-only libraries which are not GAPIC.
if not Path(f"{output}/{path_to_library}/noxfile.py").exists():
subprocess.run(["isort", output])
subprocess.run(["black", output])
subprocess.run(["ruff", "--version"], check=False)
logger.info("Running Ruff Check (Imports)...")
base_args = ["ruff", "--config", str(ruff_config_path)]

# STEP A: Fix Imports (like isort)
subprocess.run(
base_args + ["check", "--fix", "."],
check=True,
)

# STEP B: Format Code (like black)
subprocess.run(
base_args + ["format", "."],
check=True,
)
logger.info("Ruff formatting completed successfully.")

except FileNotFoundError:
logger.warning("⚠️ Ruff binary not found. Code will be unformatted.")
logger.warning(" Please run: pip install ruff")
except subprocess.CalledProcessError as e:
logger.error(f"❌ Ruff failed with exit code {e.returncode}.")

logger.info("Python post-processor ran successfully.")

@@ -392,6 +460,7 @@ def _add_header_to_files(directory: str) -> None:
f.writelines(lines)


@track_time
def _copy_files_needed_for_post_processing(
output: str, input: str, library_id: str, is_mono_repo: bool
):
Expand Down Expand Up @@ -444,6 +513,7 @@ def _copy_files_needed_for_post_processing(
)


@track_time
def _clean_up_files_after_post_processing(
output: str, library_id: str, is_mono_repo: bool
):
Expand Down Expand Up @@ -590,6 +660,7 @@ def _get_repo_name_from_repo_metadata(base: str, library_id: str, is_mono_repo:
return repo_name


@track_time
def _generate_repo_metadata_file(
output: str, library_id: str, source: str, apis: List[Dict], is_mono_repo: bool
):
Expand Down Expand Up @@ -631,6 +702,7 @@ def _generate_repo_metadata_file(
_write_json_file(output_repo_metadata, metadata_content)


@track_time
def _copy_readme_to_docs(output: str, library_id: str, is_mono_repo: bool):
"""Copies the README.rst file for a generated library to docs/README.rst.

Expand Down Expand Up @@ -672,6 +744,7 @@ def _copy_readme_to_docs(output: str, library_id: str, is_mono_repo: bool):
f.write(content)


@track_time
def handle_generate(
librarian: str = LIBRARIAN_DIR,
source: str = SOURCE_DIR,
Expand Down Expand Up @@ -933,6 +1006,7 @@ def _stage_gapic_library(tmp_dir: str, staging_dir: str) -> None:
shutil.copytree(tmp_dir, staging_dir, dirs_exist_ok=True)


@track_time
def _generate_api(
api_path: str,
library_id: str,
Expand Down Expand Up @@ -1748,6 +1822,7 @@ def handle_release_stage(
output=args.output,
input=args.input,
)

elif args.command == "build":
args.func(librarian=args.librarian, repo=args.repo)
elif args.command == "release-stage":
3 changes: 1 addition & 2 deletions .generator/requirements.in
@@ -3,5 +3,4 @@ gapic-generator==1.30.3 # Fix mypy checks https://github.com/googleapis/gapic-ge
nox
starlark-pyo3>=2025.1
build
black==23.7.0
isort==5.11.0
ruff
22 changes: 22 additions & 0 deletions .generator/ruff.toml
@@ -0,0 +1,22 @@
# .generator/ruff.toml
line-length = 88
# Enable preview to get the strictest Black compatibility
preview = true

[lint]
select = ["I"]

[lint.isort]
# 1. Force 'google' to be first-party (keeps google.* imports together)
# known-first-party = ["google"]

# 2. Combine imports from the same module (Standard isort behavior)
# Prevents splitting "from x import a, b" into multiple lines.
# combine-as-imports = true

# 3. Sort imports by module name, ignoring "from" vs "import" (standard isort behavior).
# This minimizes diffs in files where isort had interleaved the two styles but
# Ruff's default would separate them.
force-sort-within-sections = true

[format]
quote-style = "double"
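
For illustration, a minimal sketch of what force-sort-within-sections changes (module names chosen only as examples). With the option enabled, plain "import" and "from ... import" statements are ordered together by module name within a section:

from google.api_core import operation
from google.auth import credentials
import google.protobuf

With Ruff's default (option off), the plain import would be grouped first:

import google.protobuf
from google.api_core import operation
from google.auth import credentials

Enabling the option keeps Ruff's output close to the ordering isort produced in this repository, which minimizes diffs such as the ones further below.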
40 changes: 40 additions & 0 deletions .librarian/generate-request.json
@@ -0,0 +1,40 @@
{
"id": "google-cloud-vision",
"version": "3.12.0",
"apis": [
{
"path": "google/cloud/vision/v1p3beta1",
"service_config": "vision_v1p3beta1.yaml"
},
{
"path": "google/cloud/vision/v1",
"service_config": "vision_v1.yaml"
},
{
"path": "google/cloud/vision/v1p1beta1",
"service_config": "vision_v1p1beta1.yaml"
},
{
"path": "google/cloud/vision/v1p2beta1",
"service_config": "vision_v1p2beta1.yaml"
},
{
"path": "google/cloud/vision/v1p4beta1",
"service_config": "vision_v1p4beta1.yaml"
}
],
"source_roots": [
"packages/google-cloud-vision"
],
"preserve_regex": [
"packages/google-cloud-vision/CHANGELOG.md",
"docs/CHANGELOG.md",
"samples/README.txt",
"samples/snippets/README.rst",
"tests/system"
],
"remove_regex": [
"packages/google-cloud-vision/"
],
"tag_format": "{id}-v{version}"
}
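
As a worked example (assuming the librarian tooling performs simple placeholder substitution), the tag_format above presumably expands as:

"{id}-v{version}".format(id="google-cloud-vision", version="3.12.0")  # -> "google-cloud-vision-v3.12.0"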
@@ -18,10 +18,10 @@
__version__ = package_version.__version__


from google.cloud.vision_v1 import ImageAnnotatorClient
from google.cloud.vision_v1.services.image_annotator.async_client import (
ImageAnnotatorAsyncClient,
)
from google.cloud.vision_v1 import ImageAnnotatorClient
from google.cloud.vision_v1.services.product_search.async_client import (
ProductSearchAsyncClient,
)
@@ -16,7 +16,6 @@
import sys

import google.api_core as api_core

from google.cloud.vision_v1 import gapic_version as package_version

__version__ = package_version.__version__
@@ -28,11 +27,8 @@
# this code path once we drop support for Python 3.7
import importlib_metadata as metadata

from google.cloud.vision_helpers import VisionHelpers
from google.cloud.vision_helpers.decorators import add_single_feature_methods

from .services.image_annotator import ImageAnnotatorAsyncClient
from .services.image_annotator import ImageAnnotatorClient as IacImageAnnotatorClient
from .services.image_annotator import ImageAnnotatorAsyncClient, ImageAnnotatorClient
from .services.product_search import ProductSearchAsyncClient, ProductSearchClient
from .types.geometry import BoundingPoly, NormalizedVertex, Position, Vertex
from .types.image_annotator import (
@@ -208,13 +204,6 @@ def _get_version(dependency_name):
+ "https://devguide.python.org/versions/"
)


@add_single_feature_methods
class ImageAnnotatorClient(VisionHelpers, IacImageAnnotatorClient):
__doc__ = IacImageAnnotatorClient.__doc__
Feature = Feature


__all__ = (
"ImageAnnotatorAsyncClient",
"ProductSearchAsyncClient",
@@ -34,21 +34,21 @@
from google.api_core import retry_async as retries
from google.api_core.client_options import ClientOptions
from google.auth import credentials as ga_credentials # type: ignore
from google.cloud.vision_v1 import gapic_version as package_version
from google.oauth2 import service_account # type: ignore
import google.protobuf

from google.cloud.vision_v1 import gapic_version as package_version

try:
OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None]
except AttributeError: # pragma: NO COVER
OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore

from google.api_core import operation # type: ignore
from google.api_core import operation_async # type: ignore
from google.longrunning import operations_pb2 # type: ignore

from google.api_core import (
operation, # type: ignore
operation_async, # type: ignore
)
from google.cloud.vision_v1.types import image_annotator
from google.longrunning import operations_pb2 # type: ignore

from .client import ImageAnnotatorClient
from .transports.base import DEFAULT_CLIENT_INFO, ImageAnnotatorTransport
@@ -122,7 +122,9 @@ def from_service_account_info(cls, info: dict, *args, **kwargs):
Returns:
ImageAnnotatorAsyncClient: The constructed client.
"""
return ImageAnnotatorClient.from_service_account_info.__func__(ImageAnnotatorAsyncClient, info, *args, **kwargs) # type: ignore
return ImageAnnotatorClient.from_service_account_info.__func__(
ImageAnnotatorAsyncClient, info, *args, **kwargs
) # type: ignore

@classmethod
def from_service_account_file(cls, filename: str, *args, **kwargs):
@@ -138,7 +140,9 @@ def from_service_account_file(cls, filename: str, *args, **kwargs):
Returns:
ImageAnnotatorAsyncClient: The constructed client.
"""
return ImageAnnotatorClient.from_service_account_file.__func__(ImageAnnotatorAsyncClient, filename, *args, **kwargs) # type: ignore
return ImageAnnotatorClient.from_service_account_file.__func__(
ImageAnnotatorAsyncClient, filename, *args, **kwargs
) # type: ignore

from_service_account_json = from_service_account_file
