|
| 1 | +# Copyright 2026 Google LLC |
| 2 | +# |
| 3 | +# Licensed under the Apache License, Version 2.0 (the "License"); |
| 4 | +# you may not use this file except in compliance with the License. |
| 5 | +# You may obtain a copy of the License at |
| 6 | +# |
| 7 | +# http://www.apache.org/licenses/LICENSE-2.0 |
| 8 | +# |
| 9 | +# Unless required by applicable law or agreed to in writing, software |
| 10 | +# distributed under the License is distributed on an "AS IS" BASIS, |
| 11 | +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
| 12 | +# See the License for the specific language governing permissions and |
| 13 | +# limitations under the License. |
| 14 | + |
| 15 | +from __future__ import annotations |
| 16 | + |
| 17 | +import logging |
| 18 | +from typing import Any, Dict, List, Optional |
| 19 | + |
| 20 | +from google.api_core import exceptions as api_exceptions |
| 21 | +from google.auth.credentials import Credentials |
| 22 | +from google.cloud import dataplex_v1 |
| 23 | + |
| 24 | +from . import client |
| 25 | +from .config import BigQueryToolConfig |
| 26 | + |
| 27 | +def _construct_search_query_helper(predicate: str, operator: str, items: List[str]) -> str: |
| 28 | + if not items: |
| 29 | + return "" |
| 30 | + if len(items) == 1: |
| 31 | + return f'{predicate}{operator}"{items[0]}"' |
| 32 | + |
| 33 | + clauses = [f'{predicate}{operator}"{item}"' for item in items] |
| 34 | + return "(" + " OR ".join(clauses) + ")" |
| 35 | + |
def search_catalog(
    prompt: str,
    project_id: str,
    credentials: Credentials,
    settings: BigQueryToolConfig,
    location: str,
    page_size: int = 10,
    project_ids_filter: Optional[List[str]] = None,
    dataset_ids_filter: Optional[List[str]] = None,
    types_filter: Optional[List[str]] = None,
) -> Dict[str, Any]:
    """Search for BigQuery assets within Dataplex.

    Args:
        prompt (str): The base search query (natural language or keywords).
        project_id (str): The Google Cloud project ID to scope the search.
        credentials (Credentials): Credentials for the request.
        settings (BigQueryToolConfig): BigQuery tool settings.
        location (str): The Dataplex location to use.
        page_size (int): Maximum number of results.
        project_ids_filter (Optional[List[str]]): Specific project IDs to
            include in the search results. If None, defaults to the scoping
            project_id.
        dataset_ids_filter (Optional[List[str]]): BigQuery dataset IDs to
            filter by.
        types_filter (Optional[List[str]]): Entry types to filter by
            (e.g., "TABLE", "DATASET").

    Returns:
        dict: Search results or error.
    """
    try:
        # Guard clause: a scoping project is mandatory.
        if not project_id:
            return {"status": "ERROR", "error_details": "project_id must be provided."}

        catalog_client = client.get_dataplex_catalog_client(
            credentials=credentials,
            user_agent=[settings.application_name, "search_catalog"],
        )

        # Assemble the query one clause at a time; all clauses are ANDed.
        clauses: List[str] = []
        if prompt:
            clauses.append(f"({prompt})")

        # Restrict to the caller-supplied projects, or fall back to the
        # scoping project (non-empty here, so the clause is always produced).
        scoped_projects = project_ids_filter or [project_id]
        clauses.append(_construct_search_query_helper("projectid", "=", scoped_projects))

        # Restrict to specific datasets via their linked BigQuery resources,
        # crossing every scoped project with every requested dataset ID.
        if dataset_ids_filter:
            dataset_terms = []
            for pid in scoped_projects:
                for did in dataset_ids_filter:
                    dataset_terms.append(
                        f'linked_resource:"//bigquery.googleapis.com/projects/{pid}/datasets/{did}/*"'
                    )
            if dataset_terms:
                clauses.append(f"({' OR '.join(dataset_terms)})")

        # Restrict to the requested entry types, if any.
        if types_filter:
            clauses.append(_construct_search_query_helper("type", "=", types_filter))

        # Always scope to BigQuery system
        clauses.append("system=BIGQUERY")

        full_query = " AND ".join(c for c in clauses if c)

        request = dataplex_v1.SearchEntriesRequest(
            name=f"projects/{project_id}/locations/{location}",
            query=full_query,
            page_size=page_size,
            semantic_search=True,
        )

        response = catalog_client.search_entries(request=request)

        # Flatten each search hit into a plain dict of strings.
        entries: List[Dict[str, Any]] = []
        for hit in response.results:
            dataplex_entry = hit.dataplex_entry
            src = dataplex_entry.entry_source
            entries.append(
                {
                    "name": dataplex_entry.name,
                    "display_name": src.display_name or "",
                    "entry_type": dataplex_entry.entry_type,
                    "update_time": str(dataplex_entry.update_time),
                    "linked_resource": src.resource or "",
                    "description": src.description or "",
                    "location": src.location or "",
                }
            )
        return {"status": "SUCCESS", "results": entries}

    except api_exceptions.GoogleAPICallError as e:
        logging.exception("search_catalog tool: API call failed")
        return {"status": "ERROR", "error_details": f"Dataplex API Error: {str(e)}"}
    except Exception as ex:
        logging.exception("search_catalog tool: Unexpected error")
        return {"status": "ERROR", "error_details": str(ex)}
| 130 | + |
0 commit comments