bigquery_agent_analytics_plugin - Agent Engine (Not Working) #3951
-
|
Hello, I am trying to implement the bigquery_agent_analytics_plugin using Agent Engine. The service account has the required permissions, but I cannot get the logs to be recorded with this new implementation. Has anyone been able to resolve this? Cloud Run works, but Agent Engine does not. I am leaving some sample code below in case anyone can help me.

agent.py:

from google.adk.agents.llm_agent import Agent
# ==== agent.py (continued) ====
# Defines the BigQuery analytics plugin instance and the RAG tool that the
# root agent below uses; update.py imports `root_agent` and
# `bq_logging_plugin` from this module for deployment.
from google.adk.models.google_llm import Gemini
from log.registro import before_agent_callback, after_agent_callback, before_model_callback, after_model_callback , after_tool_callback
from log.langfuse_config import prompt
from google.genai import types
from google.adk.tools.retrieval import VertexAiRagRetrieval
from vertexai import rag
from utils.template_utils import escape_template_variables
from google.adk.tools.bigquery import BigQueryToolset, BigQueryCredentialsConfig
# --- BigQuery Agent Analytics (logs) ---
import os
import logging
from google.adk.plugins.bigquery_agent_analytics_plugin import (
    BigQueryAgentAnalyticsPlugin,
    BigQueryLoggerConfig,
)

# Target BigQuery destination for the analytics logs (ids redacted).
BQ_PROJECT_ID = "tXXXXXX"
BQ_DATASET_ID = "agent_engine"
BQ_TABLE_ID = "XXXXX"

# Plugin instance passed to AdkApp(plugins=[...]) in update.py.
bq_logging_plugin = BigQueryAgentAnalyticsPlugin(
    project_id=BQ_PROJECT_ID,
    dataset_id=BQ_DATASET_ID,
    table_id=BQ_TABLE_ID,
    location="us-central1",  # Specify the location explicitly
)

# NOTE(review): disabled code kept for reference — explicit credential wiring
# for the BigQuery toolset was commented out in the original post.
"""
credentials, _ = google.auth.default(scopes=["https://www.googleapis.com/auth/cloud-platform"])
bigquery_toolset = BigQueryToolset(
credentials_config=BigQueryCredentialsConfig(credentials=credentials)
)
"""

# Prompt text and agent identifiers come from a Langfuse-managed prompt
# object (see log.langfuse_config).
PROMPT = escape_template_variables(prompt.prompt)
LANGFUSE_AGENT_NAME = prompt.config['AGENT_NAME']
MODEL_NAME = prompt.config['MODEL']
PROJECT_ID = prompt.config['PROJECT_ID']
LOCATION = prompt.config['LOCATION']
CORPUS_ID = prompt.config['CORPUS_ID']
# Fully-qualified Vertex AI RAG corpus resource name.
RAG_CORPUS_ID = f"projects/{PROJECT_ID}/locations/{LOCATION}/ragCorpora/{CORPUS_ID}"

# Retrieval tool backed by the Vertex AI RAG corpus above.
rag_tool = VertexAiRagRetrieval(
    name="BaseConocimiento",
    description="Base de datos de conocimiento asociado Dexter Agente",
    rag_resources=[rag.RagResource(rag_corpus=RAG_CORPUS_ID)],
)
# Root agent: name/model/prompt come from the Langfuse prompt config above,
# with the logging callbacks from log.registro attached at each lifecycle
# stage (agent / model / tool).
root_agent = Agent(
    name=LANGFUSE_AGENT_NAME,
    model=MODEL_NAME,
    instruction=PROMPT,
    tools=[rag_tool],
    before_agent_callback=before_agent_callback,
    after_agent_callback=after_agent_callback,
    before_model_callback=before_model_callback,
    after_model_callback=after_model_callback,
    after_tool_callback=after_tool_callback,
)

# ==================== update.py (separate file) ====================
# NOTE(review): the original paste fused the closing ')' of agent.py, the
# "update.py" file label, and its first import onto one line, which is not
# valid Python; they are separated here.
import vertexai
from vertexai import agent_engines
from vertexai.preview import reasoning_engines
from agente.agent import root_agent, bq_logging_plugin
import uuid

# Initialise the Vertex AI SDK for the target project (ids redacted).
vertexai.init(
    project="XXXXXXXXX",
    location="us-central1",
    staging_bucket="gs://XXXXXXXXX"
)

# AdkApp accepts the agent and the plugins separately
app_adk = reasoning_engines.AdkApp(
    agent=root_agent,
    plugins=[bq_logging_plugin],
    enable_tracing=True,
)

# Random suffix so each deployment stages into a fresh GCS directory.
gcs_dir_name = str(uuid.uuid4())

# Update an existing Agent Engine (reasoning engine) deployment in place.
remote_app = agent_engines.update(
    agent_engine=app_adk,
    # Existing reasoning-engine resource to update (redacted).
    resource_name="projects/XXXXX/locations/us-central1/reasoningEngines/X06XXX68445XXX70XXXX",
    # Runtime service account (redacted) — presumably needs write access to
    # the BigQuery table used by the analytics plugin; verify.
    service_account="sa-agents@XXXXXX",
    requirements=[
        "google-adk>=1.21.0",
        "google-cloud-aiplatform[adk,agent_engines]>=1.21.0",
        "llama-index",
        "langfuse",
        "google-cloud-secret-manager",
        "google-cloud-firestore",
        "google-cloud-bigquery",
        "google-cloud-bigquery-storage",
        "google-cloud-storage",
    ],
    # Local packages uploaded alongside the agent code.
    extra_packages=["./agente","./log","./utils"],
    gcs_dir_name=f"tXXXXX{gcs_dir_name}",
    env_vars={
        "GOOGLE_CLOUD_AGENT_ENGINE_ENABLE_TELEMETRY": "true",
        "OTEL_INSTRUMENTATION_GENAI_CAPTURE_MESSAGE_CONTENT": "true",
    }
)
|
Beta Was this translation helpful? Give feedback.
Replies: 5 comments 21 replies
-
|
Hi @dvdeveloper, thanks for reporting this bug. Can you do the steps below to help figure out the root cause? 1) Check Cloud Logging: go to the Google Cloud Console, navigate to Logs Explorer, and filter by resource.type="reasoning_engine". Look for any "Permission Denied" or "Field not found" errors related to BigQuery or the ADK internal processes. 2) Version match: ensure that the google-adk and google-cloud-aiplatform versions are consistent between your local development environment (where Cloud Run likely works) and the requirements you've listed in update.py. Also make sure the google-adk version running on Agent Engine is >= 1.21.0. |
Beta Was this translation helpful? Give feedback.
-
# ==== Example deployment script (maintainer reply) ====
# Deploys an ADK agent with the BigQuery analytics plugin to Agent Engine,
# installing a local google-adk wheel instead of the PyPI release.
import os
import shutil
from google.adk.agents import Agent
from google.adk.models.google_llm import Gemini
from google.adk.plugins.bigquery_agent_analytics_plugin import BigQueryAgentAnalyticsPlugin
from google.adk.tools.bigquery import BigQueryCredentialsConfig, BigQueryToolset
import google.auth
import vertexai
from vertexai import agent_engines

# --- Configuration ---
PROJECT_ID = "haiyuan-dev"
LOCATION = "us-central1"
STAGING_BUCKET = "gs://haiyuan-adk-agent-staging-v3"
# ... [Checks for env vars] ...
# Route google-genai calls through Vertex AI instead of the Gemini API.
os.environ["GOOGLE_GENAI_USE_VERTEXAI"] = "True"

# BigQuery destination for the analytics plugin.
BQ_DATASET_ID = "agent_logs"
BQ_TABLE_ID = "0123_invocation_logs_complex"

vertexai.init(
    project=PROJECT_ID, location=LOCATION, staging_bucket=STAGING_BUCKET
)
# Client object used below for client.agent_engines.create().
client = vertexai.Client(project=PROJECT_ID, location=LOCATION)

# Plugin that records invocation analytics into the BigQuery table above.
bq_logger_plugin = BigQueryAgentAnalyticsPlugin(
    project_id=PROJECT_ID,
    dataset_id=BQ_DATASET_ID,
    table_id=BQ_TABLE_ID,
)

# Application Default Credentials with the cloud-platform scope, used by the
# BigQuery toolset.
try:
    credentials, _ = google.auth.default(
        scopes=["https://www.googleapis.com/auth/cloud-platform"]
    )
except google.auth.exceptions.DefaultCredentialsError:
    raise RuntimeError("Could not get default credentials.")
bq_creds_config = BigQueryCredentialsConfig(credentials=credentials)
bigquery_toolset = BigQueryToolset(credentials_config=bq_creds_config)
# Gemini model wrapper used by the agent.
llm = Gemini(model="gemini-2.5-flash")

root_agent = Agent(
    model=llm,
    name="my_bq_agent",
    instruction=(
        "You are a helpful assistant. You can use BigQuery tools to answer"
        " questions about data."
    ),
    tools=[bigquery_toolset],
)

# Wrap the agent together with the analytics plugin for deployment.
app_to_deploy = agent_engines.AdkApp(
    agent=root_agent, plugins=[bq_logger_plugin]
)
# --- 1. Prepare Local Dependencies Directory ---
# Define paths
# Local wheel of the ADK build under test; staged into ./adk_dependencies so
# it uploads with the deployment and Agent Engine installs this exact build.
local_whl_source = "/dist/google_adk-1.23.0-py3-none-any.whl"
dep_dir = "./adk_dependencies"  # Local temporary directory
whl_basename = os.path.basename(local_whl_source)
local_whl_dest = os.path.join(dep_dir, whl_basename)

# Create directory and copy file
if not os.path.exists(local_whl_source):
    raise FileNotFoundError(f"Source wheel not found: {local_whl_source}")
if os.path.exists(dep_dir):
    shutil.rmtree(dep_dir)  # Clean up previous runs
os.makedirs(dep_dir)
shutil.copy(local_whl_source, local_whl_dest)
print(f"Prepared local dependency: {local_whl_dest}")

# --- 2. Deploy ---
print(f"Deploying agent to Vertex AI Agent Engine...")
try:
    remote_app = client.agent_engines.create(
        agent=app_to_deploy,
        config={
            "display_name": "my-adk-app-with-bq-and-logger-v8",
            "staging_bucket": STAGING_BUCKET,
            "requirements": [
                "google-cloud-aiplatform[agent_engines]",
                # Reference the file inside the uploaded directory
                f"adk_dependencies/{whl_basename}",
                "google-cloud-bigquery",
                "google-auth",
                "db-dtypes",
                "pyarrow",
                "pydantic",
                "cloudpickle",
            ],
            # Upload the entire directory. It should appear as /code/adk_dependencies/ remotely.
            "extra_packages": [dep_dir],
        },
    )
    print(f"Deployed agent: {remote_app.api_resource.name}")
except Exception as e:
    # Best-effort reporting: print the failure and the full traceback but do
    # not re-raise, matching the original script's behavior.
    print(f"Failed to deploy agent: {e}")
    import traceback
    traceback.print_exc()
finally:
    # Optional: Cleanup the temporary directory
    # if os.path.exists(dep_dir):
    #     shutil.rmtree(dep_dir)
    pass
Beta Was this translation helpful? Give feedback.
-
|
Okay I've somehow managed to get this working now, but I'm not entirely sure what the real fix was. I think I was using the In particular I think |
Beta Was this translation helpful? Give feedback.
-
# Interactive smoke-test client for a deployed Agent Engine.
# requirements:
#   pip install "google-cloud-aiplatform[agent_engines]>=1.126.0"
# auth:
#   gcloud auth application-default login
import os
import uuid
import vertexai

# ---- Fill these in (or set as env vars) ----
# NOTE(review): the project id was split across two lines in the original
# paste, leaving an unterminated string literal (a SyntaxError); it is
# re-joined onto a single line here.
PROJECT_ID = "haiyuan-dev"
LOCATION = "us-central1"
# --- UPDATE THIS ---
AGENT_ID = "5718706211109470208"
# -------------------
# Fully-qualified resource name of the deployed reasoning engine.
AGENT_NAME = (
    f"projects/{PROJECT_ID}/locations/{LOCATION}/reasoningEngines/{AGENT_ID}"
)
def main():
    """Run an interactive terminal chat against the deployed Agent Engine."""
    vertexai.init(project=PROJECT_ID, location=LOCATION)
    client = vertexai.Client(project=PROJECT_ID, location=LOCATION)
    try:
        print(f"Fetching agent: {AGENT_NAME}")
        # This returns an 'AgentEngine' object in your SDK version
        agent = client.agent_engines.get(name=AGENT_NAME)
        # Fresh random user id per run so each session is isolated.
        user_id = f"interactive_user_{uuid.uuid4().hex[:8]}"
        print(f"Starting interactive session with user_id: {user_id}")
        print("Type 'exit' or 'quit' to end.\n")
        # Loop until the user types an exit keyword.
        while True:
            user_message = input("You: ")
            if user_message.lower() in ("exit", "quit"):
                print("Exiting.")
                break
            print("Agent: ", end="", flush=True)
            try:
                # REVERTED to standard stream_query matching your original working code
                for chunk in agent.stream_query(message=user_message, user_id=user_id):
                    print(chunk, end="", flush=True)
                print("\n")
            except Exception as e:
                # Keep the session alive after a failed query.
                print(f"\nError during query: {e}")
    except Exception as e:
        print(f"\nAn error occurred while fetching the agent: {e}")


if __name__ == "__main__":
    main()

@Dustan-FullStory — I shared my deployment script and test script here. I've tested the latest version of the package myself and it works as expected. |
Beta Was this translation helpful? Give feedback.
-
|
https://github.com/haiyuan-eng-google/exmaples-BigQuery-agent-analytics-plugin — please check here for more example code showing how to leverage the plugin in Agent Engine.
Beta Was this translation helpful? Give feedback.

Sorry, it is this flag from the `adk deploy agent_engine --help` output. In the case of this agent I am setting that flag to a value of `app`, which is the variable name of the `google.adk.apps.App` object as defined in my `agent.py`.