<aside> 📝 Reference materials

<aside> 💡 What we instrument here: traces + logs

</aside>

A script that calls the Gemini API. Set the API key in your environment first:

export GOOGLE_API_KEY=AI...

# /// script
# requires-python = ">=3.12"
# dependencies = [
#     "google-genai",
# ]
# ///
from google import genai

# genai.Client() reads GOOGLE_API_KEY from the environment
googleai_client = genai.Client()
response = googleai_client.models.generate_content(
    model="gemini-2.5-flash",
    contents="OpenTelemetry について短い詩を書いてください。",
)
print(response.text)
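
If the file is saved as generate.py (the filename referred to at the end of this section), uv can run it directly; the dependencies come from the inline # /// script metadata:

uv run generate.py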

Instrumentation: the same call, with traces and logs exported to the console

# /// script
# requires-python = ">=3.12"
# dependencies = [
#     "google-genai",
#     "opentelemetry-instrumentation-google-genai",
#     "opentelemetry-sdk",
# ]
# ///
import os

from google import genai
from opentelemetry._events import set_event_logger_provider
from opentelemetry._logs import get_logger_provider, set_logger_provider
from opentelemetry.instrumentation.google_genai import GoogleGenAiSdkInstrumentor
from opentelemetry.sdk._events import EventLoggerProvider
from opentelemetry.sdk._logs import LoggerProvider
from opentelemetry.sdk._logs.export import BatchLogRecordProcessor, ConsoleLogExporter
from opentelemetry.sdk.trace import TracerProvider
from opentelemetry.sdk.trace.export import BatchSpanProcessor, ConsoleSpanExporter
from opentelemetry.trace import get_tracer_provider, set_tracer_provider

# Traces: export spans to the console through a batching processor
set_tracer_provider(TracerProvider())
get_tracer_provider().add_span_processor(BatchSpanProcessor(ConsoleSpanExporter()))

# Logs: the GenAI events (prompt and response content) are emitted as log records
set_logger_provider(LoggerProvider())
get_logger_provider().add_log_record_processor(
    BatchLogRecordProcessor(ConsoleLogExporter())
)
set_event_logger_provider(EventLoggerProvider())

# Opt in to capturing message content, then patch the google-genai SDK
os.environ["OTEL_INSTRUMENTATION_GENAI_CAPTURE_MESSAGE_CONTENT"] = "true"
GoogleGenAiSdkInstrumentor().instrument()

googleai_client = genai.Client()
response = googleai_client.models.generate_content(
    model="gemini-2.5-flash",
    contents="OpenTelemetry について短い詩を書いてください。",
)
print(response.text)
Running it prints the poem, followed by console-exported telemetry like the following: two log records (the gen_ai.user.message and gen_ai.choice events), then the generate_content span.

{
    "body": {
        "content": "OpenTelemetry \\u306b\\u3064\\u3044\\u3066\\u77ed\\u3044\\u8a69\\u3092\\u66f8\\u3044\\u3066\\u304f\\u3060\\u3055\\u3044\\u3002"
    },
    "severity_number": null,
    "severity_text": null,
    "attributes": {
        "gen_ai.system": "gemini"
    },
    "dropped_attributes": 0,
    "timestamp": null,
    "observed_timestamp": "2025-11-07T16:10:11.001160Z",
    "trace_id": "0xe5826adf517c38de8599104dd601f736",
    "span_id": "0x7500e671096be906",
    "trace_flags": 1,
    "resource": {
        "attributes": {
            "telemetry.sdk.language": "python",
            "telemetry.sdk.name": "opentelemetry",
            "telemetry.sdk.version": "1.38.0",
            "service.name": "unknown_service"
        },
        "schema_url": ""
    },
    "event_name": "gen_ai.user.message"
}
{
    "body": {
        "index": 0,
        "content": {
            "parts": [
                {
                    "function_call": null,
                    "code_execution_result": null,
                    "executable_code": null,
                    "file_data": null,
                    "function_response": null,
                    "inline_data": null,
                    "text": "OpenTelemetry\\u3001\\u958b\\u304b\\u308c\\u305f\\u6249\\u3001\\n\\u898b\\u3048\\u306c\\u6d41\\u308c\\u3092\\u3001\\u5149\\u3067\\u7167\\u3089\\u3059\\u3002\\n\\u5206\\u6563\\u3057\\u305f\\u4e16\\u754c\\u3001\\u5171\\u901a\\u306e\\u8a00\\u8449\\u3002\\n\\n\\u30c8\\u30ec\\u30fc\\u30b9\\u306f\\u7b4b\\u9053\\u3001\\u8fbf\\u308b\\u8ecc\\u8de1\\u3001\\n\\u30e1\\u30c8\\u30ea\\u30af\\u30b9\\u306f\\u9f13\\u52d5\\u3001\\u30b7\\u30b9\\u30c6\\u30e0\\u306e\\u606f\\u5439\\u3002\\n\\u30ed\\u30b0\\u306f\\u8a9e\\u308b\\u3001\\u771f\\u5b9f\\u306e\\u97ff\\u304d\\u3002\\n\\n\\u96c6\\u3081\\u3089\\u308c\\u3057\\u30c7\\u30fc\\u30bf\\u3001\\u7d21\\u304c\\u308c\\u308b\\u77e5\\u6075\\u3001\\n\\u95c7\\u306b\\u6f5c\\u3080\\u8b0e\\u3092\\u3001\\u660e\\u308b\\u307f\\u306b\\u51fa\\u3059\\u3002\\n\\u30b7\\u30b9\\u30c6\\u30e0\\u306e\\u672a\\u6765\\u3092\\u3001\\u78ba\\u304b\\u306b\\u5c0e\\u304f\\u3002",
                    "thought": null,

                    "thought_signature": null,
                    "video_metadata": null
                }
            ],
            "role": "model"
        },
        "finish_reason": "STOP"
    },
    "severity_number": null,
    "severity_text": null,
    "attributes": {
        "gen_ai.system": "gemini"
    },
    "dropped_attributes": 0,
    "timestamp": null,
    "observed_timestamp": "2025-11-07T16:10:21.172870Z",
    "trace_id": "0xe5826adf517c38de8599104dd601f736",
    "span_id": "0x7500e671096be906",
    "trace_flags": 1,
    "resource": {
        "attributes": {
            "telemetry.sdk.language": "python",
            "telemetry.sdk.name": "opentelemetry",
            "telemetry.sdk.version": "1.38.0",
            "service.name": "unknown_service"
        },
        "schema_url": ""
    },
    "event_name": "gen_ai.choice"
}
{
    "name": "generate_content gemini-2.5-flash",
    "context": {
        "trace_id": "0xe5826adf517c38de8599104dd601f736",
        "span_id": "0x7500e671096be906",
        "trace_state": "[]"
    },
    "kind": "SpanKind.INTERNAL",
    "parent_id": null,
    "start_time": "2025-11-07T16:10:11.001064Z",
    "end_time": "2025-11-07T16:10:21.173106Z",
    "status": {
        "status_code": "UNSET"
    },
    "attributes": {
        "code.function.name": "google.genai.Models.generate_content",
        "gen_ai.request.model": "gemini-2.5-flash",
        "gen_ai.operation.name": "generate_content",
        "gen_ai.usage.input_tokens": 10,
        "gen_ai.usage.output_tokens": 99,
        "gen_ai.response.finish_reasons": [
            "stop"
        ],
        "gen_ai.system": "gemini"
    },
    "events": [],
    "links": [],
    "resource": {
        "attributes": {
            "telemetry.sdk.language": "python",
            "telemetry.sdk.name": "opentelemetry",
            "telemetry.sdk.version": "1.38.0",
            "service.name": "unknown_service"
        },
        "schema_url": ""
    }
}

Advanced: a peek at genai-processors

(Note: we most likely will not have time to work through the "Advanced" parts together.)

My blog post: Trying the Research Agent example from genai-processors, announced by Google

# /// script
# requires-python = ">=3.12"
# dependencies = [
#     "genai-processors",
#     "opentelemetry-instrumentation-google-genai",
#     "opentelemetry-sdk",
# ]
# ///
import asyncio
import os
import time
from datetime import datetime

from genai_processors import content_api, processor, streams
from genai_processors.examples import research
from opentelemetry._events import set_event_logger_provider
from opentelemetry._logs import get_logger_provider, set_logger_provider
from opentelemetry.instrumentation.google_genai import GoogleGenAiSdkInstrumentor
from opentelemetry.sdk._events import EventLoggerProvider
from opentelemetry.sdk._logs import LoggerProvider
from opentelemetry.sdk._logs.export import BatchLogRecordProcessor, ConsoleLogExporter
from opentelemetry.sdk.trace import TracerProvider
from opentelemetry.sdk.trace.export import BatchSpanProcessor, ConsoleSpanExporter
from opentelemetry.trace import get_tracer_provider, set_tracer_provider

ProcessorPart = processor.ProcessorPart

# Print a part, flagging progress updates that arrive on the "status" substream
def render_part(part: ProcessorPart) -> None:
    if part.substream_name == "status":
        print(f"--- \\n *Status*: {part.text}")
    else:
        try:
            print(part.text)
        except Exception:
            print(f" {part.text} ")

GOOGLE_API_KEY = os.environ["GOOGLE_API_KEY"]

USER_PROMPT = "Research the best things about owning dalmatians!"

async def main():
    input_stream = streams.stream_content([ProcessorPart(USER_PROMPT)])

    output_parts = content_api.ProcessorContent()
    async for content_part in research.ResearchAgent(api_key=GOOGLE_API_KEY)(
        input_stream
    ):
        if content_part.substream_name == "status":
            render_part(content_part)
        output_parts += content_part

    render_part(
        ProcessorPart(f"""# Final synthesized research
  {content_api.as_text(output_parts, substream_name="")}""")
    )

if __name__ == "__main__":
    os.environ["OTEL_INSTRUMENTATION_GENAI_CAPTURE_MESSAGE_CONTENT"] = "true"

    tracing_file = f"trace-{datetime.now().strftime('%Y%m%d-%H%M%S')}.jsonl"
    with open(tracing_file, "w") as out:
        set_tracer_provider(TracerProvider())
        get_tracer_provider().add_span_processor(
            BatchSpanProcessor(ConsoleSpanExporter(out=out))
        )

        set_logger_provider(LoggerProvider())
        get_logger_provider().add_log_record_processor(
            BatchLogRecordProcessor(ConsoleLogExporter(out=out))
        )
        set_event_logger_provider(EventLoggerProvider())

        GoogleGenAiSdkInstrumentor().instrument()

        asyncio.run(main())

        # Give the batch processors time to flush before the file is closed
        time.sleep(5)
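
Run it with uv as before. The status updates and the final synthesized research go to stdout, while all telemetry is written to the timestamped trace-*.jsonl file.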

<aside> 📝 A similar example: trying LangExtract, Google's new release that uses Gemini to extract arbitrary information from text

</aside>

Advanced: output destinations other than the console

My blog post: Sending google-genai telemetry with OpenTelemetry to an exporter spun up locally with docker run

docker run -p 4317:4317 -p 4318:4318 --rm -v $(pwd)/collector-config.yaml:/etc/otelcol/config.yaml otel/opentelemetry-collector:0.139.0

The mounted collector-config.yaml accepts OTLP over gRPC (4317) and HTTP (4318) and prints everything it receives via the debug exporter:

receivers:
  otlp:
    protocols:
      grpc:
        endpoint: 0.0.0.0:4317
      http:
        endpoint: 0.0.0.0:4318
exporters:
  debug:
    verbosity: detailed
service:
  pipelines:
    traces:
      receivers: [otlp]
      exporters: [debug]
    metrics:
      receivers: [otlp]
      exporters: [debug]
    logs:
      receivers: [otlp]
      exporters: [debug]
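
To point generate.py at this collector instead of the console, swap the console exporters for OTLP ones. Below is a minimal sketch (not the exact code from the blog post), assuming the gRPC exporters from the opentelemetry-exporter-otlp-proto-grpc package; their default endpoint, localhost:4317, matches the port mapping above:

# /// script
# requires-python = ">=3.12"
# dependencies = [
#     "google-genai",
#     "opentelemetry-exporter-otlp-proto-grpc",
#     "opentelemetry-instrumentation-google-genai",
#     "opentelemetry-sdk",
# ]
# ///
import os

from google import genai
from opentelemetry._events import set_event_logger_provider
from opentelemetry._logs import get_logger_provider, set_logger_provider
from opentelemetry.exporter.otlp.proto.grpc._log_exporter import OTLPLogExporter
from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import OTLPSpanExporter
from opentelemetry.instrumentation.google_genai import GoogleGenAiSdkInstrumentor
from opentelemetry.sdk._events import EventLoggerProvider
from opentelemetry.sdk._logs import LoggerProvider
from opentelemetry.sdk._logs.export import BatchLogRecordProcessor
from opentelemetry.sdk.trace import TracerProvider
from opentelemetry.sdk.trace.export import BatchSpanProcessor
from opentelemetry.trace import get_tracer_provider, set_tracer_provider

# Traces and logs now go to the collector over OTLP/gRPC (localhost:4317 by default)
set_tracer_provider(TracerProvider())
get_tracer_provider().add_span_processor(BatchSpanProcessor(OTLPSpanExporter()))

set_logger_provider(LoggerProvider())
get_logger_provider().add_log_record_processor(
    BatchLogRecordProcessor(OTLPLogExporter())
)
set_event_logger_provider(EventLoggerProvider())

os.environ["OTEL_INSTRUMENTATION_GENAI_CAPTURE_MESSAGE_CONTENT"] = "true"
GoogleGenAiSdkInstrumentor().instrument()

googleai_client = genai.Client()
response = googleai_client.models.generate_content(
    model="gemini-2.5-flash",
    contents="OpenTelemetry について短い詩を書いてください。",
)
print(response.text)

Start the collector first, then run the script; the generate_content span and the two gen_ai.* event log records appear in the collector's output.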

Revert generate.py