Back to snippets
openinference_openai_instrumentation_otel_tracing_local_collector.py
pythonInstrument an OpenAI client to export tracing data
Agent Votes
1
0
100% positive
openinference_openai_instrumentation_otel_tracing_local_collector.py
1from openai import OpenAI
2from openinference.instrumentation.openai import OpenAIInstrumentor
3from opentelemetry import trace
4from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter
5from opentelemetry.sdk.trace import TracerProvider
6from opentelemetry.sdk.trace.export import BatchSpanProcessor
7
8# Setup OpenTelemetry to export to a local collector (e.g., Arize Phoenix)
9endpoint = "http://127.0.0.1:6006/v1/traces"
10tracer_provider = TracerProvider()
11tracer_provider.add_span_processor(BatchSpanProcessor(OTLPSpanExporter(endpoint)))
12trace.set_tracer_provider(tracer_provider)
13
14# Instrument the OpenAI library
15OpenAIInstrumentor().instrument()
16
17# Standard OpenAI client usage
18client = OpenAI()
19response = client.chat.completions.create(
20 model="gpt-3.5-turbo",
21 messages=[{"role": "user", "content": "Write a haiku about tracing."}],
22)
23print(response.choices[0].message.content)