import os
import grpc
import openai
from opentelemetry.sdk.trace.export import BatchSpanProcessor
from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import OTLPSpanExporter
from arize.otel import register
from openinference.instrumentation.openllmetry import OpenInferenceSpanProcessor
from opentelemetry.instrumentation.openai import OpenAIInstrumentor
# Set your OpenAI API key — use setdefault so a real key already exported in
# the environment is NOT clobbered by this placeholder value.
os.environ.setdefault("OPENAI_API_KEY", "YOUR_OPENAI_API_KEY")
# Set up Arize credentials (expected to be exported in the environment;
# these are None if unset — register() below will fail without them).
SPACE_ID = os.getenv("SPACE_ID")
API_KEY = os.getenv("API_KEY")
# Register a tracer provider with Arize and make it the global provider.
tracer_provider = register(
    space_id=SPACE_ID,
    api_key=API_KEY,
    project_name="openllmetry-integration",
    set_global_tracer_provider=True,
)

# Convert incoming OpenLLMetry spans into OpenInference format.
tracer_provider.add_span_processor(OpenInferenceSpanProcessor())

# Export spans over gRPC. If using Phoenix Cloud, change the endpoint to
# your Phoenix Cloud endpoint (space -> settings -> endpoint/hostname).
otlp_exporter = OTLPSpanExporter(
    endpoint="http://localhost:4317",
    headers={
        "authorization": f"Bearer {API_KEY}",
        "api_key": API_KEY,
        "arize-space-id": SPACE_ID,
        "arize-interface": "python",
        "user-agent": "arize-python",
    },
    compression=grpc.Compression.Gzip,  # use enum instead of string
)
tracer_provider.add_span_processor(BatchSpanProcessor(otlp_exporter))

# Auto-instrument the OpenAI client so each API call emits a span.
OpenAIInstrumentor().instrument(tracer_provider=tracer_provider)
# Invoke an OpenAI chat model; the instrumentor above records the request
# and response as a trace span.
client = openai.OpenAI()
response = client.chat.completions.create(
    model="gpt-4",
    messages=[
        {"role": "user", "content": "What is the national food of Yemen?"}
    ],
)
# Now view your converted OpenLLMetry traces in Phoenix!