# Import open-telemetry dependencies
import ollama
from arize.otel import Endpoint, register
from openai import OpenAI
# Declare the model name used for both the local pull and chat requests.
LLAMA_MODEL_NAME = "llama3.2:1b"
# Download the llama3.2:1b model so it can run locally via Ollama.
# Bug fix: the constant is LLAMA_MODEL_NAME; the original referenced the
# undefined name OLLAMA_MODEL_NAME, which raises NameError at runtime.
ollama.pull(LLAMA_MODEL_NAME)
# Set up OpenTelemetry via Arize's convenience function.
# Bug fix: capture the TracerProvider returned by register(); the original
# discarded it, so the instrument() call below raised NameError on the
# undefined name `tracer_provider`.
tracer_provider = register(
    space_id="your-space-id",    # in app space settings page
    api_key="your-api-key",      # in app space settings page
    model_id="your-model-id",    # name this to whatever you would like
)
# Import the automatic instrumentor from OpenInference
from openinference.instrumentation.openai import OpenAIInstrumentor

# Finish automatic instrumentation: all OpenAI client calls are now traced
# through the provider registered above.
OpenAIInstrumentor().instrument(tracer_provider=tracer_provider)
# Point the OpenAI client at the local Ollama server's OpenAI-compatible API.
client = OpenAI(
    base_url="http://localhost:11434/v1",
    api_key="ollama",  # required by the client, but unused by Ollama
)
query = "Why is the sky blue?"
# Bug fix: the client variable is `client`; the original called the
# undefined name `oai_client`, raising NameError.
response = client.chat.completions.create(
    model=LLAMA_MODEL_NAME,
    messages=[
        {"role": "system", "content": "You are a helpful assistant."},
        {"role": "user", "content": query},
    ],
)