from arize.otel import register
from openinference.instrumentation.openai import OpenAIInstrumentor

# Configure the OpenTelemetry tracer provider for Arize.
# Replace the placeholder credentials with your real space ID and API key.
tracer_provider = register(
    space_id="your-space-id",
    api_key="your-api-key",
    project_name="my-llm-app",
)

# Attach the OpenInference instrumentor so every OpenAI client call
# emits spans through the provider configured above.
OpenAIInstrumentor().instrument(tracer_provider=tracer_provider)

# From this point on, LLM calls made via the OpenAI SDK are traced automatically.
Supported frameworks: OpenAI, LangChain, LlamaIndex, Anthropic, Bedrock, and more through OpenInference instrumentors. Learn more: Tracing Documentation.