"""
Langfuse Via OpenLIT
====================
Demonstrates sending Agno traces to Langfuse through OpenLIT.
"""
import base64
import os
from agno.agent import Agent
from agno.models.openai import OpenAIChat
from agno.tools.websearch import WebSearchTools

# ---------------------------------------------------------------------------
# Setup
# ---------------------------------------------------------------------------
LANGFUSE_AUTH = base64.b64encode(
f"{os.getenv('LANGFUSE_PUBLIC_KEY')}:{os.getenv('LANGFUSE_SECRET_KEY')}".encode()
).decode()
os.environ["OTEL_EXPORTER_OTLP_ENDPOINT"] = (
"https://us.cloud.langfuse.com/api/public/otel" # US data region
)
# os.environ["OTEL_EXPORTER_OTLP_ENDPOINT"] = "https://cloud.langfuse.com/api/public/otel" # EU data region
# os.environ["OTEL_EXPORTER_OTLP_ENDPOINT"] = "http://localhost:3000/api/public/otel" # Local deployment (>= v3.22.0)
os.environ["OTEL_EXPORTER_OTLP_HEADERS"] = f"Authorization=Basic {LANGFUSE_AUTH}"
from opentelemetry.exporter.otlp.proto.http.trace_exporter import ( # noqa: E402
    OTLPSpanExporter,
)
from opentelemetry.sdk.trace import TracerProvider # noqa: E402
from opentelemetry.sdk.trace.export import SimpleSpanProcessor # noqa: E402
trace_provider = TracerProvider()
trace_provider.add_span_processor(SimpleSpanProcessor(OTLPSpanExporter()))
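# For longer-running or high-volume workloads, a BatchSpanProcessor could be
# swapped in for SimpleSpanProcessor (a sketch, not part of this cookbook):
# from opentelemetry.sdk.trace.export import BatchSpanProcessor
# trace_provider.add_span_processor(BatchSpanProcessor(OTLPSpanExporter()))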
# Sets the global default tracer provider
from opentelemetry import trace # noqa: E402
trace.set_tracer_provider(trace_provider)
# Creates a tracer from the global tracer provider
tracer = trace.get_tracer(__name__)
import openlit # noqa: E402
# Initialize OpenLIT instrumentation. disable_batch=True processes traces immediately.
openlit.init(tracer=tracer, disable_batch=True)

# ---------------------------------------------------------------------------
# Create Agent
# ---------------------------------------------------------------------------
agent = Agent(
    model=OpenAIChat(id="gpt-4o-mini"),
    tools=[WebSearchTools()],
    markdown=True,
    debug_mode=True,
)

# ---------------------------------------------------------------------------
# Run Example
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    agent.print_response("What is currently trending on Twitter?")
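Because the script already creates a tracer from the global provider, the agent run can optionally be wrapped in a manual parent span so everything emitted during the call is grouped under a named trace in Langfuse. A minimal sketch, assuming the setup above has already run (the span name "trending-run" is illustrative):

# Optional: group the agent run under a manual parent span (illustrative sketch)
with tracer.start_as_current_span("trending-run"):
    agent.print_response("What is currently trending on Twitter?")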
Run the Example
# Clone and setup repo
git clone https://github.com/agno-agi/agno.git
cd agno/cookbook/92_integrations/observability
# Create and activate virtual environment
./scripts/demo_setup.sh
source .venvs/demo/bin/activate
# Export relevant API keys
export LANGFUSE_PUBLIC_KEY="***"
export LANGFUSE_SECRET_KEY="***"
export OTEL_EXPORTER_OTLP_ENDPOINT="***"
export OTEL_EXPORTER_OTLP_HEADERS="***"
python langfuse_via_openlit.py
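Once the run completes, the trace should appear in the Traces view of your Langfuse project. Note that the script sets OTEL_EXPORTER_OTLP_ENDPOINT and OTEL_EXPORTER_OTLP_HEADERS itself from the Langfuse keys, so for this example only LANGFUSE_PUBLIC_KEY and LANGFUSE_SECRET_KEY strictly need to be exported.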