# LiteLLM Reasoning Agent Example
"""
LiteLLM Reasoning Agent Example
This example demonstrates using reasoning models through LiteLLM.
The reasoning_content from the model response is extracted and displayed.
Supported reasoning models through LiteLLM:
- deepseek/deepseek-reasoner (DeepSeek R1)
"""
from agno.agent import Agent
from agno.models.litellm import LiteLLM
# ---------------------------------------------------------------------------
# Create Agent
# ---------------------------------------------------------------------------
task = "9.11 and 9.9 -- which is bigger?"

# DeepSeek R1 exposed through LiteLLM's provider-prefixed model id.
agent = Agent(
    model=LiteLLM(
        id="deepseek/deepseek-reasoner",
    ),
    markdown=True,
)

# ---------------------------------------------------------------------------
# Run Agent
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    # Run only when executed as a script (the original called this at import
    # time and left the __main__ guard as a dead `pass`). Streams the response
    # and surfaces the model's reasoning_content via show_reasoning=True.
    agent.print_response(task, stream=True, stream_events=True, show_reasoning=True)
## Run the Example
# Clone the Agno repo and enter the LiteLLM cookbook directory
git clone https://github.com/agno-agi/agno.git
cd agno/cookbook/90_models/litellm
# Create and activate virtual environment
./scripts/demo_setup.sh
source .venvs/demo/bin/activate
# Run the example script
# NOTE(review): the deepseek provider presumably needs DEEPSEEK_API_KEY
# exported in the environment — confirm against LiteLLM provider docs.
python reasoning_agent.py