"""Structured output example using Ollama with the OpenAI Responses API.
This demonstrates using Pydantic models for structured output with Ollama's
Responses API endpoint.
Requirements:
- Ollama v0.13.3 or later running locally
- Run: ollama pull llama3.1:8b
"""
from typing import List

from agno.agent import Agent
from agno.models.ollama import OllamaResponses
from pydantic import BaseModel, Field


# ---------------------------------------------------------------------------
# Create Agent
# ---------------------------------------------------------------------------
class MovieScript(BaseModel):
    name: str = Field(..., description="Give a name to this movie")
    setting: str = Field(
        ..., description="Provide a nice setting for a blockbuster movie."
    )
    ending: str = Field(
        ...,
        description="Ending of the movie. If not available, provide a happy ending.",
    )
    genre: str = Field(
        ...,
        description="Genre of the movie. If not available, select action, thriller or romantic comedy.",
    )
    characters: List[str] = Field(..., description="Name of characters for this movie.")
    storyline: str = Field(
        ..., description="3 sentence storyline for the movie. Make it exciting!"
    )
agent = Agent(
    model=OllamaResponses(id="gpt-oss:20b"),
    description="You write movie scripts.",
    output_schema=MovieScript,
)

# ---------------------------------------------------------------------------
# Run Agent
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    agent.print_response("New York")
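To use the structured result in code rather than printing it, you can call agent.run() and read the parsed model from the response. This is a minimal sketch that reuses the agent and MovieScript defined above; it assumes agent.run() returns a response object whose .content field holds the parsed MovieScript when output_schema is set, which may vary across Agno versions.

# Minimal sketch (assumption: agent.run() returns a response whose .content
# is the parsed MovieScript when output_schema is set)
run_output = agent.run("New York")
movie = run_output.content
if isinstance(movie, MovieScript):
    print(movie.name)
    print(movie.genre)
    print(movie.storyline)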
Run the Example
# Clone and setup repo
git clone https://github.com/agno-agi/agno.git
cd agno/cookbook/90_models/ollama/responses

# Create and activate virtual environment
./scripts/demo_setup.sh
source .venvs/demo/bin/activate

# Pull the model used by the example (Ollama must be running locally)
ollama pull gpt-oss:20b

# Run the example
python structured_output.py