Code
cookbook/models/llama_cpp/structured_output.py
from typing import List

from agno.agent import Agent
from agno.models.llama_cpp import LlamaCpp
from agno.run.agent import RunOutput
from pydantic import BaseModel, Field
from rich.pretty import pprint  # noqa


class MovieScript(BaseModel):
    name: str = Field(..., description="Give a name to this movie")
    setting: str = Field(
        ..., description="Provide a nice setting for a blockbuster movie."
    )
    ending: str = Field(
        ...,
        description="Ending of the movie. If not available, provide a happy ending.",
    )
    genre: str = Field(
        ...,
        description="Genre of the movie. If not available, select action, thriller or romantic comedy.",
    )
    characters: List[str] = Field(..., description="Name of characters for this movie.")
    storyline: str = Field(
        ..., description="3 sentence storyline for the movie. Make it exciting!"
    )


# Agent that returns a structured output
structured_output_agent = Agent(
    model=LlamaCpp(id="ggml-org/gpt-oss-20b-GGUF"),
    description="You write movie scripts.",
    output_schema=MovieScript,
)

# Run the agent synchronously
structured_output_response: RunOutput = structured_output_agent.run("New York")
pprint(structured_output_response.content)
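Because the agent sets output_schema=MovieScript, the response content is a parsed MovieScript instance rather than a raw string. A minimal sketch of reading individual fields (not part of the cookbook file; it assumes the run succeeds and the model returns valid structured output):

# Sketch: access typed fields on the parsed MovieScript
movie = structured_output_response.content
if isinstance(movie, MovieScript):
    print(movie.name)        # generated movie title
    print(movie.genre)       # genre chosen by the model
    print(movie.characters)  # list of character names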
Usage
1. Create a virtual environment
Open the Terminal and create a Python virtual environment:
python3 -m venv .venv
source .venv/bin/activate
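On Windows, activate the environment with:
.venv\Scripts\activate
(Command Prompt; use .venv\Scripts\Activate.ps1 in PowerShell.)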
2. Install llama.cpp
Follow the llama.cpp installation guide and start the server:
llama-server -hf ggml-org/gpt-oss-20b-GGUF --ctx-size 0 --jinja -ub 2048 -b 2048
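By default, llama-server exposes an OpenAI-compatible endpoint on http://localhost:8080, which the LlamaCpp model is expected to target out of the box. If your server runs on a different host or port, here is a minimal sketch for pointing the agent at it (assuming LlamaCpp accepts a base_url parameter, as Agno's OpenAI-compatible model classes generally do):

# Sketch: connect to a llama-server on a non-default address.
# The base_url parameter and the address below are assumptions for illustration.
agent = Agent(
    model=LlamaCpp(
        id="ggml-org/gpt-oss-20b-GGUF",
        base_url="http://localhost:9090/v1",  # hypothetical non-default port
    ),
    output_schema=MovieScript,
)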
3. Install libraries
pip install -U pydantic rich agno
4. Run Agent
python cookbook/models/llama_cpp/structured_output.py