"""
Streaming
=============================
Demonstrates streaming agent responses token by token.
"""
from agno.agent import Agent
from agno.models.openai import OpenAIResponses
# ---------------------------------------------------------------------------
# Create Agent
# ---------------------------------------------------------------------------
agent = Agent(
    model=OpenAIResponses(id="gpt-5.2"),
    markdown=True,
)
# ---------------------------------------------------------------------------
# Run Agent
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    # Stream the response token by token
    agent.print_response(
        "Explain the difference between concurrency and parallelism.",
        stream=True,
    )
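If you want to handle the stream yourself instead of letting print_response write to the terminal, you can iterate over agent.run(..., stream=True). The sketch below is a minimal example under the assumption that each streamed chunk exposes a content attribute with the newly generated text; the exact chunk/event types vary between Agno versions, so check the version you have installed.

from agno.agent import Agent
from agno.models.openai import OpenAIResponses

agent = Agent(
    model=OpenAIResponses(id="gpt-5.2"),
    markdown=True,
)

# Consume the stream manually: agent.run(..., stream=True) yields chunks as
# they arrive. We assume each chunk exposes a `content` attribute holding the
# newly generated text (exact event types differ across Agno versions).
for chunk in agent.run(
    "Explain the difference between concurrency and parallelism.",
    stream=True,
):
    text = getattr(chunk, "content", None)
    if text:
        print(text, end="", flush=True)
print()  # final newline once the stream completes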
Run the Example
# Clone the repo and move into the example directory
git clone https://github.com/agno-agi/agno.git
cd agno/cookbook/02_agents/02_input_output
# Create and activate virtual environment
./scripts/demo_setup.sh
source .venvs/demo/bin/activate
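# Run the streaming example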
python streaming.py
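Make sure your OpenAI credentials are available before running (typically via the OPENAI_API_KEY environment variable). The answer should appear in your terminal incrementally as chunks arrive, rather than all at once after the model finishes.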