```python
from phi.agent import Agent
from phi.model.openai import OpenAIChat
from rich.pretty import pprint

agent = Agent(
    model=OpenAIChat(id="gpt-4o"),
    # Set add_history_to_messages=True to add the previous chat history to the messages sent to the Model.
    add_history_to_messages=True,
    # Number of historical responses to add to the messages.
    num_history_responses=3,
    description="You are a helpful assistant that always responds in a polite, upbeat and positive manner.",
)

# -*- Create a run
agent.print_response("Share a 2 sentence horror story", stream=True)
# -*- Print the messages in the memory
pprint([m.model_dump(include={"role", "content"}) for m in agent.memory.messages])

# -*- Ask a follow up question that continues the conversation
agent.print_response("What was my first message?", stream=True)
# -*- Print the messages in the memory
pprint([m.model_dump(include={"role", "content"}) for m in agent.memory.messages])
```
The built-in memory only lasts while the session is active. To persist memory across sessions, we can store Agent sessions in a database using AgentStorage.
Storage is a necessary component when building user-facing AI products, as any production application will require users to be able to “continue” their conversation with the Agent.
Let’s test this out. Create a file persistent_memory.py with the following code:
persistent_memory.py
```python
import json

from rich.console import Console
from rich.panel import Panel
from rich.json import JSON
from phi.agent import Agent
from phi.model.openai import OpenAIChat
from phi.storage.agent.sqlite import SqlAgentStorage

agent = Agent(
    model=OpenAIChat(id="gpt-4o"),
    # Store agent sessions in a database
    storage=SqlAgentStorage(table_name="agent_sessions", db_file="tmp/agent_storage.db"),
    # Set add_history_to_messages=True to add the previous chat history to the messages sent to the Model.
    add_history_to_messages=True,
    # Number of historical responses to add to the messages.
    num_history_responses=3,
    # The session_id is used to identify the session in the database
    # You can resume any session by providing a session_id
    # session_id="xxxx-xxxx-xxxx-xxxx",
    # Description creates a system prompt for the agent
    description="You are a helpful assistant that always responds in a polite, upbeat and positive manner.",
)

console = Console()


def print_chat_history(agent):
    # -*- Print history
    console.print(
        Panel(
            JSON(json.dumps([m.model_dump(include={"role", "content"}) for m in agent.memory.messages]), indent=4),
            title=f"Chat History for session_id: {agent.session_id}",
            expand=True,
        )
    )


# -*- Create a run
agent.print_response("Share a 2 sentence horror story", stream=True)
# -*- Print the chat history
print_chat_history(agent)

# -*- Ask a follow up question that continues the conversation
agent.print_response("What was my first message?", stream=True)
# -*- Print the chat history
print_chat_history(agent)
```
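The commented-out session_id in the example above is how you resume a conversation later. The sketch below is one possible way to do that: it reuses the same SQLite database file as persistent_memory.py and assumes the storage backend exposes a get_all_session_ids() method (verify against your phidata version); the session selection logic is illustrative, not part of the original example.

```python
from phi.agent import Agent
from phi.model.openai import OpenAIChat
from phi.storage.agent.sqlite import SqlAgentStorage

# Reuse the same database file created by persistent_memory.py
storage = SqlAgentStorage(table_name="agent_sessions", db_file="tmp/agent_storage.db")

# Assumption: get_all_session_ids() returns the ids of sessions stored in this table.
existing_sessions = storage.get_all_session_ids()
print(existing_sessions)

agent = Agent(
    model=OpenAIChat(id="gpt-4o"),
    storage=storage,
    add_history_to_messages=True,
    num_history_responses=3,
    # Resume the most recent stored session; if none exist, a new session is started.
    session_id=existing_sessions[-1] if existing_sessions else None,
    description="You are a helpful assistant that always responds in a polite, upbeat and positive manner.",
)

# The agent now has the resumed session's history available to it.
agent.print_response("What was the last thing we talked about?", stream=True)
```

You can also copy a session_id from the "Chat History for session_id: ..." panel title printed by persistent_memory.py and pass it directly to the Agent.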
Along with storing chat history and run messages, AgentMemory can be extended to automatically classify and store user preferences and conversation summaries.
To do this, add a db to AgentMemory and set create_user_memories=True and create_session_summary=True.
User memories are stored in AgentMemory, whereas session summaries are stored in the AgentStorage table with the rest of the session information.
User preferences and conversation summaries are currently only compatible with OpenAI and OpenAILike models, while persistent memory is compatible with all model providers.
```python
from rich.pretty import pprint

from phi.agent import Agent, AgentMemory
from phi.model.openai import OpenAIChat
from phi.memory.db.postgres import PgMemoryDb
from phi.storage.agent.postgres import PgAgentStorage

db_url = "postgresql+psycopg://ai:ai@localhost:5532/ai"

agent = Agent(
    model=OpenAIChat(id="gpt-4o"),
    # Store the memories and summary in a database
    memory=AgentMemory(
        db=PgMemoryDb(table_name="agent_memory", db_url=db_url),
        create_user_memories=True,
        create_session_summary=True,
    ),
    # Store agent sessions in a database
    storage=PgAgentStorage(table_name="personalized_agent_sessions", db_url=db_url),
    # Show debug logs so you can see the memory being created
    # debug_mode=True,
)

# -*- Share personal information
agent.print_response("My name is john billings?", stream=True)
# -*- Print memories
pprint(agent.memory.memories)
# -*- Print summary
pprint(agent.memory.summary)

# -*- Share personal information
agent.print_response("I live in nyc?", stream=True)
# -*- Print memories
pprint(agent.memory.memories)
# -*- Print summary
pprint(agent.memory.summary)

# -*- Share personal information
agent.print_response("I'm going to a concert tomorrow?", stream=True)
# -*- Print memories
pprint(agent.memory.memories)
# -*- Print summary
pprint(agent.memory.summary)

# Ask about the conversation
agent.print_response("What have we been talking about, do you know my name?", stream=True)
```
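Because the memories and the session live in Postgres, they survive across processes. The sketch below is a minimal illustration of resuming the same personalized agent in a later run; the session_id value is a placeholder you copy from an earlier run, and the user_id parameter is an assumption about how your phidata version scopes user memories, so adjust or drop it as needed.

```python
from rich.pretty import pprint

from phi.agent import Agent, AgentMemory
from phi.model.openai import OpenAIChat
from phi.memory.db.postgres import PgMemoryDb
from phi.storage.agent.postgres import PgAgentStorage

db_url = "postgresql+psycopg://ai:ai@localhost:5532/ai"

agent = Agent(
    model=OpenAIChat(id="gpt-4o"),
    # session_id="xxxx-xxxx-xxxx-xxxx",  # placeholder: paste a session_id from a previous run to resume it
    # user_id="john",                    # hypothetical identifier used to scope user memories (assumption)
    memory=AgentMemory(
        db=PgMemoryDb(table_name="agent_memory", db_url=db_url),
        create_user_memories=True,
        create_session_summary=True,
    ),
    storage=PgAgentStorage(table_name="personalized_agent_sessions", db_url=db_url),
    add_history_to_messages=True,
    num_history_responses=3,
)

# Memories created in earlier runs are read back from the memory database,
# so the agent can recall them even in a fresh process.
agent.print_response("Do you remember my name and where I live?", stream=True)

# Inspect what was loaded and updated.
pprint(agent.memory.memories)
pprint(agent.memory.summary)
```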