from phi.agent import Agent
from phi.knowledge.langchain import LangChainKnowledgeBase
from langchain.embeddings import OpenAIEmbeddings
from langchain.document_loaders import TextLoader
from langchain.text_splitter import CharacterTextSplitter
from langchain.vectorstores import Chroma

# Directory where the Chroma vector store is persisted on disk.
chroma_db_dir = "./chroma_db"


def load_vector_store(doc_path=None):
    """Load a text document, split it into chunks, embed each chunk, and
    persist the embeddings into the Chroma store at ``chroma_db_dir``.

    Args:
        doc_path: Optional path to the source text file. When ``None``
            (the default, preserving the original behavior) the demo file
            under the workspace root is used.

    NOTE(review): the ``ws_settings`` object used for the default path is
    not defined or imported in this file — presumably it comes from the
    project's workspace settings module (e.g.
    ``from workspace.settings import ws_settings``); confirm the import,
    otherwise calling this function without ``doc_path`` raises NameError.
    """
    if doc_path is None:
        doc_path = ws_settings.ws_root.joinpath("data/demo/state_of_the_union.txt")
    # -*- Load the document
    raw_documents = TextLoader(str(doc_path)).load()
    # -*- Split it into chunks (1000 chars, no overlap)
    text_splitter = CharacterTextSplitter(chunk_size=1000, chunk_overlap=0)
    documents = text_splitter.split_documents(raw_documents)
    # -*- Embed each chunk and persist it into the vector store
    Chroma.from_documents(documents, OpenAIEmbeddings(), persist_directory=str(chroma_db_dir))


# -*- Get the vectordb (reads whatever load_vector_store() persisted)
db = Chroma(embedding_function=OpenAIEmbeddings(), persist_directory=str(chroma_db_dir))
# -*- Create a retriever from the vector store
retriever = db.as_retriever()
# -*- Create a knowledge base from the retriever
knowledge_base = LangChainKnowledgeBase(retriever=retriever)

agent = Agent(knowledge_base=knowledge_base, add_references_to_prompt=True)
# BUG FIX: the original called `conv.print_response(...)`, but `conv` is
# never defined anywhere in this file — the agent built above is the
# intended object, so calling the undefined name raised NameError.
agent.print_response("What did the president say about technology?")