"""Local-knowledge-base Q&A: index text files with FAISS + Ollama embeddings,
then answer a fixed question via a RetrievalQA chain.

The FAISS index is cached on disk ("var/") so the document walk and embedding
pass only happen on the first run.
"""
import os

from langchain.chains import RetrievalQA
from langchain_community.document_loaders import TextLoader
from langchain_community.embeddings import OllamaEmbeddings
from langchain_community.llms.ollama import Ollama
from langchain_community.vectorstores.faiss import FAISS

# Directory tree whose text files get indexed, and the on-disk FAISS cache.
ROOT_DIR = "/home/cmcc/server/file/pyfiletest/"
INDEX_DIR = "var"
INDEX_NAME = "index"


def _load_documents(root_dir: str) -> list:
    """Walk *root_dir* and load every UTF-8-readable file, split into chunks.

    Unreadable files (binary content, bad encoding) are skipped best-effort,
    matching the original behavior, but the offending path is now reported
    alongside the error so failures are diagnosable.
    """
    docs = []
    for dirpath, _dirnames, filenames in os.walk(root_dir):
        for filename in filenames:
            path = os.path.join(dirpath, filename)
            try:
                loader = TextLoader(path, encoding="utf-8")
                docs.extend(loader.load_and_split())
            except Exception as e:  # best-effort: skip files that won't load
                print(f"skipping {path}: {e}")
    return docs


def _build_or_load_index(embedding) -> FAISS:
    """Return the FAISS vector store, rebuilding only when no cache exists."""
    if not os.path.exists(INDEX_DIR):
        docsearch = FAISS.from_documents(_load_documents(ROOT_DIR), embedding)
        docsearch.save_local(INDEX_DIR, INDEX_NAME)
        return docsearch
    # NOTE(review): the FAISS cache is pickle-based; current langchain-community
    # versions refuse to load it unless deserialization is explicitly opted
    # into. Safe here because we only ever load an index we wrote ourselves.
    # Confirm the installed langchain-community version accepts this kwarg.
    return FAISS.load_local(
        INDEX_DIR, embedding, allow_dangerous_deserialization=True
    )


def main() -> None:
    """Wire up the LLM, retriever, and QA chain, then print one answer."""
    llm = Ollama(model="qwen:7b")
    embedding = OllamaEmbeddings()
    docsearch = _build_or_load_index(embedding)
    qa = RetrievalQA.from_chain_type(
        llm, chain_type="stuff", retriever=docsearch.as_retriever()
    )
    response = qa.run("如何通过历史消息聊天,只给出代码实现")
    print(response)


if __name__ == "__main__":
    main()
# Tags: docsearch, 代码 (code), 知识库 (knowledge base), langchain, community, 本地 (local), var, import, os
# Source: https://www.cnblogs.com/redhat0019/p/18120586