# localQA / app.py
from langchain.llms import HuggingFaceHub
from langchain.embeddings import SentenceTransformerEmbeddings
from langchain.vectorstores import FAISS
# 1. Initialize the Gemma model
llm = HuggingFaceHub(repo_id="google/gemma-7b-it", model_kwargs={"temperature": 0.5, "max_length": 512})
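# Note (not in the original file): HuggingFaceHub calls the hosted Inference API,
# so the HUGGINGFACEHUB_API_TOKEN environment variable must be set (e.g. as a
# Space secret) before this script runs.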
# 2. Prepare the knowledge-base data
knowledge_base = [
    "Gemma is a large language model developed by Google.",
    "Gemma has strong natural language processing capabilities.",
    "Gemma can be used for tasks such as question answering, dialogue, and text generation.",
]
# 3. Build the vector store
embeddings = SentenceTransformerEmbeddings(model_name="all-mpnet-base-v2")
db = FAISS.from_texts(knowledge_base, embeddings)
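# Optional sketch (not in the original file): the index can be persisted so it
# does not need to be rebuilt on every start; the path "faiss_index" is arbitrary.
# db.save_local("faiss_index")
# db = FAISS.load_local("faiss_index", embeddings)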
# 4. Question-answering function
def answer_question(question):
    # Embed the question and retrieve the most similar knowledge-base entries
    question_embedding = embeddings.embed_query(question)
    docs_and_scores = db.similarity_search_with_score_by_vector(question_embedding)
    context = "\n".join([doc.page_content for doc, _ in docs_and_scores])
    prompt = f"Answer the question using the knowledge base below:\n{context}\nQuestion: {question}"
    answer = llm(prompt)
    return answer
# 5. Quick test
question = "What are Gemma's features?"
answer = answer_question(question)
print(answer)
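
# Hypothetical Gradio wrapper (not part of the original file): a Space app.py
# usually exposes a UI, so this is a minimal sketch assuming the `gradio`
# package is installed; the widget labels and title are placeholders.
import gradio as gr

demo = gr.Interface(
    fn=answer_question,
    inputs=gr.Textbox(label="Question"),
    outputs=gr.Textbox(label="Answer"),
    title="localQA",
)

if __name__ == "__main__":
    demo.launch()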