Mr-Vicky-01 committed
Commit f5ac43e
Parent: 90c492c

Update app.py

Files changed (1): app.py (+2 -1)
app.py CHANGED
@@ -2,6 +2,7 @@ import streamlit as st
 from PyPDF2 import PdfReader
 from langchain.text_splitter import RecursiveCharacterTextSplitter
 import os
+from langchain.chains import LLMChain
 from langchain_google_genai import GoogleGenerativeAIEmbeddings
 from langchain.llms import HuggingFaceHub
 from langchain.embeddings import HuggingFaceInferenceAPIEmbeddings
@@ -48,7 +49,7 @@ def get_conversational_chain():
     model = HuggingFaceHub(repo_id="google/gemma-1.1-7b-it",
                            model_kwargs={"temperature": 0.2, "max_new_tokens": 512})
     prompt = PromptTemplate(template=prompt_template, input_variables=["context", "question"])
-    chain = load_qa_chain(model, chain_type="stuff", prompt=prompt)
+    chain = LLMChain(llm=model, prompt=prompt)
     return chain
 
 def user_input(user_question):
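
For context, a minimal sketch (not part of this commit) of how the chain built in get_conversational_chain() can be constructed and invoked; the prompt text, the placeholder document chunks, and the example question are illustrative assumptions, and a valid HUGGINGFACEHUB_API_TOKEN is assumed to be set in the environment.

# Minimal sketch, assuming the classic LangChain API and a
# HUGGINGFACEHUB_API_TOKEN environment variable; the prompt text and
# inputs below are illustrative, not taken from app.py.
from langchain.chains import LLMChain
from langchain.llms import HuggingFaceHub
from langchain.prompts import PromptTemplate

prompt_template = "Answer using only this context:\n{context}\n\nQuestion: {question}\nAnswer:"

model = HuggingFaceHub(repo_id="google/gemma-1.1-7b-it",
                       model_kwargs={"temperature": 0.2, "max_new_tokens": 512})
prompt = PromptTemplate(template=prompt_template,
                        input_variables=["context", "question"])
chain = LLMChain(llm=model, prompt=prompt)

# LLMChain fills the prompt variables directly, so retrieved chunks are
# joined into one context string before the call.
docs = ["first retrieved chunk (placeholder)", "second retrieved chunk (placeholder)"]
answer = chain.run(context="\n\n".join(docs), question="What is this PDF about?")
print(answer)

Unlike load_qa_chain, which accepts Document objects through an input_documents key, LLMChain only fills the prompt variables, so the retrieved text is joined into a single context string before the call.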