abaliyan committed on
Commit 2fc40d4 · verified · 1 Parent(s): 4663a79

Create app.py

Files changed (1)
  1. app.py +42 -0
app.py ADDED
@@ -0,0 +1,42 @@
+ import streamlit as st
+ import os
+
+ from langchain import PromptTemplate, HuggingFaceHub, LLMChain
+
+ st.title("Generating Response with HuggingFace Models")
+ st.markdown("## Model: `facebook/blenderbot-1B-distill`")
+
+ def get_response(question: str) -> dict:
+     """
+     Generate a response to a given question using the Blenderbot Large Language Model.
+
+     Args:
+         question (str): The question to be answered.
+
+     Returns:
+         dict: A dictionary containing the response text and metadata.
+     """
+     template = """Question: {question}
+
+     Answer: Let's think step by step."""
+
+     prompt = PromptTemplate(template=template, input_variables=["question"])
+
+     llm_chain = LLMChain(prompt=prompt,
+                          llm=HuggingFaceHub(repo_id="facebook/blenderbot-1B-distill",
+                                             model_kwargs={"temperature": 0,
+                                                           "max_length": 64}))
+
+     response = llm_chain.invoke(question)
+
+     return response
+
+ question = st.text_area("Enter your question here...")
+
+ if st.button("Get Response") and question:
+     with st.spinner("Generating Response..."):
+         answer = get_response(question)
+         if answer is not None:
+             st.success('Great! Response generated successfully')
+             st.write(answer)
+             st.write(answer["text"])
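For a quick local check of the committed chain outside Streamlit, a minimal sketch is shown below. It reuses the same prompt template, model id, and model_kwargs as get_response() in app.py and assumes the same legacy langchain imports; it also assumes a Hugging Face token is exported as HUGGINGFACEHUB_API_TOKEN, which langchain's HuggingFaceHub wrapper reads for authentication. The test question is illustrative only and is not part of the commit.

# Minimal sketch (not part of the commit): exercise the same chain without Streamlit.
# Assumes the legacy langchain API used in app.py and HUGGINGFACEHUB_API_TOKEN in the environment.
import os

from langchain import PromptTemplate, HuggingFaceHub, LLMChain

assert os.environ.get("HUGGINGFACEHUB_API_TOKEN"), "export HUGGINGFACEHUB_API_TOKEN first"

# Same prompt and model configuration as get_response() in app.py.
template = """Question: {question}

Answer: Let's think step by step."""
prompt = PromptTemplate(template=template, input_variables=["question"])
llm = HuggingFaceHub(repo_id="facebook/blenderbot-1B-distill",
                     model_kwargs={"temperature": 0, "max_length": 64})
chain = LLMChain(prompt=prompt, llm=llm)

# LLMChain.invoke returns a dict containing the input plus the generated "text".
result = chain.invoke("What is the capital of France?")  # hypothetical test question
print(result["text"])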