# NOTE: the lines "Spaces: / Sleeping / Sleeping" here were a Hugging Face
# Spaces status banner captured by the page scrape — not part of the app code.
import os

import streamlit as st
from streamlit_chat import message
# `from langchain import LLMChain` was removed in langchain >= 0.2;
# the chain class now lives in langchain.chains.
from langchain.chains import LLMChain
from langchain.prompts import PromptTemplate
from langchain_community.llms.huggingface_hub import HuggingFaceHub

# Hosted instruct model used as the chat backend.
# Requires the HF_TOKEN environment variable for API access.
llm = HuggingFaceHub(
    repo_id="suriya7/MaxMini-Instruct-248M",
    task="text2text-generation",
    huggingfacehub_api_token=os.getenv("HF_TOKEN"),
    model_kwargs={
        "do_sample": True,      # sample instead of greedy decoding
        "max_new_tokens": 250,  # cap on response length
    },
)

# Single-turn prompt: the template only exposes {question}, so every
# request is stateless — no chat history is sent to the model.
template = """Please Answer the Question:{question}"""
prompt = PromptTemplate(template=template, input_variables=["question"])

llm_chain = LLMChain(
    llm=llm,
    prompt=prompt,
    verbose=True,  # echo prompts/outputs to stdout for debugging
)
def conversational_chat(user_query):
    """Send *user_query* to the LLM chain and record the exchange.

    Appends the ``(question, answer)`` pair to
    ``st.session_state['history']`` and returns the model's answer string.
    """
    # A chat-history threading prototype was removed here: the prompt
    # template only has a {question} variable, so passing history to the
    # chain had no effect — each turn is intentionally stateless.
    result = llm_chain.predict(question=user_query)
    st.session_state['history'].append((user_query, result))
    return result
st.title("Chat Bot MaxMini:")
st.text("I am MaxMini Your Friendly Assistant")  # fixed "Assitant" typo
st.markdown("Built by [Suriya❤️](https://github.com/theSuriya)")

# Seed session state on first run: 'history' holds (question, answer)
# pairs; 'human'/'assistant' are parallel display lists pre-loaded with
# a greeting exchange.
if 'history' not in st.session_state:
    st.session_state['history'] = []
if 'human' not in st.session_state:
    st.session_state['human'] = ["Hello MaxMini"]
if 'assistant' not in st.session_state:
    st.session_state['assistant'] = ['Hey There! How Can I Assist You']

# Containers so the conversation renders above the input widget.
response_container = st.container()
container = st.container()

# User input form
user_input = st.chat_input("Ask Your Questions 👉..")
with container:
    if user_input:
        output = conversational_chat(user_input)
        st.session_state['human'].append(user_input)
        st.session_state['assistant'].append(output)

# Streamlit re-executes the whole script on every interaction, so the
# full conversation is replayed from session state each run.
if st.session_state['assistant']:
    with response_container:
        turns = zip(st.session_state['human'], st.session_state['assistant'])
        for i, (question, answer) in enumerate(turns):
            message(question, is_user=True, key=str(i) + '_user', avatar_style="adventurer")
            message(answer, key=str(i), avatar_style="bottts")