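# Streamlit chat app for MaxMini-Instruct-248M, an instruction-tuned T5 model
# served through the Hugging Face Hub via LangChain.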
import streamlit as st
import os
from streamlit_chat import message
from langchain.prompts import PromptTemplate
from langchain.chains import LLMChain
from langchain_community.llms.huggingface_hub import HuggingFaceHub
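# The model is hosted on the Hugging Face Hub and called remotely through the
# Inference API; HF_TOKEN must be set in the environment for authentication.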
llm = HuggingFaceHub(repo_id="suriya7/MaxMini-Instruct-248M",
                     task='text2text-generation',
                     huggingfacehub_api_token=os.getenv('HF_TOKEN'),
                     model_kwargs={
                         "do_sample": True,
                         "max_new_tokens": 250
                     })
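# Prompt template: the running conversation history and the new question are
# injected before each call so the model can answer with context.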
template = """
Please answer the question.
Previous chat: {previous_history}
Human: {question}
Chatbot:
"""
prompt = PromptTemplate(template=template, input_variables=['question', 'previous_history'])
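# Wire the prompt into an LLMChain; verbose=True logs the rendered prompt on
# every call, which is handy for debugging.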
llm_chain = LLMChain(
    llm=llm,
    prompt=prompt,
    verbose=True,
)
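# Build the history string from past (question, answer) pairs, run the chain,
# and store the new exchange back into session state.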
def conversational_chat(user_query):
    previous_response = "\n".join([f"Human: {i[0]}\nChatbot: {i[1]}" for i in st.session_state['history'] if i is not None])
    result = llm_chain.predict(
        question=user_query,
        previous_history=previous_response
    )
    st.session_state['history'].append((user_query, result))
    return result
st.title('MaxMini')
st.info("MaxMini-Instruct-248M is a T5 (Text-To-Text Transfer Transformer) model fine-tuned on a variety of tasks. It is designed to follow instructions across a range of tasks, generating responses for various user inputs.")
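# Initialise session state on first load: 'history' holds (question, answer) tuples
# for the prompt, 'message' holds bot replies for display, 'past' holds user turns.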
if 'history' not in st.session_state:
    st.session_state['history'] = []
if 'message' not in st.session_state:
    st.session_state['message'] = ['Hey there! How can I assist you?']
    st.session_state['past'] = []
# Create containers for chat history and user input
response_container = st.container()
container = st.container()
# User input form
user_input = st.chat_input("Ask your questions...")
with container:
    if user_input:
        output = conversational_chat(user_input)
        st.session_state['past'].append(user_input)
        st.session_state['message'].append(output)
# Display chat history; 'message' has one extra entry (the greeting), so user
# turns are offset by one relative to bot replies.
if st.session_state['message']:
    with response_container:
        for i in range(len(st.session_state['message'])):
            if i != 0:
                message(st.session_state["past"][i - 1], is_user=True, key=str(i) + '_user', avatar_style="adventurer")
            message(st.session_state["message"][i], key=str(i), avatar_style="bottts")
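# To run locally (assuming this script is saved as app.py and HF_TOKEN is exported
# in the environment):
#   streamlit run app.py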