import streamlit as st
import os
from streamlit_chat import message
from langchain.prompts import PromptTemplate
from langchain.chains import LLMChain
from langchain_community.llms.huggingface_hub import HuggingFaceHub

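# MaxMini-Instruct-248M is called remotely through the Hugging Face Hub inference API;
# the access token is read from the HF_TOKEN environment variable.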
llm = HuggingFaceHub(
    repo_id="suriya7/MaxMini-Instruct-248M",
    task="text2text-generation",
    huggingfacehub_api_token=os.getenv("HF_TOKEN"),
    model_kwargs={
        "do_sample": True,
        "max_new_tokens": 250,
    },
)


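# A minimal question-answering prompt; LLMChain substitutes {question} on each call.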
template = """Please answer the question: {question}"""

prompt = PromptTemplate(template=template, input_variables=["question"])

llm_chain = LLMChain(
    llm=llm,
    prompt=prompt,
    verbose=True,
)

def conversational_chat(user_query):
    """Run the user's question through the chain and record the exchange in the session history."""
    result = llm_chain.predict(question=user_query)
    st.session_state['history'].append((user_query, result))
    return result


st.title("MaxMini Chat Bot")
st.text("I am MaxMini, your friendly assistant.")
st.markdown("Built by [Suriya❤️](https://github.com/theSuriya)")

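# Seed the chat history and the opening greeting shown on first load.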
if 'history' not in st.session_state:
    st.session_state['history'] = []

if 'human' not in st.session_state:
    st.session_state['human'] = ["Hello MaxMini"]

if 'assistant' not in st.session_state:
    st.session_state['assistant'] = ["Hey there! How can I assist you?"]

# Create containers for chat history and user input
response_container = st.container()
container = st.container()

# User input box
user_input = st.chat_input("Ask Your Questions 👉..")
with container:
    if user_input:
        output = conversational_chat(user_input)
        st.session_state['human'].append(user_input)
        st.session_state['assistant'].append(output)

# Display chat history
if st.session_state['assistant']:
    with response_container:
        for i in range(len(st.session_state['assistant'])):
            message(st.session_state["human"][i], is_user=True, key=str(i) + '_user', avatar_style="adventurer")
            message(st.session_state["assistant"][i], key=str(i), avatar_style="bottts")
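
# To try the app locally (assuming this file is saved as app.py and HF_TOKEN is exported):
#   streamlit run app.py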