Improved chat UI, integrated full Darija (Moroccan Arabic)
app.py
CHANGED

Previous app.py (removed lines marked with "-"):

@@ -1,19 +1,25 @@
import streamlit as st
-
-import …
import requests
from gradio_client import Client

API_TOKEN = st.secrets['HF_TOKEN']
API_URL = "https://api-inference.huggingface.co/models/mistralai/Mixtral-8x7B-Instruct-v0.1"
headers = {"Authorization": f"Bearer {str(API_TOKEN)}"}

def query(payload):
    response = requests.post(API_URL, headers=headers, json=payload)
    return response.json()

def translate(text,source="English",target="Moroccan Arabic"):
-    client = Client("https://facebook-seamless-m4t-v2-large.hf.space/--replicas/…
    result = client.predict(
text, # str in 'Input text' Textbox component
source, # Literal[Afrikaans, Amharic, Armenian, Assamese, Basque, Belarusian, Bengali, Bosnian, Bulgarian, Burmese, Cantonese, Catalan, Cebuano, Central Kurdish, Croatian, Czech, Danish, Dutch, Egyptian Arabic, English, Estonian, Finnish, French, Galician, Ganda, Georgian, German, Greek, Gujarati, Halh Mongolian, Hebrew, Hindi, Hungarian, Icelandic, Igbo, Indonesian, Irish, Italian, Japanese, Javanese, Kannada, Kazakh, Khmer, Korean, Kyrgyz, Lao, Lithuanian, Luo, Macedonian, Maithili, Malayalam, Maltese, Mandarin Chinese, Marathi, Meitei, Modern Standard Arabic, Moroccan Arabic, Nepali, North Azerbaijani, Northern Uzbek, Norwegian Bokmål, Norwegian Nynorsk, Nyanja, Odia, Polish, Portuguese, Punjabi, Romanian, Russian, Serbian, Shona, Sindhi, Slovak, Slovenian, Somali, Southern Pashto, Spanish, Standard Latvian, Standard Malay, Swahili, Swedish, Tagalog, Tajik, Tamil, Telugu, Thai, Turkish, Ukrainian, Urdu, Vietnamese, Welsh, West Central Oromo, Western Persian, Yoruba, Zulu] in 'Source language' Dropdown component

@@ -25,7 +31,11 @@ def translate(text,source="English",target="Moroccan Arabic"):


# Function to generate a response from the chatbot
-def get_bot_response(user_input):
    location = 'Benguerir, Morocco'
    soil_type = 'red soil'
    humidity = '40%'

@@ -37,13 +47,14 @@ def get_bot_response(user_input):
    # For simplicity, the bot echoes the user's input in this example

    instruction = f'''
-<s> [INST] You are an agriculture expert,
Location: {location},
land type: {soil_type}
humidity: {humidity}
weather: {weather}
temperature: {temp}
-agriculture: {agriculture} [/INST]</s>
'''
    prompt = f'''
You are an agriculture expert, Given the following informations, geographical coordinates (latitude and longitude), prevailing weather conditions, specific land type, chosen type of agriculture, and soil composition of a designated area, request the LLM to provide detailed insights and predictions on optimal agricultural practices, potential crop yields, and recommended soil management strategies, or answer the question below

@@ -60,37 +71,67 @@ agriculture: {agriculture}
    # ANSWER:
    # ''',})

-    output = query({"inputs": instruction, "parameters":{"max_new_tokens":250, "temperature":…
-    print(headers)
    print(output)
    return f"Bot: {translate(output[0]['generated_text'])}"

-# Streamlit app
def main():
-    … (former main() body, old lines 70–89, not rendered here)

    if st.button("Clear Chat"):
-        st.session_state…

-# Run the Streamlit app
if __name__ == "__main__":
-    main()

Updated app.py (added lines marked with "+"):

@@ -1,19 +1,25 @@
import streamlit as st
+from streamlit_chat import message
+from streamlit_extras.colored_header import colored_header
+from streamlit_extras.add_vertical_space import add_vertical_space
import requests
from gradio_client import Client

+st.set_page_config(page_title="HugChat - An LLM-powered Streamlit app")
+
API_TOKEN = st.secrets['HF_TOKEN']
API_URL = "https://api-inference.huggingface.co/models/mistralai/Mixtral-8x7B-Instruct-v0.1"
headers = {"Authorization": f"Bearer {str(API_TOKEN)}"}
+def get_text():
+    input_text = st.text_input("You: ", "", key="input")
+    return input_text

def query(payload):
    response = requests.post(API_URL, headers=headers, json=payload)
    return response.json()

def translate(text,source="English",target="Moroccan Arabic"):
+    client = Client("https://facebook-seamless-m4t-v2-large.hf.space/--replicas/2bmbx/")
    result = client.predict(
text, # str in 'Input text' Textbox component
source, # Literal[Afrikaans, Amharic, Armenian, Assamese, Basque, Belarusian, Bengali, Bosnian, Bulgarian, Burmese, Cantonese, Catalan, Cebuano, Central Kurdish, Croatian, Czech, Danish, Dutch, Egyptian Arabic, English, Estonian, Finnish, French, Galician, Ganda, Georgian, German, Greek, Gujarati, Halh Mongolian, Hebrew, Hindi, Hungarian, Icelandic, Igbo, Indonesian, Irish, Italian, Japanese, Javanese, Kannada, Kazakh, Khmer, Korean, Kyrgyz, Lao, Lithuanian, Luo, Macedonian, Maithili, Malayalam, Maltese, Mandarin Chinese, Marathi, Meitei, Modern Standard Arabic, Moroccan Arabic, Nepali, North Azerbaijani, Northern Uzbek, Norwegian Bokmål, Norwegian Nynorsk, Nyanja, Odia, Polish, Portuguese, Punjabi, Romanian, Russian, Serbian, Shona, Sindhi, Slovak, Slovenian, Somali, Southern Pashto, Spanish, Standard Latvian, Standard Malay, Swahili, Swedish, Tagalog, Tajik, Tamil, Telugu, Thai, Turkish, Ukrainian, Urdu, Vietnamese, Welsh, West Central Oromo, Western Persian, Yoruba, Zulu] in 'Source language' Dropdown component

@@ -25,7 +31,11 @@


# Function to generate a response from the chatbot
+def generate_response(user_input):
+
+    user_input_translated = str(translate(user_input, "Moroccan Arabic", "English"))
+    name = 'Abbas'
+    date = 'December'
    location = 'Benguerir, Morocco'
    soil_type = 'red soil'
    humidity = '40%'

@@ -37,13 +47,14 @@
    # For simplicity, the bot echoes the user's input in this example

    instruction = f'''
+<s> [INST] You are an agriculture expert, and my name is {name} Given the following informations, prevailing weather conditions, specific land type, chosen type of agriculture, and soil composition of a designated area, answer the question below
Location: {location},
+Current Month : {date}
land type: {soil_type}
humidity: {humidity}
weather: {weather}
temperature: {temp}
+agriculture: {agriculture} Question: {user_input_translated}[/INST]</s>
'''
    prompt = f'''
You are an agriculture expert, Given the following informations, geographical coordinates (latitude and longitude), prevailing weather conditions, specific land type, chosen type of agriculture, and soil composition of a designated area, request the LLM to provide detailed insights and predictions on optimal agricultural practices, potential crop yields, and recommended soil management strategies, or answer the question below

@@ -60,37 +71,67 @@
    # ANSWER:
    # ''',})

+    output = query({"inputs": instruction, "parameters":{"max_new_tokens":250, "temperature":1, "return_full_text":False}})
+    # print(headers)
+    print(instruction)
    print(output)
    return f"Bot: {translate(output[0]['generated_text'])}"

def main():
+    # Sidebar contents
+    with st.sidebar:
+        st.title('🤗💬 HugChat App')
+        st.markdown('''
+        ## About
+        This app is an LLM-powered chatbot built using:
+        - [Streamlit](https://streamlit.io/)
+        - [HugChat](https://github.com/Soulter/hugging-chat-api)
+        - [OpenAssistant/oasst-sft-6-llama-30b-xor](https://huggingface.co/OpenAssistant/oasst-sft-6-llama-30b-xor) LLM model
+
+        💡 Note: No API key required!
+        ''')
+        add_vertical_space(5)
+        st.write('Made with ❤️ by [Data Professor](https://youtube.com/dataprofessor)')
+
+    # Generate empty lists for generated and past.
+    ## generated stores AI generated responses
+    if 'generated' not in st.session_state:
+        st.session_state['generated'] = ["I'm HugChat, How may I help you?"]
+    ## past stores User's questions
+    if 'past' not in st.session_state:
+        st.session_state['past'] = ['Hi!']
+
+    # Layout of input/response containers
+    input_container = st.container()

    if st.button("Clear Chat"):
+        st.session_state['past'] = []
+        st.session_state['generated'] = []
+
+    colored_header(label='', description='', color_name='blue-30')
+    response_container = st.container()
+
+    # User input
+    ## Function for taking user provided prompt as input
+
+    ## Applying the user input box
+    with input_container:
+        user_input = get_text()
+
+    # Response output
+    ## Function for taking user prompt as input followed by producing AI generated responses
+
+    ## Conditional display of AI generated responses as a function of user provided prompts
+    with response_container:
+        if user_input:
+            response = generate_response(user_input)
+            st.session_state.past.append(user_input)
+            st.session_state.generated.append(response)
+
+        if st.session_state['generated']:
+            for i in range(len(st.session_state['generated'])):
+                message(st.session_state['past'][i], is_user=True, key=str(i) + '_user')
+                message(st.session_state["generated"][i], key=str(i))

if __name__ == "__main__":
+    main()
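
A note on configuration: the app reads the token with API_TOKEN = st.secrets['HF_TOKEN'], which raises a bare KeyError when the secret is missing (for example when the Space's HF_TOKEN secret is not set, or when running locally without .streamlit/secrets.toml). A minimal sketch of a friendlier guard, using only standard Streamlit calls (st.secrets, st.error, st.stop); the environment-variable fallback is an assumption, not something app.py currently does:

import os
import streamlit as st

# Read the token from the Space secret / .streamlit/secrets.toml, fall back to
# an environment variable, and stop with a readable message if neither is set.
try:
    API_TOKEN = st.secrets["HF_TOKEN"]
except Exception:
    API_TOKEN = os.environ.get("HF_TOKEN")  # hypothetical fallback, not in app.py

if not API_TOKEN:
    st.error("HF_TOKEN is not configured. Add it as a Space secret or to .streamlit/secrets.toml.")
    st.stop()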
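
query() returns response.json() as-is, and generate_response() immediately indexes output[0]['generated_text']. While Mixtral-8x7B is loading, the serverless Inference API instead returns a JSON object with an "error" field, so that lookup would raise. A minimal retry sketch, reusing the API_URL and headers defined above; the retry count and wait time are arbitrary choices:

import time
import requests

def query_with_retry(payload, retries=3, wait_seconds=20):
    """Call the Inference API, retrying while the model is still loading."""
    data = None
    for _ in range(retries):
        response = requests.post(API_URL, headers=headers, json=payload)
        data = response.json()
        # Successful text-generation calls return a list of dicts containing
        # 'generated_text'; errors come back as a dict with an 'error' key.
        if isinstance(data, list):
            return data
        if isinstance(data, dict) and "error" in data:
            time.sleep(wait_seconds)  # e.g. "Model ... is currently loading"
            continue
        break
    raise RuntimeError(f"Inference API did not return a generation: {data}")

generate_response() could call query_with_retry(...) in place of query(...) with no other changes.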
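
translate() pins a specific replica URL ("...--replicas/2bmbx/") and rebuilds the Client on every call; replica addresses tend to change when the Space restarts, so the hard-coded URL eventually stops resolving. A sketch that connects once by Space id instead, using gradio_client's standard Client(space_id) constructor; the api_name value and the exact argument list of predict() are assumptions and should be checked against the Space's "Use via API" page:

from gradio_client import Client

# Connect once, by Space id, so the address survives Space restarts.
translation_client = Client("facebook/seamless-m4t-v2-large")

def translate(text, source="English", target="Moroccan Arabic"):
    # text, source and target mirror the app's call; api_name="/t2tt" is an assumption.
    return translation_client.predict(text, source, target, api_name="/t2tt")

Building the client once at module load also avoids re-opening a connection to the Space on every chat turn.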