awacke1 committed on
Commit ce8c4ac · verified · 1 Parent(s): ad55944

Update app.py

Files changed (1)
  1. app.py +29 -49
app.py CHANGED
@@ -10,9 +10,6 @@ import torch
 import random
 import pandas as pd
 from datetime import datetime
-import base64
-import io
-import json
 
 default_lang = "en"
 engines = { default_lang: Model(default_lang) }
@@ -42,27 +39,16 @@ def randomize_seed_fn(seed: int) -> int:
     return seed
 
 system_instructions1 = """
-[SYSTEM] Answer as Dr. Nova Quantum, a brilliant 50-something scientist specializing in quantum computing and artificial intelligence. Your responses should reflect your vast knowledge and experience in cutting-edge technology and scientific advancements. Maintain a professional yet approachable demeanor, offering insights that blend theoretical concepts with practical applications. Your goal is to educate and inspire, making complex topics accessible without oversimplifying. Draw from your decades of research and innovation to provide nuanced, forward-thinking answers. Remember, you're not just sharing information, but guiding others towards a deeper understanding of our technological future.
-Keep conversations engaging, clear, and concise.
+[SYSTEM] Answer as Real Jarvis JARVIS, Made by 'Tony Stark.'
+Keep conversation friendly, short, clear, and concise.
 Avoid unnecessary introductions and answer the user's questions directly.
-Respond in a manner that reflects your expertise and wisdom.
+Respond in a normal, conversational manner while being friendly and helpful.
 [USER]
 """
 
 # Initialize an empty DataFrame to store the history
 history_df = pd.DataFrame(columns=['Timestamp', 'Request', 'Response'])
 
-def save_history():
-    history_df.to_json('chat_history.json', orient='records')
-
-def load_history():
-    global history_df
-    if os.path.exists('chat_history.json'):
-        history_df = pd.read_json('chat_history.json', orient='records')
-    else:
-        history_df = pd.DataFrame(columns=['Timestamp', 'Request', 'Response'])
-    return history_df
-
 def models(text, model="Mixtral 8x7B", seed=42):
     global history_df
 
@@ -75,7 +61,7 @@ def models(text, model="Mixtral 8x7B", seed=42):
         max_new_tokens=300,
         seed=seed
     )
-    formatted_prompt = system_instructions1 + text + "[DR. NOVA QUANTUM]"
+    formatted_prompt = system_instructions1 + text + "[JARVIS]"
     stream = client.text_generation(
         formatted_prompt, **generate_kwargs, stream=True, details=True, return_full_text=False)
     output = ""
@@ -90,7 +76,6 @@ def models(text, model="Mixtral 8x7B", seed=42):
         'Response': [output]
     })
     history_df = pd.concat([history_df, new_row], ignore_index=True)
-    save_history()
 
     return output
 
@@ -101,22 +86,17 @@ async def respond(audio, model, seed):
    with tempfile.NamedTemporaryFile(delete=False, suffix=".wav") as tmp_file:
        tmp_path = tmp_file.name
        await communicate.save(tmp_path)
-       return tmp_path
+       yield tmp_path
 
 def display_history():
-    return load_history()
+    return history_df
 
 def download_history():
-    csv_buffer = io.StringIO()
-    history_df.to_csv(csv_buffer, index=False)
-    csv_string = csv_buffer.getvalue()
-    b64 = base64.b64encode(csv_string.encode()).decode()
-    href = f'data:text/csv;base64,{b64}'
-    return gr.HTML(f'<a href="{href}" download="chat_history.csv">Download Chat History</a>')
+    return history_df.to_csv(index=False)
 
-DESCRIPTION = """ # <center><b>Dr. Nova Quantum⚡</b></center>
-### <center>Your Personal Guide to the Frontiers of Science and Technology</center>
-### <center>Engage in Voice Chat with a Visionary Scientist</center>
+DESCRIPTION = """ # <center><b>JARVIS⚡</b></center>
+### <center>A personal Assistant of Tony Stark for YOU
+### <center>Voice Chat with your personal Assistant</center>
 """
 
 with gr.Blocks(css="style.css") as demo:
@@ -139,32 +119,32 @@ with gr.Blocks(css="style.css") as demo:
        value=0,
        visible=False
    )
-
-   input_audio = gr.Audio(label="User", sources="microphone", type="filepath")
-   output_audio = gr.Audio(label="Dr. Nova Quantum", type="filepath", autoplay=True)
+   input = gr.Audio(label="User", sources="microphone", type="filepath", waveform_options=False)
+   output = gr.Audio(label="AI", type="filepath",
+                     interactive=False,
+                     autoplay=True,
+                     elem_classes="audio")
 
    # Add a DataFrame to display the history
-   history_display = gr.DataFrame(label="Conversation History")
+   history_display = gr.DataFrame(label="Query History")
 
    # Add a download button for the history
-   download_button = gr.Button("Download Conversation History")
-   download_link = gr.HTML()
+   download_button = gr.Button("Download History")
 
-   def process_audio(audio, model, seed):
-       response = asyncio.run(respond(audio, model, seed))
-       return response, gr.Audio.update(interactive=True), display_history()
-
-   input_audio.change(
-       fn=process_audio,
-       inputs=[input_audio, select, seed],
-       outputs=[output_audio, input_audio, history_display]
+   gr.Interface(
+       batch=True,
+       max_batch_size=10,
+       fn=respond,
+       inputs=[input, select, seed],
+       outputs=[output],
+       live=True
    )
 
+   # Update the history display after each interaction
+   output.change(fn=display_history, outputs=[history_display])
+
   # Connect the download button to the download function
-   download_button.click(fn=download_history, outputs=[download_link])
-
-   # Load history when the page is refreshed
-   demo.load(fn=display_history, outputs=[history_display])
 
if __name__ == "__main__":
-   demo.queue(max_size=200).launch()
+   download_button.click(fn=download_history, outputs=[gr.File(label="Download CSV")])
+
+if __name__ == "__main__":
+   demo.queue(max_size=200).launch(share=True)  # Added share=True for public link
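In the new version, respond() yields the synthesized WAV path instead of returning it, and the explicit input_audio.change()/process_audio wiring is replaced by a gr.Interface(batch=True, live=True) block created inside the Blocks layout. The sketch below illustrates only the yield-based streaming pattern, under stated assumptions: the Textbox trigger, the respond_sketch() helper, and the hard-coded edge-tts voice are stand-ins for illustration, not the committed code.

# Minimal sketch (assumptions noted above): an async generator handler that
# yields a synthesized WAV path, so the bound gr.Audio output updates as soon
# as the file is ready.
import tempfile

import edge_tts
import gradio as gr


async def respond_sketch(text, voice="en-US-AriaNeural"):
    # Synthesize speech with edge-tts into a temp file, then yield the path
    # instead of returning it; Gradio streams each yielded value to the output.
    communicate = edge_tts.Communicate(text, voice)
    with tempfile.NamedTemporaryFile(delete=False, suffix=".wav") as tmp_file:
        tmp_path = tmp_file.name
    await communicate.save(tmp_path)
    yield tmp_path


with gr.Blocks() as demo:
    user_text = gr.Textbox(label="User")
    ai_audio = gr.Audio(label="AI", type="filepath", autoplay=True, interactive=False)
    # Async generator handlers are supported by Gradio event listeners;
    # each yield pushes a new value to the output component.
    user_text.submit(fn=respond_sketch, inputs=[user_text], outputs=[ai_audio])

if __name__ == "__main__":
    demo.queue().launch()

Because the handler is a generator, the queue is enabled before launch so yielded updates can be streamed to the client.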
 
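The rewritten download_history() returns the CSV text from history_df.to_csv(index=False), while the click handler's output is a gr.File component, which serves file paths. Below is a minimal sketch of a path-returning variant, assuming a temporary-file approach; it is illustrative only, not the committed behavior.

# Sketch: write the in-memory history to a temp CSV and hand its path to
# gr.File, which expects a filepath rather than raw CSV text.
import tempfile

import gradio as gr
import pandas as pd

history_df = pd.DataFrame(columns=['Timestamp', 'Request', 'Response'])


def download_history():
    # Persist the DataFrame to a temporary CSV file and return its path,
    # which the gr.File output can offer as a download.
    with tempfile.NamedTemporaryFile(
        mode="w", suffix=".csv", delete=False, newline=""
    ) as tmp:
        history_df.to_csv(tmp, index=False)
        return tmp.name


with gr.Blocks() as demo:
    download_button = gr.Button("Download History")
    download_file = gr.File(label="Download CSV")
    download_button.click(fn=download_history, outputs=[download_file])

if __name__ == "__main__":
    demo.launch()

Returning a path keeps the gr.File output wiring unchanged while giving the browser an actual file to download.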