lvwerra HF staff committed on
Commit
d133b7d
1 Parent(s): 1ee3148

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +15 -15
app.py CHANGED
@@ -147,7 +147,7 @@ def generate(
147
  output = system_prompt + prompt
148
  generation_start_idx = len(output)
149
  highlighted_output = [
150
- (prompt, "query"),
151
  ]
152
  yield highlighted_output, output[generation_start_idx:]
153
  for response in stream:
@@ -181,13 +181,13 @@ def generate(
181
  # I am sorry about the code
182
  print("-------", generation_start_idx, request_idx, call_idx, response_idx)
183
  highlighted_output = [
184
- (prompt, "query"),
185
- (output[generation_start_idx:], "model") if request_idx == -1 else ("", ""),
186
- (output[generation_start_idx:generation_start_idx+request_idx], "model"),
187
- (output[generation_start_idx+request_idx:], "model") if call_idx == -1 else ("", ""),
188
- (output[generation_start_idx+request_idx:generation_start_idx+call_idx], "tool request"),
189
- (output[generation_start_idx+call_idx:generation_start_idx+response_idx], "tool call"),
190
- (output[generation_start_idx+response_idx:], "model") if submit_idx != -1 else ("", ""),
191
  # (output[generation_start_idx:generation_start_idx+request_idx], ""),
192
  # (output[generation_start_idx+request_idx:generation_start_idx+call_idx], "request"),
193
  # (output[generation_start_idx+call_idx:], "call"),
@@ -207,11 +207,11 @@ def generate(
207
  submit_idx = output[generation_start_idx:].find("<submit>")
208
  # print("-------", generation_start_idx, request_idx, call_idx, response_idx)
209
  highlighted_output = [
210
- (prompt, "query"),
211
- (output[generation_start_idx:generation_start_idx+request_idx], "model"),
212
- (output[generation_start_idx+request_idx:generation_start_idx+call_idx], "request"),
213
- (output[generation_start_idx+call_idx:generation_start_idx+response_idx], "call"),
214
- (output[generation_start_idx+response_idx:], "model") if submit_idx != -1 else ("", ""),
215
  ]
216
  # print(highlighted_output, output[generation_start_idx:])
217
  yield highlighted_output, output[generation_start_idx:]
@@ -291,9 +291,9 @@ with gr.Blocks(theme=theme, analytics_enabled=False, css=css) as demo:
291
 
292
  output = gr.HighlightedText(
293
  label="Output",
294
- color_map={"query": "red", "tool call": "green", "tool response": "blue", "model": "pink"},
295
  )
296
-
297
  gr_examples = gr.Examples(
298
  examples=[example for client in clients.values() for example in client[3]],
299
  inputs=[instruction],
 
147
  output = system_prompt + prompt
148
  generation_start_idx = len(output)
149
  highlighted_output = [
150
+ (prompt, "QUERY"),
151
  ]
152
  yield highlighted_output, output[generation_start_idx:]
153
  for response in stream:
 
181
  # I am sorry about the code
182
  print("-------", generation_start_idx, request_idx, call_idx, response_idx)
183
  highlighted_output = [
184
+ (prompt, "QUERY"),
185
+ (output[generation_start_idx:], "MODEL") if request_idx == -1 else ("", ""),
186
+ (output[generation_start_idx:generation_start_idx+request_idx], "MODEL"),
187
+ (output[generation_start_idx+request_idx:], "MODEL") if call_idx == -1 else ("", ""),
188
+ (output[generation_start_idx+request_idx:generation_start_idx+call_idx], "TOOL_REQUEST"),
189
+ (output[generation_start_idx+call_idx:generation_start_idx+response_idx], "TOOL_CALL"),
190
+ (output[generation_start_idx+response_idx:], "MODEL") if submit_idx != -1 else ("", ""),
191
  # (output[generation_start_idx:generation_start_idx+request_idx], ""),
192
  # (output[generation_start_idx+request_idx:generation_start_idx+call_idx], "request"),
193
  # (output[generation_start_idx+call_idx:], "call"),
 
207
  submit_idx = output[generation_start_idx:].find("<submit>")
208
  # print("-------", generation_start_idx, request_idx, call_idx, response_idx)
209
  highlighted_output = [
210
+ (prompt, "QUERY"),
211
+ (output[generation_start_idx:generation_start_idx+request_idx], "MODEL"),
212
+ (output[generation_start_idx+request_idx:generation_start_idx+call_idx], "TOOL_REQUEST"),
213
+ (output[generation_start_idx+call_idx:generation_start_idx+response_idx], "TOOL_CALL"),
214
+ (output[generation_start_idx+response_idx:], "MODEL") if submit_idx != -1 else ("", ""),
215
  ]
216
  # print(highlighted_output, output[generation_start_idx:])
217
  yield highlighted_output, output[generation_start_idx:]
 
291
 
292
  output = gr.HighlightedText(
293
  label="Output",
294
+ color_map={"QUERY": "red", "TOOL_CALL": "green", "TOOL_RESPONSE": "blue", "MODEL": "pink"},
295
  )
296
+ gr.Markdown("_Note:_ The trivia model is trained to give an answer first and then refine it with a Wiki call.")
297
  gr_examples = gr.Examples(
298
  examples=[example for client in clients.values() for example in client[3]],
299
  inputs=[instruction],