schuler committed on
Commit
75c74b0
·
verified ·
1 Parent(s): ad8bce1

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +8 -6
app.py CHANGED
@@ -1,6 +1,6 @@
1
  import gradio as gr
2
 
3
- import os
4
  from transformers import AutoTokenizer, AutoModelForCausalLM, GenerationConfig, pipeline
5
  import torch
6
 
@@ -49,9 +49,9 @@ def respond(
49
  for hmessage in messages:
50
  role = "<|assistant|>" if hmessage['role'] == 'assistant' else "<|user|>"
51
  prompt += f"\n{role}\n{hmessage['content']}\n<|end|>\n"
52
- prompt += f"\n<|user|>\n{message}\n<|end|><|assistant|>\n"
53
 
54
- """
55
  # Generate the response
56
  response_output = generator(
57
  prompt,
@@ -68,10 +68,12 @@ def respond(
68
 
69
  # Extract the assistant's response
70
  result = generated_text[len(prompt):].strip()
71
- """
72
- result = message+':'+prompt
73
  except Exception as error:
74
- result = str(error)
 
 
75
 
76
  yield result
77
 
 
1
  import gradio as gr
2
 
3
+ import os, sys
4
  from transformers import AutoTokenizer, AutoModelForCausalLM, GenerationConfig, pipeline
5
  import torch
6
 
 
49
  for hmessage in messages:
50
  role = "<|assistant|>" if hmessage['role'] == 'assistant' else "<|user|>"
51
  prompt += f"\n{role}\n{hmessage['content']}\n<|end|>\n"
52
+ # prompt += f"\n<|user|>\n{message}\n<|end|><|assistant|>\n"
53
 
54
+ # """
55
  # Generate the response
56
  response_output = generator(
57
  prompt,
 
68
 
69
  # Extract the assistant's response
70
  result = generated_text[len(prompt):].strip()
71
+ # """
72
+ # result = message+':'+prompt
73
  except Exception as error:
74
+ exc_type, exc_obj, exc_tb = sys.exc_info()
75
+ fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
76
+ result = str(error) +':'+ exc_type +':'+ fname +':'+ exc_tb.tb_lineno
77
 
78
  yield result
79