mihirjadhav committed on
Commit
b9409b0
1 Parent(s): d252fff

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +19 -7
app.py CHANGED
@@ -33,13 +33,25 @@ def generate(
33
 
34
  formatted_prompt = format_prompt(prompt, history)
35
 
36
- stream = client.text_generation(formatted_prompt, **generate_kwargs, stream=True, details=True, return_full_text=False)
37
- output = ""
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
38
 
39
- for response in stream:
40
- output += response.token.text
41
- yield output
42
- return output
43
 
44
 
45
  additional_inputs=[
@@ -56,7 +68,7 @@ additional_inputs=[
56
  label="Max new tokens",
57
  value=256,
58
  minimum=0,
59
- maximum=1048,
60
  step=64,
61
  interactive=True,
62
  info="The maximum numbers of new tokens",
 
33
 
34
  formatted_prompt = format_prompt(prompt, history)
35
 
36
+ try:
37
+ stream = client.text_generation(formatted_prompt, **generate_kwargs, stream=True, details=True, return_full_text=False)
38
+ output = ""
39
+ for response in stream:
40
+ if response.token.text: # Ensuring that there is text to add
41
+ output += response.token.text
42
+ yield output # Yield the updated output each time a new token is received
43
+ except Exception as e:
44
+ yield f"An error occurred: {str(e)}" # Handle any exceptions that might occur
45
+
46
+ # stream = client.text_generation(formatted_prompt, **generate_kwargs, stream=True, details=True, return_full_text=False)
47
+ # output = ""
48
+
49
+ # for response in stream:
50
+ # output += response.token.text
51
+ # yield output
52
+ # return output
53
+
54
 
 
 
 
 
55
 
56
 
57
  additional_inputs=[
 
68
  label="Max new tokens",
69
  value=256,
70
  minimum=0,
71
+ maximum=524,
72
  step=64,
73
  interactive=True,
74
  info="The maximum numbers of new tokens",