ffreemt committed
Commit c2783a5
1 Parent(s): 4410eba

Update reset=True

Files changed (1)
  1. app.py +6 -6
app.py CHANGED
@@ -142,9 +142,13 @@ def generate(
     """Run model inference, will return a Generator if streaming is true."""
     # _ = prompt_template.format(question=question)
     # print(_)
+
+    config = GenerationConfig(reset=True)  # rid of OOM?
+
     return llm(
         question,
-        **asdict(generation_config),
+        # **asdict(generation_config),
+        **asdict(config),
     )


@@ -357,11 +361,7 @@ with gr.Blocks(
         f"""<h5><center>{Path(model_loc).name}</center></h4>
         Most examples are meant for another model.
         You probably should try to test
-        some related prompts. For example:
-
-        Question: {{question}}
-
-        Answer: Let's work this out in a step by step way to be sure we have the right answer.""",
+        some related prompts.""",
         elem_classes="xsmall",
     )
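
The commit replaces the shared generation_config with a freshly built GenerationConfig(reset=True) on every call, so the model state is cleared between requests; the inline comment suggests this is an attempt to work around out-of-memory errors. Below is a minimal sketch of the pattern, assuming GenerationConfig is a dataclass defined elsewhere in app.py (the diff unpacks it with asdict) and using a stub in place of the real llm callable; the field names shown are illustrative assumptions, not the actual definitions.

    # Sketch only: GenerationConfig's real fields live elsewhere in app.py;
    # the ones below are assumptions for illustration.
    from dataclasses import asdict, dataclass


    @dataclass
    class GenerationConfig:
        max_new_tokens: int = 256  # assumed field
        temperature: float = 0.8   # assumed field
        reset: bool = True         # clear model state between calls


    def llm(question, **kwargs):
        """Stub standing in for the real model callable in app.py."""
        print(f"llm({question!r}) with {kwargs}")


    # Mirrors the new code path in generate(): build a fresh config per call
    # and unpack it into the model invocation.
    config = GenerationConfig(reset=True)  # rid of OOM?
    llm("What is 1 + 1?", **asdict(config))

Building the config inside generate() rather than reusing a module-level one makes the reset explicit on every request, at the cost of ignoring whatever values the old generation_config carried.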