zac committed on
Commit
a14b41a
·
1 Parent(s): 2784000

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +6 -4
app.py CHANGED
@@ -11,21 +11,23 @@ llm = Llama(model_path= hf_hub_download(repo_id="TheBloke/orca_mini_3B-GGML", fi
11
 
12
  history = []
13
 
 
 
14
  def generate_text(input_text, history):
15
  print("history ",history)
16
  print("input ", input_text)
17
  if history == []:
18
- input_text_with_history = f"Q: {input_text} \n A:"
19
  else:
20
  input_text_with_history = f"{history[-1][1]}"+ "\n"
21
- input_text_with_history += f"Q: {input_text} \n A:"
22
  print("new input", input_text_with_history)
23
- output = llm(input_text_with_history, max_tokens=1024, stop=["Q:", "\n"], stream=True, echo=True)
24
 
25
  for out in output:
26
  stream = copy.deepcopy(out)
27
  print(stream["choices"][0]["text"])
28
- yield output
29
 
30
  history =["init",input_text_with_history]
31
 
 
11
 
12
history = []

# Conversation preamble prepended to the very first user turn.
preprompt = " The user and the AI are having a conversation : "


def generate_text(input_text, history):
    """Stream generated text from the local Llama model for one chat turn.

    Args:
        input_text: The user's new question.
        history: List of (user, bot) pairs; only the last bot reply is
            used as context (empty list means a fresh conversation).

    Yields:
        str: Each streamed text chunk produced by the model.
    """
    print("history ", history)
    print("input ", input_text)
    if history == []:
        # BUG FIX: the original referenced `pre_prompt`, an undefined name
        # (the module-level constant is `preprompt`), raising NameError on
        # the first turn of every conversation.
        input_text_with_history = f"{preprompt}" + "\n" + f"Q: {input_text} " + "\n" + " A:"
    else:
        # Prepend only the previous bot reply as context.
        input_text_with_history = f"{history[-1][1]}" + "\n"
        input_text_with_history += f"Q: {input_text}" + "\n" + " A:"
    print("new input", input_text_with_history)
    output = llm(input_text_with_history, max_tokens=1024, stop=["Q:", "\n"], stream=True)

    for out in output:
        stream = copy.deepcopy(out)
        print(stream["choices"][0]["text"])
        yield stream["choices"][0]["text"]

    # NOTE(review): rebinding the local parameter here does not propagate to
    # the caller — presumably the chat UI manages history externally; confirm.
    history = ["init", input_text_with_history]
33