zac committed
Commit dccd84a · 1 Parent(s): b380f6b

Update app.py

Files changed (1)
  1. app.py +5 -3
app.py CHANGED
@@ -11,6 +11,8 @@ llm = Llama(model_path= hf_hub_download(repo_id="TheBloke/OpenAssistant-Llama2-1
 
 history = []
 
+<|prompter|>{prompt}<|endoftext|><|assistant|>
+
 pre_prompt = " The user and the AI are having a conversation : "
 
 def generate_text(input_text, history):
@@ -18,12 +20,12 @@ def generate_text(input_text, history):
     print("input ", input_text)
     temp =""
     if history == []:
-        input_text_with_history = f"{pre_prompt}"+ "\n" + f"Q: {input_text} " + "\n" +" A:"
+        input_text_with_history = f"{pre_prompt}"+ "\n" + f"<|prompter|> {input_text} " + "\n" +" <|assistant|>"
     else:
         input_text_with_history = f"{history[-1][1]}"+ "\n"
-        input_text_with_history += f"Q: {input_text}" + "\n" +" A:"
+        input_text_with_history += f"<|prompter|> {input_text}" + "\n" +" <|assistant|>"
     print("new input", input_text_with_history)
-    output = llm(input_text_with_history, max_tokens=1024, stop=["Q:", "\n"], stream=True)
+    output = llm(input_text_with_history, max_tokens=1024, stop=["<|prompter|>", "<|endoftext|>", "\n"], stream=True)
     for out in output:
         stream = copy.deepcopy(out)
         print(stream["choices"][0]["text"])
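
For context: the commit replaces the plain "Q: / A:" turn markers with OpenAssistant's <|prompter|> / <|assistant|> special tokens and stops generation at the next turn marker or <|endoftext|>. Below is a minimal, hedged sketch of that prompt flow with llama-cpp-python, not the app itself; it assumes an already-loaded llama_cpp.Llama instance, a history stored as (user_message, prompt_so_far) pairs the way app.py reads history[-1][1], and the helper names build_prompt and stream_reply are illustrative only.

    # Minimal sketch of the new OpenAssistant-style prompt flow (assumptions noted above).
    from llama_cpp import Llama  # pip install llama-cpp-python; model loading omitted here

    pre_prompt = " The user and the AI are having a conversation : "

    def build_prompt(input_text: str, history: list) -> str:
        if history == []:
            # First turn: prepend the conversational pre_prompt.
            return f"{pre_prompt}\n<|prompter|> {input_text} \n <|assistant|>"
        # Later turns: continue from the last prompt text kept in history.
        return f"{history[-1][1]}\n<|prompter|> {input_text}\n <|assistant|>"

    def stream_reply(llm: Llama, prompt: str) -> str:
        reply = ""
        # Stop on the next user turn or end-of-text, as in the committed change.
        for chunk in llm(prompt,
                         max_tokens=1024,
                         stop=["<|prompter|>", "<|endoftext|>", "\n"],
                         stream=True):
            piece = chunk["choices"][0]["text"]
            print(piece, end="", flush=True)
            reply += piece
        return reply

Note that "\n" is carried over from the old stop list, so a reply still ends at the first newline; in the OpenAssistant format it is the special-token stops that actually mark the end of an assistant turn.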