zac committed on
Commit
ee51f28
·
1 Parent(s): dccd84a

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +1 -3
app.py CHANGED
@@ -11,8 +11,6 @@ llm = Llama(model_path= hf_hub_download(repo_id="TheBloke/OpenAssistant-Llama2-1
11
 
12
  history = []
13
 
14
- <|prompter|>{prompt}<|endoftext|><|assistant|>
15
-
16
  pre_prompt = " The user and the AI are having a conversation : "
17
 
18
  def generate_text(input_text, history):
@@ -25,7 +23,7 @@ def generate_text(input_text, history):
25
  input_text_with_history = f"{history[-1][1]}"+ "\n"
26
  input_text_with_history += f"<|prompter|> {input_text}" + "\n" +" <|assistant|>"
27
  print("new input", input_text_with_history)
28
- output = llm(input_text_with_history, max_tokens=1024, stop=["<|prompter|>", "<|endoftext|>", "\n"], stream=True)
29
  for out in output:
30
  stream = copy.deepcopy(out)
31
  print(stream["choices"][0]["text"])
 
11
 
12
  history = []
13
 
 
 
14
  pre_prompt = " The user and the AI are having a conversation : "
15
 
16
  def generate_text(input_text, history):
 
23
  input_text_with_history = f"{history[-1][1]}"+ "\n"
24
  input_text_with_history += f"<|prompter|> {input_text}" + "\n" +" <|assistant|>"
25
  print("new input", input_text_with_history)
26
+ output = llm(input_text_with_history, max_tokens=1024, stop=["<|prompter|>", "<|endoftext|>"], stream=True)
27
  for out in output:
28
  stream = copy.deepcopy(out)
29
  print(stream["choices"][0]["text"])