ysharma (HF staff) committed
Commit ab59a6c · verified · 1 Parent(s): 6fc3ff4

using tuples as message type

Files changed (1)
  1. app.py +2 -3
app.py CHANGED
@@ -75,8 +75,8 @@ def chat_llama3_1_8b(message: str,
         str: The generated response.
     """
     conversation = []
-
-    # Gradio now supports the Messages API out of the box!
+    for user, assistant in history:
+        conversation.extend([{"role": "user", "content": user}, {"role": "assistant", "content": assistant}])
     conversation.append({"role": "user", "content": message})
 
     input_ids = tokenizer.apply_chat_template(conversation, return_tensors="pt").to(model.device)
@@ -115,7 +115,6 @@ with gr.Blocks(fill_height=True, css=css) as demo:
     gr.ChatInterface(
         fn=chat_llama3_1_8b,
         chatbot=chatbot,
-        type="messages",
         fill_height=True,
         examples_per_page=3,
         additional_inputs_accordion=gr.Accordion(label="⚙️ Parameters", open=False, render=False),
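
For context, below is a minimal runnable sketch of the pattern the added lines follow. It assumes only that Gradio is installed; chat_fn and its echo reply are illustrative stand-ins for the app's actual chat_llama3_1_8b, tokenizer, and model, which are outside this diff.

# Minimal sketch, not the app's implementation: the handler receives the
# tuple-format history and flattens it into role/content dicts, just as the
# added lines in the diff do.
import gradio as gr

def chat_fn(message, history):
    conversation = []
    # With the tuple message type, `history` is a list of (user, assistant) pairs.
    for user, assistant in history:
        conversation.extend([
            {"role": "user", "content": user},
            {"role": "assistant", "content": assistant},
        ])
    conversation.append({"role": "user", "content": message})
    # The real app passes `conversation` to tokenizer.apply_chat_template(...)
    # and generates with the model; here we simply echo the latest turn.
    return f"({len(conversation)} messages so far) You said: {message}"

# Leaving out type="messages", as the removed line does, keeps gr.ChatInterface
# on the tuple history format that the loop above expects.
demo = gr.ChatInterface(fn=chat_fn)

if __name__ == "__main__":
    demo.launch()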