Spaces:
Runtime error
Runtime error
Upload app.py
Browse files
app.py
CHANGED
@@ -28,7 +28,7 @@ tokenizer.pad_token = tokenizer.eos_token
|
|
28 |
tokenizer.padding_side = "right"
|
29 |
|
30 |
# ✅ Set up text generation pipeline
|
31 |
-
generator = pipeline("text-generation", model=model, tokenizer=tokenizer, max_length=500)
|
32 |
|
33 |
|
34 |
# ✅ Chatbot Function with Conversation History
|
@@ -44,7 +44,7 @@ def chat(user_input, history=[]):
|
|
44 |
prompt = f"{formatted_history}\n\n### User:\n{user_input}\n\n### Assistant:\n"
|
45 |
|
46 |
# Generate response
|
47 |
-
response = generator(prompt, max_length=128, do_sample=True)
|
48 |
|
49 |
# Extract only the model's generated response
|
50 |
answer = response[0]["generated_text"].split("### Assistant:\n")[-1].strip()
|
@@ -52,7 +52,7 @@ def chat(user_input, history=[]):
|
|
52 |
# Update conversation history
|
53 |
history.append((user_input, answer))
|
54 |
|
55 |
-
return
|
56 |
|
57 |
|
58 |
# ✅ Create Gradio Chat Interface
|
|
|
28 |
tokenizer.padding_side = "right"
|
29 |
|
30 |
# ✅ Set up text generation pipeline
|
31 |
+
generator = pipeline("text-generation", model=model, tokenizer=tokenizer, max_length=500, truncation=True)
|
32 |
|
33 |
|
34 |
# ✅ Chatbot Function with Conversation History
|
|
|
44 |
prompt = f"{formatted_history}\n\n### User:\n{user_input}\n\n### Assistant:\n"
|
45 |
|
46 |
# Generate response
|
47 |
+
response = generator(prompt, max_length=128, do_sample=True, truncation=True)
|
48 |
|
49 |
# Extract only the model's generated response
|
50 |
answer = response[0]["generated_text"].split("### Assistant:\n")[-1].strip()
|
|
|
52 |
# Update conversation history
|
53 |
history.append((user_input, answer))
|
54 |
|
55 |
+
return history # Return empty input and updated history
|
56 |
|
57 |
|
58 |
# ✅ Create Gradio Chat Interface
|