Update app.py
app.py CHANGED
@@ -4,17 +4,17 @@ import json
 import time
 
 # Initialize the pipeline with the new model
-pipe = pipeline("text-generation", model="Blexus/
+pipe = pipeline("text-generation", model="Blexus/Quble_test_model_v1_INSTRUCT_v2")
 
 def format_prompt(message, system, history):
-    prompt = f"SYSTEM: {system}
+    prompt = f"SYSTEM: {system} <|endofsystem|>"
 
     for entry in history:
         if len(entry) == 2:
             user_prompt, bot_response = entry
-            prompt += f"USER: {user_prompt}
+            prompt += f"USER: {user_prompt} <|endofuser|>\nASSISTANT: {bot_response}<|endoftext|>\n"
 
-    prompt += f"USER: {message}
+    prompt += f"USER: {message}<|endofuser|>\nASSISTANT:"
     return prompt
 
 def generate(prompt, system, history, temperature=0.9, max_new_tokens=4096, top_p=0.9, repetition_penalty=1.2):

@@ -50,7 +50,7 @@ customCSS = """
 additional_inputs = [
     gr.Textbox(
         label="System prompt",
-        value="You are a helpful assistant
+        value="You are a helpful intelligent assistant. Your name is Quble.",
         info="System prompt",
         interactive=True,
     ),
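For reference, a minimal sketch of what the updated template in format_prompt now builds. Only format_prompt is reproduced from the diff; the example conversation, system prompt value, and the __main__ wrapper below are illustrative assumptions, not part of the Space's code.

# Sketch of the updated prompt template introduced in this commit.
def format_prompt(message, system, history):
    prompt = f"SYSTEM: {system} <|endofsystem|>"
    for entry in history:
        if len(entry) == 2:
            user_prompt, bot_response = entry
            prompt += f"USER: {user_prompt} <|endofuser|>\nASSISTANT: {bot_response}<|endoftext|>\n"
    prompt += f"USER: {message}<|endofuser|>\nASSISTANT:"
    return prompt

if __name__ == "__main__":
    # Hypothetical one-turn history plus a new user message.
    example = format_prompt(
        "What can you do?",
        "You are a helpful intelligent assistant. Your name is Quble.",
        [("Hi", "Hello! How can I help you today?")],
    )
    print(example)
    # Prints a single string that renders as:
    # SYSTEM: You are a helpful intelligent assistant. Your name is Quble. <|endofsystem|>USER: Hi <|endofuser|>
    # ASSISTANT: Hello! How can I help you today?<|endoftext|>
    # USER: What can you do?<|endofuser|>
    # ASSISTANT:

The resulting string is what the Space would pass to the text-generation pipeline loaded from Blexus/Quble_test_model_v1_INSTRUCT_v2, with the model expected to continue after the trailing "ASSISTANT:" marker.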