Update app.py
Browse files
app.py
CHANGED
@@ -5,8 +5,8 @@ from transformers import AutoTokenizer, AutoModelForCausalLM, GenerationConfig,
|
|
5 |
import torch
|
6 |
|
7 |
# Define the model repository
|
8 |
-
REPO_NAME = 'schuler/experimental-JP47D20'
|
9 |
-
|
10 |
|
11 |
# How to cache?
|
12 |
def load_model(repo_name):
|
@@ -98,7 +98,7 @@ For information on how to customize the ChatInterface, peruse the gradio docs: h
|
|
98 |
demo = gr.ChatInterface(
|
99 |
respond,
|
100 |
additional_inputs=[
|
101 |
-
gr.Textbox(value="You are a friendly Chatbot.", label="System message"),
|
102 |
gr.Slider(minimum=1, maximum=2048, value=64, step=1, label="Max new tokens"),
|
103 |
# gr.Slider(minimum=0.1, maximum=4.0, value=1.0, step=0.1, label="Temperature"),
|
104 |
gr.Slider(
|
|
|
5 |
import torch
|
6 |
|
7 |
# Define the model repository
|
8 |
+
# REPO_NAME = 'schuler/experimental-JP47D20'
|
9 |
+
REPO_NAME = 'schuler/experimental-JP47D21-KPhi-3-micro-4k-instruct'
|
10 |
|
11 |
# How to cache?
|
12 |
def load_model(repo_name):
|
|
|
98 |
demo = gr.ChatInterface(
|
99 |
respond,
|
100 |
additional_inputs=[
|
101 |
+
gr.Textbox(value="" + global_error, label="System message"),
|
102 |
gr.Slider(minimum=1, maximum=2048, value=64, step=1, label="Max new tokens"),
|
103 |
# gr.Slider(minimum=0.1, maximum=4.0, value=1.0, step=0.1, label="Temperature"),
|
104 |
gr.Slider(
|