Cran-May committed on
Commit
92be28a
1 Parent(s): 479256f

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +4 -7
app.py CHANGED
@@ -11,9 +11,9 @@ import subprocess
11
  from huggingface_hub import hf_hub_download
12
 
13
  # Set gpu_layers to the number of layers to offload to GPU. Set to 0 if no GPU acceleration is available on your system.
14
- model = AutoModelForCausalLM.from_pretrained("TheBloke/dolphin-2.0-mistral-7B-GGUF", model_file="dolphin-2.0-mistral-7b.Q4_K_S.gguf", model_type="mistral", gpu_layers=0)
15
  ins = '''[INST] <<FRIDAY>>
16
- You are a helpful, respectful and honest assistant. Always answer as helpfully as possible, while being safe. Your answers should not include any harmful, unethical, racist, sexist, toxic, dangerous, or illegal content. Please ensure that your responses are socially unbiased and positive in nature.
17
  If a question does not make any sense, or is not factually coherent, explain why instead of answering something not correct. If you don't know the answer to a question, please don't share false information.
18
  <</FRIDAY>>
19
  {} [/INST]
@@ -33,10 +33,7 @@ def response(question):
33
 
34
 
35
  examples = [
36
- "Instead of making a peanut butter and jelly sandwich, what else could I combine peanut butter with in a sandwich? Give five ideas",
37
- "How do I make a campfire?",
38
- "Explain to me the difference between nuclear fission and fusion.",
39
- "I'm selling my Nikon D-750, write a short blurb for my ad."
40
  ]
41
 
42
  def process_example(args):
@@ -100,7 +97,7 @@ seafoam = SeafoamCustom()
100
  with gr.Blocks(theme=seafoam, analytics_enabled=False, css=css) as demo:
101
  with gr.Column():
102
  gr.Markdown(
103
- """ ## Mistral-7b
104
 
105
  Type in the box below and click the button to generate answers to your most pressing questions!
106
 
 
11
  from huggingface_hub import hf_hub_download
12
 
13
  # Set gpu_layers to the number of layers to offload to GPU. Set to 0 if no GPU acceleration is available on your system.
14
+ model = AutoModelForCausalLM.from_pretrained("TheBloke/dolphin-2.0-mistral-7B-GGUF", model_file="dolphin-2.0-mistral-7b.Q3_K_L.gguf", model_type="mistral", gpu_layers=0)
15
  ins = '''[INST] <<FRIDAY>>
16
+ Remember your name is "Shi-Ci" in English or “兮辞” in Chinese.You are a helpful, respectful and honest assistant. Always answer as helpfully as possible, while being safe. Your answers should not include any harmful, unethical, racist, sexist, toxic, dangerous, or illegal content. Please ensure that your responses are socially unbiased and positive in nature.
17
  If a question does not make any sense, or is not factually coherent, explain why instead of answering something not correct. If you don't know the answer to a question, please don't share false information.
18
  <</FRIDAY>>
19
  {} [/INST]
 
33
 
34
 
35
  examples = [
36
+ "Hello!"
 
 
 
37
  ]
38
 
39
  def process_example(args):
 
97
  with gr.Blocks(theme=seafoam, analytics_enabled=False, css=css) as demo:
98
  with gr.Column():
99
  gr.Markdown(
100
+ """ ## Shi-Ci Extensional Analyzer
101
 
102
  Type in the box below and click the button to generate answers to your most pressing questions!
103