admin committed on
Commit c54c298 · 1 Parent(s): 832cc9b

trans 2 en

Files changed (2)
  1. app.py +2 -2
  2. requirements.txt +0 -1
app.py CHANGED
@@ -7,7 +7,7 @@ from transformers import AutoModelForCausalLM, AutoTokenizer, TextIteratorStreamer
 MODEL_ID = "deepseek-ai/DeepSeek-R1-Distill-Qwen-7B"
 MODEL_NAME = MODEL_ID.split("/")[-1]
 CONTEXT_LENGTH = 16000
-DESCRIPTION = f"This is {MODEL_NAME} model designed for testing thinking for general AI tasks. <br>当前仅提供 HuggingFace 版部署实例,有算力的可自行克隆至本地或复刻至购买了 GPU 环境的账号测试"
+DESCRIPTION = f"This is a HuggingFace deployment instance of {MODEL_NAME} model, if you have computing power, you can test by cloning to local or forking to an account with purchased GPU environment"


 def predict(
@@ -72,7 +72,7 @@ if __name__ == "__main__":
     # Create Gradio interface
     gr.ChatInterface(
         predict,
-        title=f"{MODEL_NAME} 部署实例",
+        title=f"{MODEL_NAME} Deployment Instance",
         description=DESCRIPTION,
         additional_inputs_accordion=gr.Accordion(label="⚙️ Parameters", open=False),
         additional_inputs=[
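For context, the call changed in the second hunk is Gradio's gr.ChatInterface. The sketch below is a minimal, hypothetical reconstruction of how the renamed title and DESCRIPTION plug into it; the real predict streams tokens with AutoModelForCausalLM and TextIteratorStreamer (not shown in the diff), and the two sliders are assumed parameters for illustration only.

# Minimal sketch, not the repository's full app.py: the model code is stubbed out
# and the slider settings are assumptions.
import gradio as gr

MODEL_ID = "deepseek-ai/DeepSeek-R1-Distill-Qwen-7B"
MODEL_NAME = MODEL_ID.split("/")[-1]
CONTEXT_LENGTH = 16000
DESCRIPTION = f"This is a HuggingFace deployment instance of {MODEL_NAME} model"

def predict(message, history, temperature=0.6, max_new_tokens=1024):
    # Stand-in for the real generation loop; yielding partial strings is how
    # gr.ChatInterface streams a reply to the browser.
    yield f"[{MODEL_NAME} placeholder] {message}"

if __name__ == "__main__":
    gr.ChatInterface(
        predict,
        title=f"{MODEL_NAME} Deployment Instance",
        description=DESCRIPTION,
        additional_inputs_accordion=gr.Accordion(label="⚙️ Parameters", open=False),
        additional_inputs=[
            gr.Slider(0.0, 1.5, value=0.6, label="Temperature"),  # assumed control
            gr.Slider(64, 4096, value=1024, step=64, label="Max new tokens"),  # assumed control
        ],
    ).launch()
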
requirements.txt CHANGED
@@ -1,5 +1,4 @@
 torch==2.3.1+cu118
--f https://mirrors.aliyun.com/pytorch-wheels/cu118
 huggingface_hub==0.25.2
 transformers
 accelerate
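One side note on the requirements change: torch==2.3.1+cu118 is a CUDA 11.8 local-version wheel, and the removed -f line pointed pip at an Aliyun mirror of the PyTorch wheel index. After this change pip presumably has to find that wheel through whatever index the build environment already provides (for example the official PyTorch cu118 index); if it cannot, the pin would fail to resolve. A quick post-install sanity check, as a sketch:

# Sanity-check sketch (assumption: run inside the deployed environment after install)
import torch

print(torch.__version__)          # expect "2.3.1+cu118" if the CUDA 11.8 wheel resolved
print(torch.cuda.is_available())  # True only when a GPU and CUDA runtime are present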