ffreemt commited on
Commit
6bd1e61
·
1 Parent(s): 324ff2d

Update: exit when no CUDA is present

Browse files
Files changed (1) hide show
  1. app.py +5 -0
app.py CHANGED
@@ -3,6 +3,7 @@ import gc
3
  import os
4
  import time
5
 
 
6
  import rich
7
  import torch
8
  from huggingface_hub import snapshot_download
@@ -11,6 +12,10 @@ from transformers import AutoModelForCausalLM, AutoTokenizer
11
  from transformers.generation.utils import GenerationConfig
12
 
13
  model_name = "baichuan-inc/Baichuan2-13B-Chat-4bits"
 
 
 
 
14
  # snapshot_download?
15
  loc = snapshot_download(repo_id=model_name, local_dir="model")
16
 
 
3
  import os
4
  import time
5
 
6
+ import gradio
7
  import rich
8
  import torch
9
  from huggingface_hub import snapshot_download
 
12
  from transformers.generation.utils import GenerationConfig
13
 
14
  model_name = "baichuan-inc/Baichuan2-13B-Chat-4bits"
15
+ if not torch.cuda.is_available():
16
+ gradio.Error(f"No cuda, cant run {model_name}")
17
+ raise SystemError(1)
18
+
19
  # snapshot_download?
20
  loc = snapshot_download(repo_id=model_name, local_dir="model")
21