milk639 committed
Commit fd582de · verified · 1 parent: e80ec63

Create app.py

Files changed (1): app.py (+19, -0)
app.py ADDED
@@ -0,0 +1,19 @@
+import torch
+
+# Check if GPU is available
+if torch.cuda.is_available():
+    device = torch.device("cuda")
+    print("GPU is available. Using GPU.")
+else:
+    device = torch.device("cpu")
+    print("GPU not available. Using CPU.")
+
+# Example model loading
+from transformers import AutoModelForCausalLM, AutoTokenizer
+
+model_name = "cerebras/btlm-3b-8k-base"
+tokenizer = AutoTokenizer.from_pretrained(model_name)
+model = AutoModelForCausalLM.from_pretrained(model_name)
+
+# Move model to the appropriate device
+model.to(device)
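
A minimal usage sketch, not part of the commit, showing how the model and tokenizer loaded in app.py could be exercised with a short prompt. It assumes the same model name and device-selection logic as above; note that this checkpoint ships a custom architecture, so from_pretrained typically needs trust_remote_code=True to load it.

# Usage sketch (assumption, not part of the committed app.py): run a short
# generation with the model and tokenizer loaded above.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

model_name = "cerebras/btlm-3b-8k-base"
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")

tokenizer = AutoTokenizer.from_pretrained(model_name)
# trust_remote_code=True is usually required for this model's custom code.
model = AutoModelForCausalLM.from_pretrained(model_name, trust_remote_code=True)
model.to(device)
model.eval()

# Tokenize a prompt and move the input tensors to the same device as the model.
prompt = "The capital of France is"
inputs = tokenizer(prompt, return_tensors="pt").to(device)

# Generate a short continuation without tracking gradients.
with torch.no_grad():
    output_ids = model.generate(**inputs, max_new_tokens=20)

print(tokenizer.decode(output_ids[0], skip_special_tokens=True))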