markcoatsworth committed
Commit decedce · 1 Parent(s): af71772

Experimenting with library installation

Files changed (1):
  1. app.py +13 -4
app.py CHANGED

@@ -1,5 +1,10 @@
 import gradio as gr
-from transformers import AutoTokenizer, AutoModelForCausalLM
+import spaces
+import subprocess
+# from transformers import AutoTokenizer, AutoModelForCausalLM
+
+result = subprocess.run(["python3", "-m", "pip", "install", "transformers==4.34.0"], shell=True, capture_output=True, text=True)
+print(result.stdout)
 
 model = None
 
@@ -7,9 +12,8 @@ model = None
 def greet(name):
     return "Hello " + name + "!!"
 
-
-def main():
-
+@spaces.GPU
+def load_model():
     print(f"Loading model...")
     model_path = "meta-llama/Meta-Llama-3-8B"
     tokenizer = AutoTokenizer.from_pretrained(model_path)
@@ -19,6 +23,11 @@ def main():
         device_map="auto",
     )
 
+
+def main():
+
+    load_model()
+
     demo = gr.Interface(fn=greet, inputs="text", outputs="text")
     demo.launch()
 
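
One thing worth flagging in the runtime-install experiment: when subprocess.run is given a list together with shell=True, a POSIX shell receives only the first element ("python3") as the command and the remaining elements become arguments to the shell itself, so the pip install as written most likely never executes. A minimal sketch of the same install step without shell=True, using sys.executable so pip targets the interpreter running the Space:

import subprocess
import sys

# Run pip against the current interpreter; check=True raises if the install fails.
result = subprocess.run(
    [sys.executable, "-m", "pip", "install", "transformers==4.34.0"],
    capture_output=True,
    text=True,
    check=True,
)
print(result.stdout)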
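
Separately, with the top-level "from transformers import AutoTokenizer, AutoModelForCausalLM" commented out, AutoTokenizer is undefined by the time load_model() runs. One way to keep the deferred-install idea working is to import transformers inside the @spaces.GPU function, after the install has had a chance to run. A rough sketch of that shape (the global assignments are an assumption about how the loaded objects would be shared, not part of this commit):

import spaces

model = None
tokenizer = None

@spaces.GPU
def load_model():
    # Deferred import: the transformers module only exists after the runtime pip install above.
    from transformers import AutoTokenizer, AutoModelForCausalLM

    global model, tokenizer
    print("Loading model...")
    model_path = "meta-llama/Meta-Llama-3-8B"
    tokenizer = AutoTokenizer.from_pretrained(model_path)
    model = AutoModelForCausalLM.from_pretrained(
        model_path,
        device_map="auto",
    )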