RamAnanth1 committed
Commit 8eefb9c · 1 Parent(s): 189eb29
Update app.py
app.py CHANGED

@@ -1,5 +1,7 @@
 import gradio as gr
-from
+from instruct_pipeline import InstructionTextGenerationPipeline
+from transformers import AutoModelForCausalLM, AutoTokenizer
+
 import torch
 
 theme = gr.themes.Monochrome(
@@ -10,9 +12,13 @@ theme = gr.themes.Monochrome(
     font=[gr.themes.GoogleFont("Open Sans"), "ui-sans-serif", "system-ui", "sans-serif"],
 )
 
-
+tokenizer = AutoTokenizer.from_pretrained("databricks/dolly-v2-12b", padding_side="left")
+model = AutoModelForCausalLM.from_pretrained("databricks/dolly-v2-12b", load_in_8bit=True, device_map="auto")
+
+generate_text = InstructionTextGenerationPipeline(model=model, tokenizer=tokenizer)
+
 def generate(instruction):
-    response =
+    response = generate_text(instruction)
     result = ""
     for word in response.split(" "):
         result += word + " "
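
For context, below is a minimal, self-contained sketch of how the pieces introduced in this commit typically fit together in a runnable app. It is an illustration, not the Space's actual app.py: the streaming loop with yield, the sleep interval, and the gr.Interface wiring are assumptions, and instruct_pipeline is assumed to be the helper module distributed alongside databricks/dolly-v2-12b (with bitsandbytes installed so load_in_8bit works).

# Illustrative sketch only; assumptions are flagged in comments.
import time

import gradio as gr
import torch  # imported as in the Space's app.py; not used directly in this sketch
from transformers import AutoModelForCausalLM, AutoTokenizer

# instruct_pipeline.py is the helper script shipped with the databricks/dolly-v2-12b repo.
from instruct_pipeline import InstructionTextGenerationPipeline

theme = gr.themes.Monochrome()  # the Space customizes fonts here; defaults suffice for the sketch

# Left padding, as in the commit, so prompts line up at the end of the input for generation.
tokenizer = AutoTokenizer.from_pretrained("databricks/dolly-v2-12b", padding_side="left")
# 8-bit loading requires bitsandbytes; device_map="auto" places the model on available devices.
model = AutoModelForCausalLM.from_pretrained(
    "databricks/dolly-v2-12b", load_in_8bit=True, device_map="auto"
)

generate_text = InstructionTextGenerationPipeline(model=model, tokenizer=tokenizer)


def generate(instruction):
    # The diff treats the pipeline's return value as a plain string, so this sketch does too.
    response = generate_text(instruction)
    result = ""
    # Re-emit the response word by word so the UI appears to stream (an assumption about how
    # the rest of the function continues; the diff is cut off after the line below).
    for word in response.split(" "):
        result += word + " "
        yield result
        time.sleep(0.05)


# Hypothetical wiring: a generator function needs queuing enabled to stream in Gradio.
demo = gr.Interface(fn=generate, inputs="text", outputs="text", theme=theme)
demo.queue().launch()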