Update app.py
app.py CHANGED
@@ -2,16 +2,15 @@ import gradio as gr
 import torch
 from transformers import AutoTokenizer, AutoModelForCausalLM
 
-
 tokenizer = AutoTokenizer.from_pretrained(
-    'kakaobrain/kogpt', revision='KoGPT6B-ryan1.5b-float16',
+    'kakaobrain/kogpt', revision='KoGPT6B-ryan1.5b-float16',  # or float32 version: revision=KoGPT6B-ryan1.5b
     bos_token='[BOS]', eos_token='[EOS]', unk_token='[UNK]', pad_token='[PAD]', mask_token='[MASK]'
 )
 
 model = AutoModelForCausalLM.from_pretrained(
-    'kakaobrain/kogpt', revision='KoGPT6B-ryan1.5b-float16',
+    'kakaobrain/kogpt', revision='KoGPT6B-ryan1.5b-float16',  # or float32 version: revision=KoGPT6B-ryan1.5b
     pad_token_id=tokenizer.eos_token_id,
-    torch_dtype=
+    torch_dtype='auto', low_cpu_mem_usage=True
 ).to(device='cpu', non_blocking=True)
 _ = model.eval()
 
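The substantive fix here is the dangling `torch_dtype=` argument from the previous revision (a syntax error as rendered), which is completed to `torch_dtype='auto', low_cpu_mem_usage=True`; the two added comments document the float32 checkpoint (revision `KoGPT6B-ryan1.5b`) as an alternative. A minimal usage sketch, reproducing the loading code from this commit and driving it with the standard transformers `generate` API; the Korean prompt and sampling parameters are illustrative, not part of this commit:

import torch
from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained(
    'kakaobrain/kogpt', revision='KoGPT6B-ryan1.5b-float16',
    bos_token='[BOS]', eos_token='[EOS]', unk_token='[UNK]', pad_token='[PAD]', mask_token='[MASK]'
)
model = AutoModelForCausalLM.from_pretrained(
    'kakaobrain/kogpt', revision='KoGPT6B-ryan1.5b-float16',
    pad_token_id=tokenizer.eos_token_id,
    torch_dtype='auto', low_cpu_mem_usage=True  # as introduced by this commit
).to(device='cpu', non_blocking=True)
_ = model.eval()

# Illustrative Korean prompt; any text works.
prompt = '인간처럼 생각하고, 행동하는 지능을 통해 인류가 이룩할 수 있는 것은'
with torch.no_grad():
    tokens = tokenizer.encode(prompt, return_tensors='pt')
    # Sampling parameters are illustrative; generating with a 6B-parameter
    # model on CPU is functional but slow.
    gen_tokens = model.generate(tokens, do_sample=True, temperature=0.8, max_length=64)
print(tokenizer.batch_decode(gen_tokens)[0])

Note that `torch_dtype='auto'` loads the weights in the dtype stored in the checkpoint (float16 for this revision), and `low_cpu_mem_usage=True` avoids materializing a randomly initialized copy of the model before the weights are loaded, which matters for a 6B-parameter model on a CPU host.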