Text Generation
Transformers
Safetensors
Japanese
mistral
causal-lm
Not-For-All-Audiences
nsfw
text-generation-inference
Update README.md
README.md CHANGED
@@ -8,7 +8,7 @@ tags:
 pipeline_tag: text-generation
 ---
 
-# Berghof
+# Berghof NSFW 7B
 
 <img src="https://huggingface.co/Elizezen/Berghof-vanilla-7B/resolve/main/OIG1%20(2).jpg" alt="drawing" style="width:512px;"/>
 
@@ -24,9 +24,9 @@ Ensure you are using Transformers 4.34.0 or newer.
 import torch
 from transformers import AutoTokenizer, AutoModelForCausalLM
 
-tokenizer = AutoTokenizer.from_pretrained("Elizezen/Berghof-
+tokenizer = AutoTokenizer.from_pretrained("Elizezen/Berghof-NSFW-7B")
 model = AutoModelForCausalLM.from_pretrained(
-    "Elizezen/Berghof-
+    "Elizezen/Berghof-NSFW-7B",
     torch_dtype="auto",
 )
 model.eval()
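For context, the load-and-generate flow after this rename looks like the sketch below. The loading lines mirror the updated hunk; the prompt text and generation parameters (`max_new_tokens`, `temperature`) are illustrative assumptions and are not taken from the README shown in this diff.

```python
import torch
from transformers import AutoTokenizer, AutoModelForCausalLM

# Load the renamed model exactly as the updated README hunk does.
tokenizer = AutoTokenizer.from_pretrained("Elizezen/Berghof-NSFW-7B")
model = AutoModelForCausalLM.from_pretrained(
    "Elizezen/Berghof-NSFW-7B",
    torch_dtype="auto",
)
model.eval()

# Hypothetical prompt and sampling settings; the diff does not show the
# README's own generation example, so these values are placeholders.
prompt = "吾輩は猫である。"
inputs = tokenizer(prompt, return_tensors="pt").to(model.device)

with torch.no_grad():
    output_ids = model.generate(
        **inputs,
        max_new_tokens=128,
        do_sample=True,
        temperature=0.8,
    )

print(tokenizer.decode(output_ids[0], skip_special_tokens=True))
```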