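"""Gradio chat demo for stabilityai/stablelm-zephyr-3b.

Loads the tokenizer and model with transformers, wraps single-turn generation
in a small ChatBot class, and serves it through a two-input (message + system
prompt) Gradio Interface.
"""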
from transformers import AutoTokenizer, AutoModelForCausalLM
import torch
import gradio as gr
import os


# Load tokenizer and model. device_map="auto" places the model on the GPU when
# one is available and on the CPU otherwise, so no explicit .to(device) call is needed.
tokenizer = AutoTokenizer.from_pretrained('stabilityai/stablelm-zephyr-3b')
model = AutoModelForCausalLM.from_pretrained(
    'stabilityai/stablelm-zephyr-3b',
    trust_remote_code=True,
    device_map="auto"
)

class ChatBot:
    def __init__(self):
        # Conversation history (not used by predict() yet)
        self.history = []

    def predict(self, user_input, system_prompt="You are an expert analyst and provide assessment:"):
        # Prepend the system prompt to the user message and send it as a single
        # user turn through the model's chat template.
        prompt = [{'role': 'user', 'content': system_prompt + "\n" + user_input}]
        inputs = tokenizer.apply_chat_template(
            prompt,
            add_generation_prompt=True,
            return_tensors='pt'
        )

        # Generate a response; enable sampling so the temperature setting takes effect
        tokens = model.generate(
            inputs.to(model.device),
            max_new_tokens=250,
            temperature=0.8,
            do_sample=True
        )

        # Decode only the newly generated tokens, dropping the prompt and special tokens
        response_text = tokenizer.decode(tokens[0][inputs.shape[-1]:], skip_special_tokens=True)

        # Free up memory
        del tokens
        torch.cuda.empty_cache()

        return response_text

bot = ChatBot()

title = "πŸ‘‹πŸ»Welcome to 🌟Tonic'sπŸ—½Stable🌟LM 3BπŸš€Chat"
description = """
You can use this Space to test out the current model [stabilityai/stablelm-zephyr-3b](https://huggingface.co/stabilityai/stablelm-zephyr-3b).
You can also use 😷StableMedβš•οΈ on your laptop by cloning this space. πŸ§¬πŸ”¬πŸ” Simply click here: <a style="display:inline-block" href="https://huggingface.co/spaces/Tonic/TonicsStableLM3B?duplicate=true"><img src="https://img.shields.io/badge/-Duplicate%20Space-blue?labelColor=white&style=flat&logo=data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAAAXNSR0IArs4c6QAAAP5JREFUOE+lk7FqAkEURY+ltunEgFXS2sZGIbXfEPdLlnxJyDdYB62sbbUKpLbVNhyYFzbrrA74YJlh9r079973psed0cvUD4A+4HoCjsA85X0Dfn/RBLBgBDxnQPfAEJgBY+A9gALA4tcbamSzS4xq4FOQAJgCDwV2CPKV8tZAJcAjMMkUe1vX+U+SMhfAJEHasQIWmXNN3abzDwHUrgcRGmYcgKe0bxrblHEB4E/pndMazNpSZGcsZdBlYJcEL9Afo75molJyM2FxmPgmgPqlWNLGfwZGG6UiyEvLzHYDmoPkDDiNm9JR9uboiONcBXrpY1qmgs21x1QwyZcpvxt9NS09PlsPAAAAAElFTkSuQmCC&logoWidth=14" alt="Duplicate Space"></a>
Join us: 🌟TeamTonic🌟 is always making cool demos! Join our active builders'πŸ› οΈcommunity on πŸ‘»Discord: [Discord](https://discord.gg/GWpVpekp). On πŸ€—Huggingface: [TeamTonic](https://huggingface.co/TeamTonic) & [MultiTransformer](https://huggingface.co/MultiTransformer). On 🌐Github: [Polytonic](https://github.com/tonic-ai), and contribute to 🌟 [PolyGPT](https://github.com/tonic-ai/polygpt-alpha).
"""
examples = [["What is the proper treatment for buccal herpes?", "Please provide information on the most effective antiviral medications and home remedies for treating buccal herpes."]]

iface = gr.Interface(
    fn=bot.predict,
    title=title,
    description=description,
    examples=examples,
    inputs=["text", "text"],
    outputs="text",
    theme="ParityError/Anime"
)
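
# launch() serves the app on Gradio's default local URL; pass share=True to
# launch() for a temporary public link when running outside a Hugging Face Space.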

iface.launch()