Update app.py
app.py
CHANGED
@@ -1,36 +1,3 @@
 import gradio as gr
 
-gr.load("models/mistralai/Mistral-7B-Instruct-v0.3").launch()
-import gradio as gr
-import torch
-from mamba_model import MambaModel
-
-# Load the model
-model = MambaModel.from_pretrained(pretrained_model_name="Zyphra/BlackMamba-2.8B")
-model = model.cuda().half()
-
-# Define the function to generate output
-def generate_output(input_text):
-    # Convert the input text (comma-separated numbers) to a list of integers
-    try:
-        input_ids = [int(x.strip()) for x in input_text.split(",")]
-        inputs = torch.tensor(input_ids).cuda().long().unsqueeze(0)
-
-        # Run the model and get output
-        with torch.no_grad():
-            out = model(inputs)
-
-        # Convert output to a human-readable format
-        return out.cpu().numpy().tolist()
-    except Exception as e:
-        return f"Error: {str(e)}"
-
-# Set up the Gradio interface
-input_component = gr.Textbox(label="Input IDs (comma-separated)", placeholder="Enter input IDs like: 1, 2")
-output_component = gr.Textbox(label="Output")
-
-iface = gr.Interface(fn=generate_output, inputs=input_component, outputs=output_component, title="BlackMamba Model")
-
-# Launch the interface
-if __name__ == "__main__":
-    iface.launch()
+gr.load("models/mistralai/Mistral-7B-Instruct-v0.3").launch()
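The commit drops the self-hosted BlackMamba inference code and keeps only the gr.load call, which builds a Gradio demo for mistralai/Mistral-7B-Instruct-v0.3; the "models/" prefix tells Gradio to serve the model through Hugging Face's hosted inference rather than run it in the Space. A minimal sketch of the resulting app.py follows; the explicit src= form and the hf_token= argument are illustrative assumptions (a token is only relevant for gated or private models), not part of this commit.

# app.py — minimal sketch of the new Space, assuming a recent Gradio release
import gradio as gr

# Equivalent to gr.load("models/mistralai/Mistral-7B-Instruct-v0.3"):
# src="models" loads the model via the hosted inference backend.
demo = gr.load(
    "mistralai/Mistral-7B-Instruct-v0.3",
    src="models",
    # hf_token="hf_...",  # assumption: only needed if the model is gated/private
)

if __name__ == "__main__":
    demo.launch()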