Visibility improvements (#2)
- Visibility improvements (7c4b339a3519a800d20331082a0570c49d22c2d9)
- description (ee68945ca7c188bdf5ac2a86a249a003e64a66a8)
Co-authored-by: pandora <[email protected]>
README.md CHANGED

@@ -1,12 +1,13 @@
 ---
 title: Mistral Pixtral Demo
-emoji:
-colorFrom:
+emoji: π
+colorFrom: red
 colorTo: yellow
 sdk: gradio
 sdk_version: 4.44.0
 app_file: app.py
 pinned: false
-
-
-
+models:
+- mistralai/Pixtral-12B-2409
+short_description: Chat with Pixtral 12B using Mistral Inference
+---
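For reference, the README front matter after this commit, read from the right-hand (new) side of the diff above; the emoji value may have been garbled during extraction. The added models: list associates the Space with the mistralai/Pixtral-12B-2409 model page on the Hub, and short_description supplies the blurb shown on the Space card.

---
title: Mistral Pixtral Demo
emoji: π
colorFrom: red
colorTo: yellow
sdk: gradio
sdk_version: 4.44.0
app_file: app.py
pinned: false
models:
- mistralai/Pixtral-12B-2409
short_description: Chat with Pixtral 12B using Mistral Inference
---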
app.py CHANGED

@@ -55,5 +55,5 @@ def run_inference(message, history):
     result = tokenizer.decode(out_tokens[0])
     return result
 
-demo = gr.ChatInterface(fn=run_inference, title="Pixtral 12B", multimodal=True)
+demo = gr.ChatInterface(fn=run_inference, title="Pixtral 12B", multimodal=True, description="A demo chat interface with Pixtral 12B, deployed using Mistral Inference.")
 demo.queue().launch()
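For context, the changed line is the Gradio entry point at the bottom of app.py. The sketch below is a minimal, self-contained illustration of that pattern, not the Space's actual code: echo_inference is a hypothetical stand-in for run_inference, whose body (the Mistral Inference model call) lies outside this diff. It only shows how gr.ChatInterface with multimodal=True hands the callback a combined text-plus-files message, and where the description string added by this commit appears.

import gradio as gr

def echo_inference(message, history):
    # With multimodal=True, Gradio passes `message` as a dict:
    # {"text": <prompt string>, "files": [<uploaded file paths>]}.
    text = message["text"]
    files = message.get("files", [])
    return f"Got {len(files)} file(s) and the prompt: {text}"

demo = gr.ChatInterface(
    fn=echo_inference,  # the Space uses run_inference here
    title="Pixtral 12B",
    multimodal=True,    # enables image + text input in one box
    description="A demo chat interface with Pixtral 12B, deployed using Mistral Inference.",
)
demo.queue().launch()

The description string is rendered under the chat title on the Space page; together with the new README metadata (emoji, colorFrom, models, short_description) it is what the commit title's "visibility improvements" refers to.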