Update app.py
app.py
CHANGED
@@ -3,7 +3,7 @@ import os
 from llama_cpp import Llama
 from huggingface_hub import hf_hub_download
 
-os.environ["LLAMA_CPP_USE_CUDA"] = "0"
+os.environ["LLAMA_CPP_USE_CUDA"] = "0"
 
 title = "Gemma 2 2B - Bulgarian Joke Master - GGUF"
 description = """
@@ -65,8 +65,8 @@ with gr.ChatInterface(
     title=title,
     description=description,
     theme="huggingface",
-    examples=[[
+    examples=[[{'role': 'user', 'content': 'Hello, tell me a Bulgarian joke!'}]]  # Updated to correct format
 ) as demo:
-    demo.launch(share=True)
+    demo.launch(share=True, theme="huggingface")
 
 llm.close()
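For context, the sketch below shows how the pieces touched by this commit fit together: forcing CPU-only inference via the environment variable, downloading a GGUF file with hf_hub_download, and wiring a gr.ChatInterface around llama-cpp-python. The repo_id, filename, context size, and the respond handler are placeholders (they are not visible in this diff), and the sketch uses the plain ChatInterface form with a string example rather than the with-block and dict-style example from the commit.

import os

os.environ["LLAMA_CPP_USE_CUDA"] = "0"  # force CPU-only inference, as in the diff above

import gradio as gr
from huggingface_hub import hf_hub_download
from llama_cpp import Llama

# Placeholder repo and filename: the actual GGUF repository is not shown in this commit.
model_path = hf_hub_download(
    repo_id="your-username/gemma-2-2b-bulgarian-jokes-GGUF",
    filename="gemma-2-2b.Q4_K_M.gguf",
)

# n_gpu_layers=0 keeps all layers on the CPU.
llm = Llama(model_path=model_path, n_ctx=2048, n_gpu_layers=0)

def respond(message, history):
    # Minimal handler: send only the latest user message to the model and return the reply.
    out = llm.create_chat_completion(
        messages=[{"role": "user", "content": message}],
        max_tokens=256,
    )
    return out["choices"][0]["message"]["content"]

demo = gr.ChatInterface(
    respond,
    title="Gemma 2 2B - Bulgarian Joke Master - GGUF",
    examples=["Hello, tell me a Bulgarian joke!"],  # plain-string example; the commit itself uses a messages-style dict
)

demo.launch()
llm.close()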