Commit ceb2dbe
Parent(s): 1db99e2

See if this works better
app.py CHANGED

@@ -44,7 +44,7 @@ def load_model():
     # model_id = "deepseek-ai/deepseek-llm-7b-chat"
     # model_id = "deepseek-ai/DeepSeek-R1-Distill-Qwen-1.5B"
 
-    model_id = "deepseek-ai/DeepSeek-R1-Distill-
+    model_id = "deepseek-ai/DeepSeek-R1-Distill-Llama-8B"
     tokenizer = AutoTokenizer.from_pretrained(model_id)
     model = AutoModelForCausalLM.from_pretrained(
         model_id,
@@ -60,7 +60,7 @@ def load_model():
     return tokenizer, model
 
 tokenizer, model = load_model()
-prompt = st.text_area("Enter your prompt:", "What is Leonardo
+prompt = st.text_area("Enter your prompt:", "What company is Leonardo S.p.A.?")
 # Example prompt selector
 # examples = {
 #     "🧠 Summary": "Summarize the history of AI in 5 bullet points.",
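
For context, a minimal sketch of how the changed lines plausibly fit into the surrounding app.py, assuming the common Streamlit + transformers pattern. The st.cache_resource decorator, the torch_dtype/device_map arguments, and the generate/decode step are assumptions for illustration and are not part of this commit.

# Context sketch only: the decorator, dtype/device settings, and the
# generation step are assumptions, not taken from this commit.
import streamlit as st
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

@st.cache_resource  # assumption: cache so the model loads once per session
def load_model():
    model_id = "deepseek-ai/DeepSeek-R1-Distill-Llama-8B"
    tokenizer = AutoTokenizer.from_pretrained(model_id)
    model = AutoModelForCausalLM.from_pretrained(
        model_id,
        torch_dtype=torch.float16,  # assumption: half precision to fit GPU memory
        device_map="auto",          # assumption: let accelerate place the weights
    )
    return tokenizer, model

tokenizer, model = load_model()
prompt = st.text_area("Enter your prompt:", "What company is Leonardo S.p.A.?")

if st.button("Generate"):
    inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
    output_ids = model.generate(**inputs, max_new_tokens=256)
    st.write(tokenizer.decode(output_ids[0], skip_special_tokens=True))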