davidizzle committed
Commit ceb2dbe · 1 Parent(s): 1db99e2

See if this works better

Files changed (1): app.py (+2 -2)
app.py CHANGED
@@ -44,7 +44,7 @@ def load_model():
     # model_id = "deepseek-ai/deepseek-llm-7b-chat"
     # model_id = "deepseek-ai/DeepSeek-R1-Distill-Qwen-1.5B"
 
-    model_id = "deepseek-ai/DeepSeek-R1-Distill-Qwen-1.5B"
+    model_id = "deepseek-ai/DeepSeek-R1-Distill-Llama-8B"
     tokenizer = AutoTokenizer.from_pretrained(model_id)
     model = AutoModelForCausalLM.from_pretrained(
         model_id,
@@ -60,7 +60,7 @@ def load_model():
     return tokenizer, model
 
 tokenizer, model = load_model()
-prompt = st.text_area("Enter your prompt:", "What is Leonardo, the company with the red logo?")
+prompt = st.text_area("Enter your prompt:", "What company is Leonardo S.p.A.?")
 # Example prompt selector
 # examples = {
 #     "🧠 Summary": "Summarize the history of AI in 5 bullet points.",