giustinod committed on
Commit
a8f6130
1 Parent(s): ffd02ca

Update app.py


Modified description and added share=True

Files changed (1)
  1. app.py +5 -5
app.py CHANGED
@@ -8,11 +8,11 @@ import torch
 from transformers import AutoModelForCausalLM, AutoTokenizer, TextIteratorStreamer
 
 DESCRIPTION = """\
-# Llama 3.2 3B Instruct
+# TestLogica-AZService
 
-Llama 3.2 3B is Meta's latest iteration of open LLMs.
-This is a demo of [`meta-llama/Llama-3.2-3B-Instruct`](https://huggingface.co/meta-llama/Llama-3.2-3B-Instruct), fine-tuned for instruction following.
-For more details, please check [our post](https://huggingface.co/blog/llama32).
+TestLogica-AZService was obtained from the Llama 3.2 3B Instruct model.
+Starting from the yale-nlp/FOLIO dataset, I concatenated premises and premises-FOL to use as the user request, and concatenated conclusion and conclusion-FOL to use as the assistant response.
+I ran the training and saved the model.
 """
 
 MAX_MAX_NEW_TOKENS = 2048
@@ -133,4 +133,4 @@ with gr.Blocks(css="style.css", fill_height=True) as demo:
     chat_interface.render()
 
 if __name__ == "__main__":
-    demo.queue(max_size=20).launch()
+    demo.queue(max_size=20).launch(share=True)
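
The new description summarizes how the fine-tuning data was prepared from yale-nlp/FOLIO. The commit itself does not include that preprocessing, but a minimal sketch of the described concatenation might look like the following; the column names ("premises", "premises-FOL", "conclusion", "conclusion-FOL") and the chat-message layout are assumptions based on the description, not code from this repository.

```python
# Hedged sketch: build user/assistant message pairs from yale-nlp/FOLIO by
# concatenating premises with premises-FOL (user turn) and conclusion with
# conclusion-FOL (assistant turn), as described above. Column names are
# assumed; verify them against the dataset before training.
from datasets import load_dataset

def to_chat_example(row):
    # User turn: natural-language premises followed by their FOL form.
    user_msg = f'{row["premises"]}\n{row["premises-FOL"]}'
    # Assistant turn: conclusion followed by its FOL form.
    assistant_msg = f'{row["conclusion"]}\n{row["conclusion-FOL"]}'
    return {
        "messages": [
            {"role": "user", "content": user_msg},
            {"role": "assistant", "content": assistant_msg},
        ]
    }

folio = load_dataset("yale-nlp/FOLIO", split="train")
chat_dataset = folio.map(to_chat_example)
```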