pp3232133 committed on
Commit dd1472d · 1 Parent(s): 396d0bd

Update app.py

Files changed (1)
  1. app.py +3 -7
app.py CHANGED
@@ -1,5 +1,5 @@
 import gradio as gr
-from transformers import AutoModelForCausalLM, AutoTokenizer, HfFolder
+from transformers import AutoModelForCausalLM, AutoTokenizer
 
 # Load the token from the configuration file
 with open("config.txt", "r") as f:
@@ -11,16 +11,12 @@ with open("config.txt", "r") as f:
         config[key] = value
 
 model_name = config.get("repo_id", "")
-repo_type = config.get("repo_type", "")
 token = config.get("token", "")
 
-# Create a Hugging Face folder with the authentication token
-hf_folder = HfFolder(model_name, repo_type, token=token)
-
 # Load the custom chatbot model from Hugging Face
 if model_name == "pp3232133/pp3232133-distilgpt2-wikitext2":
-    tokenizer = AutoTokenizer.from_pretrained(model_name, repo_path=hf_folder)
-    model = AutoModelForCausalLM.from_pretrained(model_name, repo_path=hf_folder)
+    tokenizer = AutoTokenizer.from_pretrained(model_name)
+    model = AutoModelForCausalLM.from_pretrained(model_name)
 
 # Function handling input and output for the Gradio interface
 def chatbot_interface(input_text):
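
Note: after this commit the code still reads a token from config.txt but never passes it to the model-loading calls, which only works while the repo stays public. Below is a minimal sketch, assuming a simple key=value format for config.txt (the parsing lines are elided from the diff above), of how the same config values could be forwarded to from_pretrained; the token= keyword is the current transformers spelling, while older releases used use_auth_token=.

from transformers import AutoModelForCausalLM, AutoTokenizer

# Parse simple "key=value" lines from the config file (assumed format).
config = {}
with open("config.txt", "r") as f:
    for line in f:
        line = line.strip()
        if line and "=" in line:
            key, value = line.split("=", 1)
            config[key] = value

model_name = config.get("repo_id", "")
token = config.get("token", "")

# Forward the token explicitly so a private repo also loads; pass None when
# the config holds no token so public repos are fetched anonymously.
if model_name == "pp3232133/pp3232133-distilgpt2-wikitext2":
    tokenizer = AutoTokenizer.from_pretrained(model_name, token=token or None)
    model = AutoModelForCausalLM.from_pretrained(model_name, token=token or None)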