add models
model.py
CHANGED
@@ -4,16 +4,16 @@ from transformers import AutoModelForCausalLM, AutoTokenizer, GPT2LMHeadModel, G
 
 # Dictionary of models and paths
 model_dict = {
-    "GPT2": {"path": "
-    "GPT2-medium": {"path": "
-    "GPT2-large": {"path": "
-    "GPT2-persian": {"path": "
-    "codegen": {"path": "
-    "dialoGPT": {"path": "
-    "dialoGPT-medium": {"path": "
-    "dialoGPT-large": {"path": "
-    "GPT-Neo-125M": {"path": "
-    "bert-emotion": {"path": "
+    "GPT2": {"path": "openai-community/gpt2", "library": GPT2LMHeadModel, "tokenizer": GPT2Tokenizer, "use_pipeline": False},
+    "GPT2-medium": {"path": "openai-community/gpt2-medium", "library": GPT2LMHeadModel, "tokenizer": GPT2Tokenizer, "use_pipeline": False},
+    "GPT2-large": {"path": "openai-community/gpt2-large", "library": AutoModelForCausalLM, "tokenizer": AutoTokenizer, "use_pipeline": False},
+    "GPT2-persian": {"path": "flax-community/gpt2-medium-persian", "library": GPT2LMHeadModel, "tokenizer": AutoTokenizer, "use_pipeline": False},
+    "codegen": {"path": "Salesforce/codegen-350M-mono", "library": AutoModelForCausalLM, "tokenizer": AutoTokenizer, "use_pipeline": False},
+    "dialoGPT": {"path": "microsoft/DialoGPT-small", "library": AutoModelForCausalLM, "tokenizer": AutoTokenizer, "use_pipeline": False},
+    "dialoGPT-medium": {"path": "microsoft/DialoGPT-medium", "library": AutoModelForCausalLM, "tokenizer": AutoTokenizer, "use_pipeline": False},
+    "dialoGPT-large": {"path": "microsoft/DialoGPT-large", "library": AutoModelForCausalLM, "tokenizer": AutoTokenizer, "use_pipeline": False},
+    "GPT-Neo-125M": {"path": "EleutherAI/gpt-neo-125m", "library": AutoModelForCausalLM, "tokenizer": AutoTokenizer, "use_pipeline": True},  # add the new model
+    "bert-emotion": {"path": "bhadresh-savani/distilbert-base-uncased-emotion", "library": AutoModelForSequenceClassification, "tokenizer": AutoTokenizer, "use_pipeline": True},
 }
 
 loaded_models = {}
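
Each new entry pairs a Hub model ID with the class used to load it, the matching tokenizer class, and a use_pipeline flag, while loaded_models serves as a cache. A minimal loader that consumes this dictionary could look like the sketch below; the get_model helper and the pipeline task names are assumptions for illustration, since the loading code itself is not part of this diff.

from transformers import pipeline

def get_model(name):
    """Load the model described by model_dict[name] once and cache it (illustrative sketch)."""
    if name in loaded_models:
        return loaded_models[name]

    cfg = model_dict[name]
    if cfg["use_pipeline"]:
        # bert-emotion is a classifier; the other pipeline entry generates text
        task = "text-classification" if name == "bert-emotion" else "text-generation"
        loaded_models[name] = pipeline(task, model=cfg["path"])
    else:
        tokenizer = cfg["tokenizer"].from_pretrained(cfg["path"])
        model = cfg["library"].from_pretrained(cfg["path"])
        loaded_models[name] = (model, tokenizer)
    return loaded_models[name]

Usage would then be, for example, model, tokenizer = get_model("GPT2") for the direct entries, or classifier = get_model("bert-emotion") for the pipeline-backed ones.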