from transformers.utils import (
    OptionalDependencyNotAvailable,
    _LazyModule,
    is_torch_available,
)

# Only import the modeling and configuration classes when torch is available.
try:
    if not is_torch_available():
        raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
    pass
else:
    from .modeling_optrecastmlp_llama import (
        OPTRECASTMLP_llamaModel,
        OPTRECASTMLP_LlamaForCausalLM,
    )
    from .configuration_optrecastmlp_llama import OPTRECASTMLP_llama
|
from transformers import AutoConfig, AutoModel, AutoModelForCausalLM

# Register the custom "optrecastmlp_llama" model type with the Auto classes so
# that AutoConfig / AutoModel / AutoModelForCausalLM can resolve it. Guard on
# torch availability, since the classes above are not imported otherwise.
if is_torch_available():
    AutoConfig.register("optrecastmlp_llama", OPTRECASTMLP_llama)
    AutoModel.register(OPTRECASTMLP_llama, OPTRECASTMLP_llamaModel)
    AutoModelForCausalLM.register(OPTRECASTMLP_llama, OPTRECASTMLP_LlamaForCausalLM)
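# Usage sketch (illustrative only, not executed at import time): once the
# registrations above have run, the custom model type resolves through the
# standard transformers entry points, e.g.
#
#     from transformers import AutoConfig, AutoModelForCausalLM
#     config = AutoConfig.for_model("optrecastmlp_llama")
#     model = AutoModelForCausalLM.from_config(config)
#
# Loading a saved checkpoint directory (path left unspecified here) works the
# same way through AutoModelForCausalLM.from_pretrained(...).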
|
# Public symbols grouped by submodule, in the layout expected by `_LazyModule`.
_import_structure = {
    "configuration_optrecastmlp_llama": ["OPTRECASTMLP_llama"],
    "modeling_optrecastmlp_llama": ["OPTRECASTMLP_llamaModel", "OPTRECASTMLP_LlamaForCausalLM"],
}

__all__ = ["OPTRECASTMLP_llamaModel", "OPTRECASTMLP_LlamaForCausalLM", "OPTRECASTMLP_llama"]