{
  "architectures": [
    "GPTModelForTextGeneration"
  ],
  "auto_map": {
    "AutoConfig": "configuration_gpt.GPTConfig",
    "AutoModelForCausalLM": "modeling_gpt.GPTModelForTextGeneration"
  },
  "block_size": 1024,
  "custom_pipelines": {
    "text-generation": {
      "default": {
        "model": {
          "pt": "samkeet/GPT_124M"
        }
      },
      "impl": "pipeline_gpt.GPT124MTextGenerationPipeline",
      "pt": [
        "AutoModelForCausalLM"
      ],
      "tf": [],
      "type": "text"
    }
  },
  "model_type": "custom_gpt",
  "n_embd": 768,
  "n_head": 12,
  "n_layer": 12,
  "torch_dtype": "float32",
  "transformers_version": "4.48.0",
  "vocab_size": 50304
}
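The "auto_map" and "custom_pipelines" entries tell Transformers which custom classes shipped with the repository to use when it is loaded with trust_remote_code=True: AutoConfig resolves to configuration_gpt.GPTConfig, AutoModelForCausalLM to modeling_gpt.GPTModelForTextGeneration, and the "text-generation" task to pipeline_gpt.GPT124MTextGenerationPipeline. Below is a minimal sketch of consuming this config; the repository id samkeet/GPT_124M comes from the config itself, while the prompt and generation settings are illustrative assumptions (the config does not show whether a tokenizer is bundled with the repo).

# Sketch: loading the custom GPT model and its registered pipeline.
# Assumes samkeet/GPT_124M contains configuration_gpt.py, modeling_gpt.py,
# and pipeline_gpt.py as referenced by auto_map / custom_pipelines.
from transformers import AutoConfig, AutoModelForCausalLM, pipeline

repo_id = "samkeet/GPT_124M"

# auto_map: AutoConfig -> GPTConfig, AutoModelForCausalLM -> GPTModelForTextGeneration
config = AutoConfig.from_pretrained(repo_id, trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(repo_id, trust_remote_code=True)

# custom_pipelines: "text-generation" -> GPT124MTextGenerationPipeline
generator = pipeline("text-generation", model=repo_id, trust_remote_code=True)
print(generator("Once upon a time"))  # prompt is an illustrative assumption

Because the custom classes live in the repository rather than in the transformers package, trust_remote_code=True is required for every call that resolves them.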