illness_104_24 / config.json
{
  "architectures": [
    "GridTSTForTimeSeriesPrediction"
  ],
  "attention_dropout": 0.0,
  "attention_strategy": "alternate",
  "d_model": 96,
  "dropout": 0.2,
  "ffn_dim": 256,
  "head_dropout": 0.0,
  "init_std": 0.2,
  "label_len": 24,
  "model_type": "gridtst",
  "norm_type": "layernorm",
  "num_channels": 7,
  "num_heads": 16,
  "num_layers": 3,
  "num_patches": 7,
  "patch_len": 64,
  "qkv_bias": true,
  "revin_affine": false,
  "seq_len": 104,
  "stride": 8,
  "torch_dtype": "float32",
  "transformers_version": "4.35.2"
}
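Below is a minimal Python sketch of how this configuration file might be inspected, assuming it has been downloaded locally as config.json. The repository id in the commented-out transformers call is illustrative, and loading through AutoConfig would only work if the repository ships the custom GridTST modeling code, since "gridtst" is not a model type in the core transformers library.

    import json

    # Read the raw config.json shown above (local path is illustrative).
    with open("config.json") as f:
        cfg = json.load(f)

    # Key hyperparameters of this GridTST checkpoint:
    # a 104-step input window over 7 channels, split into 64-step patches
    # with stride 8, fed to a 3-layer, 16-head transformer (d_model=96).
    print(cfg["architectures"])  # ['GridTSTForTimeSeriesPrediction']
    print(cfg["seq_len"], cfg["num_channels"], cfg["patch_len"], cfg["stride"])

    # If the repository provides the custom GridTST code, the config could
    # also be loaded via transformers (hypothetical repo id):
    # from transformers import AutoConfig
    # config = AutoConfig.from_pretrained("<user>/<repo>", trust_remote_code=True)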