LibreFLUX / transformer / config.json
{
  "_class_name": "FluxTransformer2DModelWithMasking",
  "_diffusers_version": "0.30.3",
  "_name_or_path": "/home/user/storage2/flux_merges/dedistilled",
  "attention_head_dim": 128,
  "axes_dims_rope": [
    16,
    56,
    56
  ],
  "guidance_embeds": false,
  "in_channels": 64,
  "joint_attention_dim": 4096,
  "num_attention_heads": 24,
  "num_layers": 19,
  "num_single_layers": 38,
  "patch_size": 1,
  "pooled_projection_dim": 768
}
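
For illustration, here is a minimal sketch of how these fields map onto a FLUX transformer in diffusers. Note that the checkpoint's "_class_name" is FluxTransformer2DModelWithMasking, a custom subclass defined in the LibreFLUX repository; the stock FluxTransformer2DModel, which shares exactly these config parameters, is used below only as an assumption to show the architecture the config describes. This creates the module with random weights; the real weights come from the LibreFLUX/transformer checkpoint.

# Sketch: instantiate the FLUX transformer architecture from this config.
# Assumes stock diffusers; the repo's actual class is a custom subclass.
from diffusers import FluxTransformer2DModel

config = {
    "attention_head_dim": 128,
    "axes_dims_rope": [16, 56, 56],   # RoPE dims split across the 3 position axes
    "guidance_embeds": False,         # guidance embedding off (de-distilled model)
    "in_channels": 64,                # packed latent channels
    "joint_attention_dim": 4096,      # T5-XXL text encoder hidden size
    "num_attention_heads": 24,        # 24 heads x 128 dims = 3072 hidden size
    "num_layers": 19,                 # double-stream (joint) transformer blocks
    "num_single_layers": 38,          # single-stream transformer blocks
    "patch_size": 1,
    "pooled_projection_dim": 768,     # CLIP pooled text embedding size
}

# from_config builds the architecture from the parameter dict (random init).
model = FluxTransformer2DModel.from_config(config)

The "guidance_embeds": false setting is the notable departure from the base FLUX.1 schnell/dev configs: consistent with the "_name_or_path" pointing at a de-distilled merge, this model does not take a distilled-guidance embedding input.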