{
"add_teeth": false,
"encoder_feat_dim": 1024,
"encoder_freeze": false,
"encoder_grad_ckpt": true,
"encoder_model_name": "dinov2_vitl14_reg",
"encoder_type": "dinov2_fusion",
"expr_param_dim": 10,
"fix_opacity": false,
"fix_rotation": false,
"flame_subdivide_num": 1,
"flame_type": "flame",
"gs_clip_scaling": 0.01,
"gs_mlp_network_config": {
"activation": "silu",
"n_hidden_layers": 2,
"n_neurons": 512
},
"gs_query_dim": 1024,
"gs_sh": 3,
"gs_use_rgb": true,
"gs_xyz_offset_max_step": 0.2,
"has_disc": false,
"human_model_path": "./pretrained_models/human_model_files",
"latent_query_points_type": "e2e_flame",
"oral_mesh_flag": false,
"pcl_dim": 1024,
"scale_sphere": false,
"shape_param_dim": 10,
"teeth_bs_flag": false,
"tf_grad_ckpt": true,
"transformer_dim": 1024,
"transformer_heads": 16,
"transformer_layers": 10,
"transformer_type": "sd3_cond",
"use_32d": false,
"use_conf_map": false,
"use_dual_attention": false,
"use_gag": false,
"use_projection_enhancement": false,
"use_sr": false,
"use_sym_proj": false
}