LHM-1B-HF / config.json
{
  "cano_pose_type": 1,
  "dense_sample_pts": 40000,
  "encoder_feat_dim": 1024,
  "encoder_freeze": false,
  "encoder_grad_ckpt": true,
  "encoder_model_name": "dinov2_vitl14_reg",
  "encoder_type": "dinov2_fusion",
  "expr_param_dim": 100,
  "facesr": true,
  "fine_encoder_feat_dim": 1536,
  "fine_encoder_freeze": true,
  "fine_encoder_model_name": "./pretrained_models/sapiens/pretrained/checkpoints/sapiens_1b/sapiens_1b_epoch_173_torchscript.pt2",
  "fine_encoder_type": "sapiens",
  "fix_opacity": false,
  "fix_rotation": false,
  "gs_clip_scaling": [
    100,
    0.01,
    0.05,
    3000
  ],
  "gs_mlp_network_config": {
    "activation": "silu",
    "n_hidden_layers": 2,
    "n_neurons": 512
  },
  "gs_query_dim": 1024,
  "gs_sh": 3,
  "gs_use_rgb": true,
  "gs_xyz_offset_max_step": 1.0,
  "human_model_path": "./pretrained_models/human_model_files",
  "latent_query_points_type": "e2e_smplx_sub1",
  "model_name": "SapDinoLRMBHSD3_5",
  "pcl_dim": 1024,
  "shape_param_dim": 10,
  "smplx_subdivide_num": 1,
  "smplx_type": "smplx_2",
  "tf_grad_ckpt": true,
  "transformer_dim": 1024,
  "transformer_heads": 16,
  "transformer_layers": 15,
  "transformer_type": "sd3_mm_bh_cond",
  "use_face_id": true
}
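
For reference, a minimal sketch of fetching and parsing this file with huggingface_hub and the standard json module. The bare repo name "LHM-1B-HF" is an assumption taken from the page title; substitute the full "org/LHM-1B-HF" id as listed on the Hub.

    import json
    from huggingface_hub import hf_hub_download

    # Assumption: replace with the full "org/name" repo id from the Hub.
    REPO_ID = "LHM-1B-HF"

    config_path = hf_hub_download(repo_id=REPO_ID, filename="config.json")
    with open(config_path) as f:
        cfg = json.load(f)

    # A few fields that determine the model's size:
    print(cfg["model_name"])          # SapDinoLRMBHSD3_5
    print(cfg["transformer_dim"],     # 1024
          cfg["transformer_layers"],  # 15
          cfg["transformer_heads"])   # 16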
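
The nested "gs_mlp_network_config" block is self-describing: a SiLU MLP with 2 hidden layers of 512 neurons that decodes features into Gaussian-splat attributes. Below is a minimal PyTorch sketch of how such a config is commonly materialized; build_gs_mlp and its in_dim/out_dim arguments are hypothetical names for illustration, not LHM's actual API.

    import torch.nn as nn

    def build_gs_mlp(in_dim: int, out_dim: int, cfg: dict) -> nn.Sequential:
        # cfg mirrors "gs_mlp_network_config" above:
        #   {"activation": "silu", "n_hidden_layers": 2, "n_neurons": 512}
        act = {"silu": nn.SiLU, "relu": nn.ReLU}[cfg["activation"]]
        hidden = cfg["n_neurons"]
        layers = [nn.Linear(in_dim, hidden), act()]
        for _ in range(cfg["n_hidden_layers"] - 1):
            layers += [nn.Linear(hidden, hidden), act()]
        layers.append(nn.Linear(hidden, out_dim))
        return nn.Sequential(*layers)

    # Query features are 1024-d in this config ("gs_query_dim"), so in_dim
    # would plausibly be 1024; out_dim here is purely illustrative.
    mlp = build_gs_mlp(in_dim=1024, out_dim=32,
                       cfg={"activation": "silu", "n_hidden_layers": 2, "n_neurons": 512})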