{
"data_source": "/scratch/ssd004/datasets/cellxgene/scb_strict/human",
"save_dir": "/scratch/ssd004/datasets/cellxgene/save/cellxgene_census_human-Dec18-13-52-2023",
"load_model": "/scratch/ssd004/datasets/cellxgene/save/scGPT_human",
"n_hvg": null,
"valid_size_or_ratio": 0.003,
"dist_backend": "nccl",
"grad_accu_steps": 1,
"pad_token": "<pad>",
"input_style": "binned",
"input_emb_style": "continuous",
"n_bins": 51,
"max_seq_len": 1200,
"training_tasks": "both",
"dist_url": "tcp://gpu183.cluster.local:54165",
"mask_ratio": [
0.25,
0.5,
0.75
],
"trunc_by_sample": true,
"vocab_path": "/scratch/ssd004/datasets/cellxgene/scFormer/scformer/tokenizer/default_census_vocab.json",
"rank": 0,
"batch_size": 24,
"eval_batch_size": 48,
"epochs": 10,
"lr": 0.0001,
"scheduler_interval": 100,
"scheduler_factor": 0.99,
"warmup_ratio_or_step": 10000.0,
"no_cls": false,
"no_cce": true,
"fp16": true,
"fast_transformer": true,
"annotation_source": "/scratch/ssd004/datasets/cellxgene/tabula_sapiens/parquet/",
"annotation_valid_size_or_ratio": 0.1,
"nlayers": 12,
"nheads": 8,
"embsize": 512,
"d_hid": 512,
"dropout": 0.2,
"n_layers_cls": 3,
"annote_max_seq_len": 5000,
"log_interval": 500,
"save_interval": 1000,
"mask_value": -1,
"pad_value": -2,
"USE_CLS": true,
"USE_CCE": false,
"MVC": true,
"USE_GENERATIVE_TRAINING": true,
"world_size": 8,
"distributed": true,
"local_rank": 0,
"gpu": 0
}