{
"adamw_weight_decay": 0.01,
"attention": "xformers",
"cache_latents": true,
"clip_skip": 1,
"concepts_list": [
{
"class_data_dir": "",
"class_guidance_scale": 7.5,
"class_infer_steps": 40,
"class_negative_prompt": "",
"class_prompt": "photo of man",
"class_token": "",
"instance_data_dir": "I:\\_SD\\LORA\\_projects\\dkfotgr\\src\\100_dkfotgr",
"instance_prompt": "dkfotgr man",
"instance_token": "",
"is_valid": true,
"n_save_sample": 1,
"num_class_images_per": 4,
"sample_seed": -1,
"save_guidance_scale": 7.5,
"save_infer_steps": 20,
"save_sample_negative_prompt": "",
"save_sample_prompt": "photo of dkfotgr man",
"save_sample_template": ""
}
],
"concepts_path": "",
"custom_model_name": "",
"deis_train_scheduler": false,
"deterministic": false,
"ema_predict": false,
"epoch": 0,
"epoch_pause_frequency": 0,
"epoch_pause_time": 0,
"freeze_clip_normalization": false,
"gradient_accumulation_steps": 1,
"gradient_checkpointing": true,
"gradient_set_to_none": true,
"graph_smoothing": 50,
"half_model": false,
"train_unfrozen": false,
"has_ema": false,
"hflip": false,
"infer_ema": false,
"initial_revision": 0,
"learning_rate": 2e-06,
"learning_rate_min": 1e-06,
"lifetime_revision": 0,
"lora_learning_rate": 0.0002,
"lora_model_name": null,
"lora_unet_rank": 4,
"lora_txt_rank": 4,
"lora_txt_learning_rate": 0.0002,
"lora_txt_weight": 1,
"lora_weight": 1,
"lr_cycles": 1,
"lr_factor": 0.5,
"lr_power": 1,
"lr_scale_pos": 0.5,
"lr_scheduler": "constant_with_warmup",
"lr_warmup_steps": 0,
"max_token_length": 75,
"mixed_precision": "fp16",
"model_name": "dkfotgr",
"model_dir": "I:\\_SD\\stable-diffusion-webui\\models\\dreambooth\\dkfotgr",
"model_path": "I:\\_SD\\stable-diffusion-webui\\models\\dreambooth\\dkfotgr",
"num_train_epochs": 150,
"offset_noise": 0,
"optimizer": "8Bit Adam",
"pad_tokens": true,
"pretrained_model_name_or_path": "I:\\_SD\\stable-diffusion-webui\\models\\dreambooth\\dkfotgr\\working",
"pretrained_vae_name_or_path": "",
"prior_loss_scale": false,
"prior_loss_target": 100.0,
"prior_loss_weight": 0.75,
"prior_loss_weight_min": 0.1,
"resolution": 512,
"revision": 0,
"sample_batch_size": 1,
"sanity_prompt": "",
"sanity_seed": 420420,
"save_ckpt_after": true,
"save_ckpt_cancel": false,
"save_ckpt_during": false,
"save_ema": true,
"save_embedding_every": 25,
"save_lora_after": true,
"save_lora_cancel": false,
"save_lora_during": false,
"save_preview_every": 5,
"save_safetensors": true,
"save_state_after": false,
"save_state_cancel": false,
"save_state_during": false,
"scheduler": "DEISMultistep",
"shuffle_tags": true,
"snapshot": [],
"split_loss": true,
"src": "E:\\-AI\\openjourney-v2.ckpt",
"stop_text_encoder": 1,
"strict_tokens": false,
"tf32_enable": false,
"train_batch_size": 1,
"train_imagic": false,
"train_unet": true,
"use_concepts": false,
"use_ema": false,
"use_lora": true,
"use_lora_extended": false,
"use_subdir": true,
"v2": false
}