Trained on 19 images of Meowbah; no natural-language captions were used, only tags. It can also be used as a style LoRA.
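For reference, each image's .txt caption is a comma-separated tag list with no natural-language sentences. Because keep_tokens_separator is set to "|||", tags before the separator stay fixed at the front of the caption, while tags after it are shuffled on every read (shuffle_caption) and each dropped with 10% probability (caption_tag_dropout_rate). A hypothetical caption, with illustrative tags that are not taken from the actual dataset, might look like:

meowbah ||| 1girl, green hair, black dress, smile, white background

Training config: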
[general]
keep_tokens_separator = "|||"
shuffle_caption = true
flip_aug = false
caption_extension = ".txt"
enable_bucket = true
bucket_no_upscale = true
bucket_reso_steps = 32
min_bucket_reso = 288
max_bucket_reso = 2048
[[datasets]]
resolution = 768
[[datasets.subsets]]
caption_tag_dropout_rate = 0.1
num_repeats = 11
image_dir = ""
[network_arguments]
network_dim = 64
network_alpha = 32
network_module = "networks.lora_anima"
network_train_unet_only = true
[optimizer_arguments]
learning_rate = 4e-4
lr_scheduler = "cosine_with_restarts"
lr_scheduler_num_cycles = 3
lr_scheduler_power = 0
lr_warmup_steps = 0.1
optimizer_type = "came_pytorch.CAME"
optimizer_args = [ "weight_decay=0.01", "enable_cautious_update=True", "enable_cautious_weight_decay=True", "enable_stochastic_rounding=True", "enable_8bit=True"]
[training_arguments]
pretrained_model_name_or_path = ""
qwen3 = ""
vae = ""
max_train_epochs = 15
train_batch_size = 32
seed = 42
xformers = false
use_flash_attn = false
sdpa = true
lowram = false
no_half_vae = false
gradient_checkpointing = true
gradient_accumulation_steps = 1
max_data_loader_n_workers = 4
persistent_data_loader_workers = true
mixed_precision = "bf16"
full_bf16 = false
cache_latents = true
cache_latents_to_disk = true
cache_text_encoder_outputs = false
[sampling]
sample_every_n_epochs = 1
sample_prompts = ""
sample_sampler = "euler_a"
sample_at_first = true
[saving_arguments]
save_precision = "bf16"
save_model_as = "safetensors"
save_every_n_epochs = 1
save_last_n_epochs = 7
output_name = ""
output_dir = ""
log_prefix = ""
logging_dir = ""
wandb_run_name = ""
wandb_api_key = ""
log_with = "wandb"

Recommended settings:
Sampler: euler/er_sde/euler_ancestral
CFG: 5.0
Steps: 20
LoRA weight: 1.0
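The config above combines what kohya-style sd-scripts normally split into two files: the [general]/[[datasets]] sections form a dataset config, and the remaining sections are trainer arguments. A minimal launch sketch under that assumption (the script name and file names are placeholders; the fork providing networks.lora_anima and the qwen3 option may use a different entry point):

accelerate launch train_network.py --dataset_config dataset.toml --config_file training.toml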
