>>106296055
What script? I've been using musubi-tuner and include an activation tag at the front of my captions, but none of the LoRAs needed it in the prompt, as they tend to get overbaked.
# Train a Qwen-Image LoRA with musubi-tuner via Accelerate.
# Expects DATASET_CONFIG, OUTPUT_DIR, DATASET_NAME, and LOG_DIR to be set.
train_args=(
  # Model components
  --dit models/qwen_image_bf16.safetensors
  --vae models/diffusion_pytorch_model.safetensors
  --text_encoder models/qwen_2.5_vl_7b.safetensors
  # Data
  --dataset_config "${DATASET_CONFIG}"
  --max_data_loader_n_workers 2
  --persistent_data_loader_workers
  # Precision / attention / memory
  --sdpa
  --mixed_precision bf16
  --gradient_checkpointing
  # Flow-matching schedule
  --timestep_sampling shift
  --weighting_scheme none
  --discrete_flow_shift 3.0
  # Optimization
  --optimizer_type adamw8bit
  --learning_rate 1e-4
  # LoRA network
  --network_module networks.lora_qwen_image
  --network_dim 32
  --network_alpha 32
  # Schedule / reproducibility
  --max_train_epochs 10
  --save_every_n_epochs 1
  --seed 42
  # Outputs
  --output_dir "${OUTPUT_DIR}"
  --output_name "qwen-${DATASET_NAME}"
  --logging_dir "${LOG_DIR}"
)
accelerate launch src/musubi_tuner/qwen_image_train_network.py "${train_args[@]}"