{
  "model": "meta-llama/Meta-Llama-3.1-8B",
  "project_name": "l31-bod-qanda",
  "data_path": "/home/ryan/Documents/ai/autotrain/mashup/bod_questions",
  "train_split": "train",
  "valid_split": null,
  "add_eos_token": false,
  "block_size": -1,
  "model_max_length": 8192,
  "padding": null,
  "trainer": "sft",
  "use_flash_attention_2": false,
  "log": "wandb",
  "disable_gradient_checkpointing": false,
  "logging_steps": -1,
  "eval_strategy": "epoch",
  "save_total_limit": 1,
  "auto_find_batch_size": true,
  "mixed_precision": "fp16",
  "lr": 0.00002,
  "epochs": 80,
  "batch_size": 2,
  "warmup_ratio": 0.1,
  "gradient_accumulation": 1,
  "optimizer": "adamw_torch",
  "scheduler": "linear",
  "weight_decay": 0.0,
  "max_grad_norm": 1.0,
  "seed": 42,
  "chat_template": null,
  "quantization": "int8",
  "target_modules": "all-linear",
  "merge_adapter": false,
  "peft": true,
  "lora_r": 16,
  "lora_alpha": 32,
  "lora_dropout": 0.05,
  "model_ref": null,
  "dpo_beta": 0.1,
  "max_prompt_length": 128,
  "max_completion_length": null,
  "prompt_text_column": "prompt",
  "text_column": "text",
  "rejected_text_column": "rejected",
  "push_to_hub": false,
  "username": null,
  "unsloth": false
}
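
This is an AutoTrain Advanced LLM config: `"trainer": "sft"` selects supervised fine-tuning, so the DPO-only fields (`model_ref`, `dpo_beta`, `max_prompt_length`, `rejected_text_column`) are ignored on this run, and `"peft": true` with `"quantization": "int8"` means a LoRA adapter is trained on top of an 8-bit base model rather than full fine-tuning. The `-1` values (`block_size`, `logging_steps`) follow AutoTrain's convention of "derive automatically". A minimal sketch for sanity-checking the file before launching a long run is below; it assumes the config is saved as `l31-bod-qanda.json`, and the `LLMTrainingParams` import path reflects recent `autotrain-advanced` releases and may differ across versions (the officially supported launch route is the `autotrain` CLI):

```python
# Sketch: validate this config against AutoTrain's parameter schema
# before training. Assumptions: autotrain-advanced is installed, the
# config is saved as l31-bod-qanda.json, and LLMTrainingParams lives
# at this module path (version-dependent).
import json

from autotrain.trainers.clm.params import LLMTrainingParams  # assumed path

with open("l31-bod-qanda.json") as f:
    raw = json.load(f)

# LLMTrainingParams is a pydantic model, so construction rejects
# misspelled keys and type errors up front instead of mid-run.
params = LLMTrainingParams(**raw)
print(params.model, params.trainer, params.epochs, params.lora_r)
```

Catching a bad key at validation time is cheap; with 80 epochs over an 8B model, discovering the same typo after the job has been queued and the base weights downloaded is not.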