```json
{
  "auto_mapping": null,
  "base_model_name_or_path": "mistralai/Mistral-7B-v0.1",
  "bias": "none",
  "enable_lora": null,
  "fan_in_fan_out": false,
  "inference_mode": true,
  "lora_alpha": 16,
  "lora_dropout": 0.05,
  "merge_weights": false,
  "modules_to_save": null,
  "number_of_adapter_pre_layer": 8,
  "peft_type": "M_LORA",
  "r": 16,
  "target_modules": [
    "v_proj",
    "k_proj"
  ],
  "task_type": "CAUSAL_LM"
}
```
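
For reference, here is a minimal sketch of how such a config might be consumed. Note that `"peft_type": "M_LORA"` and `"number_of_adapter_pre_layer"` are not part of stock PEFT; they come from a multi-adapter LoRA fork, so loading this file directly with `PeftConfig.from_pretrained` would fail. The sketch below simply reads the JSON and maps the overlapping fields onto a standard `LoraConfig`; the file name `adapter_config.json` is an assumption about where the config is saved.

```python
import json

from peft import LoraConfig

# Assumed location of the config shown above.
with open("adapter_config.json") as f:
    cfg = json.load(f)

# "M_LORA" and "number_of_adapter_pre_layer" are fork-specific keys that
# stock peft does not understand, so we ignore them here and build a plain
# LoraConfig from the fields the two schemas share.
lora_config = LoraConfig(
    r=cfg["r"],                          # LoRA rank (16)
    lora_alpha=cfg["lora_alpha"],        # scaling factor (16)
    lora_dropout=cfg["lora_dropout"],    # dropout on the LoRA branch (0.05)
    bias=cfg["bias"],                    # "none": no bias params are trained
    target_modules=cfg["target_modules"],# only v_proj and k_proj get adapters
    fan_in_fan_out=cfg["fan_in_fan_out"],
    inference_mode=cfg["inference_mode"],
    task_type=cfg["task_type"],          # "CAUSAL_LM"
)
print(lora_config)
```

With `inference_mode` set to `true`, an adapter built from this config has its LoRA weights frozen, which is the expected state for a checkpoint distributed for inference rather than continued training.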