{
"base_model_name_or_path": null,
"do_qat": false,
"dtype": "float16",
"enable_lora_list": null,
"head_dim": null,
"lora_alpha": 64,
"lora_dropout": 0.1,
"lora_plus_scale": 1.0,
"lora_use_mixer": false,
"loraga": false,
"lorapro": false,
"merge_weights": false,
"mixer_num": 1,
"nola": false,
"nola_basis_num": 1,
"pissa": false,
"r": 32,
"rslora": false,
"scaling": 2.0,
"target_modules": [
".*q_proj$",
".*k_proj$",
".*v_proj$",
".*o_proj$",
".*down_proj$",
".*up_proj$",
".*gate_proj$"
],
"tensor_parallel_degree": 1,
"trainable_bias": null,
"trainable_modules": null,
"use_mora": false,
"use_quick_lora": false
}
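
For reference, a minimal sketch of how the fields above fit together, using only the Python standard library. It assumes the config is saved locally as lora_config.json (filename assumed, not given in the original). It checks the standard LoRA scaling relationship: with rslora set to false, scaling = lora_alpha / r = 64 / 32 = 2.0, which matches the stored value; rsLoRA would instead use lora_alpha / sqrt(r).

    # Sketch only: reads the adapter config and derives the expected scaling factor.
    # Assumption: the JSON above is stored as "lora_config.json" in the working directory.
    import json
    import math

    with open("lora_config.json") as f:
        cfg = json.load(f)

    # Standard LoRA uses alpha / r; rsLoRA uses alpha / sqrt(r).
    if cfg["rslora"]:
        derived_scaling = cfg["lora_alpha"] / math.sqrt(cfg["r"])
    else:
        derived_scaling = cfg["lora_alpha"] / cfg["r"]

    print("rank r          :", cfg["r"])               # 32
    print("lora_alpha      :", cfg["lora_alpha"])      # 64
    print("stored scaling  :", cfg["scaling"])         # 2.0
    print("derived scaling :", derived_scaling)        # 64 / 32 = 2.0
    print("target modules  :", cfg["target_modules"])  # attention q/k/v/o and MLP projections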