Qwen3-VL-8B-Instruct-FP8-Static / angelslim_config.json
{
"model_config": {
"name": "Qwen3VL",
"model_path": "Base Model Path",
"trust_remote_code": true,
"torch_dtype": "auto",
"device_map": "auto",
"low_cpu_mem_usage": true,
"use_cache": false,
"cache_dir": null
},
"compression_config": {
"name": [
"PTQ"
],
"quantization": {
"name": "fp8_static",
"save_name": "compressed-tensors",
"bits": 8,
"quant_method": {
"weight": "per-tensor",
"activation": "per-tensor"
},
"quant_helpers": [],
"smooth_alpha": 0.5,
"low_memory": false,
"cpu_convert": false,
"modules_to_quantize": [],
"zero_point": true,
"mse_range": false,
"ignore_layers": [
"model.visual.patch_embed.proj",
"model.lm_head",
"model.language_model.embed_tokens",
"model.visual.blocks.0.attn.qkv",
"model.visual.blocks.0.attn.proj",
"model.visual.blocks.0.mlp.linear_fc1",
"model.visual.blocks.0.mlp.linear_fc2",
"model.visual.blocks.1.attn.qkv",
"model.visual.blocks.1.attn.proj",
"model.visual.blocks.1.mlp.linear_fc1",
"model.visual.blocks.1.mlp.linear_fc2",
"model.visual.blocks.2.attn.qkv",
"model.visual.blocks.2.attn.proj",
"model.visual.blocks.2.mlp.linear_fc1",
"model.visual.blocks.2.mlp.linear_fc2",
"model.visual.blocks.3.attn.qkv",
"model.visual.blocks.3.attn.proj",
"model.visual.blocks.3.mlp.linear_fc1",
"model.visual.blocks.3.mlp.linear_fc2",
"model.visual.blocks.4.attn.qkv",
"model.visual.blocks.4.attn.proj",
"model.visual.blocks.4.mlp.linear_fc1",
"model.visual.blocks.4.mlp.linear_fc2",
"model.visual.blocks.5.attn.qkv",
"model.visual.blocks.5.attn.proj",
"model.visual.blocks.5.mlp.linear_fc1",
"model.visual.blocks.5.mlp.linear_fc2",
"model.visual.blocks.6.attn.qkv",
"model.visual.blocks.6.attn.proj",
"model.visual.blocks.6.mlp.linear_fc1",
"model.visual.blocks.6.mlp.linear_fc2",
"model.visual.blocks.7.attn.qkv",
"model.visual.blocks.7.attn.proj",
"model.visual.blocks.7.mlp.linear_fc1",
"model.visual.blocks.7.mlp.linear_fc2",
"model.visual.blocks.8.attn.qkv",
"model.visual.blocks.8.attn.proj",
"model.visual.blocks.8.mlp.linear_fc1",
"model.visual.blocks.8.mlp.linear_fc2",
"model.visual.blocks.9.attn.qkv",
"model.visual.blocks.9.attn.proj",
"model.visual.blocks.9.mlp.linear_fc1",
"model.visual.blocks.9.mlp.linear_fc2",
"model.visual.blocks.10.attn.qkv",
"model.visual.blocks.10.attn.proj",
"model.visual.blocks.10.mlp.linear_fc1",
"model.visual.blocks.10.mlp.linear_fc2",
"model.visual.blocks.11.attn.qkv",
"model.visual.blocks.11.attn.proj",
"model.visual.blocks.11.mlp.linear_fc1",
"model.visual.blocks.11.mlp.linear_fc2",
"model.visual.blocks.12.attn.qkv",
"model.visual.blocks.12.attn.proj",
"model.visual.blocks.12.mlp.linear_fc1",
"model.visual.blocks.12.mlp.linear_fc2",
"model.visual.blocks.13.attn.qkv",
"model.visual.blocks.13.attn.proj",
"model.visual.blocks.13.mlp.linear_fc1",
"model.visual.blocks.13.mlp.linear_fc2",
"model.visual.blocks.14.attn.qkv",
"model.visual.blocks.14.attn.proj",
"model.visual.blocks.14.mlp.linear_fc1",
"model.visual.blocks.14.mlp.linear_fc2",
"model.visual.blocks.15.attn.qkv",
"model.visual.blocks.15.attn.proj",
"model.visual.blocks.15.mlp.linear_fc1",
"model.visual.blocks.15.mlp.linear_fc2",
"model.visual.blocks.16.attn.qkv",
"model.visual.blocks.16.attn.proj",
"model.visual.blocks.16.mlp.linear_fc1",
"model.visual.blocks.16.mlp.linear_fc2",
"model.visual.blocks.17.attn.qkv",
"model.visual.blocks.17.attn.proj",
"model.visual.blocks.17.mlp.linear_fc1",
"model.visual.blocks.17.mlp.linear_fc2",
"model.visual.blocks.18.attn.qkv",
"model.visual.blocks.18.attn.proj",
"model.visual.blocks.18.mlp.linear_fc1",
"model.visual.blocks.18.mlp.linear_fc2",
"model.visual.blocks.19.attn.qkv",
"model.visual.blocks.19.attn.proj",
"model.visual.blocks.19.mlp.linear_fc1",
"model.visual.blocks.19.mlp.linear_fc2",
"model.visual.blocks.20.attn.qkv",
"model.visual.blocks.20.attn.proj",
"model.visual.blocks.20.mlp.linear_fc1",
"model.visual.blocks.20.mlp.linear_fc2",
"model.visual.blocks.21.attn.qkv",
"model.visual.blocks.21.attn.proj",
"model.visual.blocks.21.mlp.linear_fc1",
"model.visual.blocks.21.mlp.linear_fc2",
"model.visual.blocks.22.attn.qkv",
"model.visual.blocks.22.attn.proj",
"model.visual.blocks.22.mlp.linear_fc1",
"model.visual.blocks.22.mlp.linear_fc2",
"model.visual.blocks.23.attn.qkv",
"model.visual.blocks.23.attn.proj",
"model.visual.blocks.23.mlp.linear_fc1",
"model.visual.blocks.23.mlp.linear_fc2",
"model.visual.blocks.24.attn.qkv",
"model.visual.blocks.24.attn.proj",
"model.visual.blocks.24.mlp.linear_fc1",
"model.visual.blocks.24.mlp.linear_fc2",
"model.visual.blocks.25.attn.qkv",
"model.visual.blocks.25.attn.proj",
"model.visual.blocks.25.mlp.linear_fc1",
"model.visual.blocks.25.mlp.linear_fc2",
"model.visual.blocks.26.attn.qkv",
"model.visual.blocks.26.attn.proj",
"model.visual.blocks.26.mlp.linear_fc1",
"model.visual.blocks.26.mlp.linear_fc2",
"model.visual.merger.linear_fc1",
"model.visual.merger.linear_fc2",
"model.visual.deepstack_merger_list.0.linear_fc1",
"model.visual.deepstack_merger_list.0.linear_fc2",
"model.visual.deepstack_merger_list.1.linear_fc1",
"model.visual.deepstack_merger_list.1.linear_fc2",
"model.visual.deepstack_merger_list.2.linear_fc1",
"model.visual.deepstack_merger_list.2.linear_fc2",
"lm_head"
],
"quant_analyse": false,
"quant_vit": false
},
"cache": null
},
"dataset_config": {
"name": "MultiModalDataset",
"data_path": "Data Path",
"max_seq_length": 4096,
"num_samples": 256,
"batch_size": 1,
"shuffle": false,
"inference_settings": null
},
"global_config": {
"save_path": "Save Model Path",
"max_seq_length": 4096,
"hidden_size": 4096,
"model_arch_type": "qwen3_vl",
"deploy_backend": "vllm"
},
"infer_config": null,
"debug_info": {
"python": "3.10.12 (main, Aug 29 2024, 16:22:46) [GCC 9.3.0]",
"angelslim": {
"name": "angelslim",
"version": "0.0.0.dev0",
"source": "pip"
},
"torch": {
"name": "torch",
"version": "2.6.0",
"source": "pip"
},
"transformers": {
"name": "transformers",
"version": "4.57.1",
"source": "pip"
},
"torch_cuda_version": "12.4"
}
}
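
The config above drives AngelSlim's PTQ pipeline: static FP8 quantization with per-tensor weight and activation scales, exported in compressed-tensors format for vLLM. The `ignore_layers` list (together with `"quant_vit": false`) keeps the vision tower — the patch embedding, visual blocks 0–26, the merger, and the deepstack mergers — plus the token embeddings and `lm_head` in their original precision. Below is a minimal sketch of how one might sanity-check those settings before launching a run; it uses only the standard library, and the local filename `angelslim_config.json` is an assumption for illustration.

```python
import json
from collections import Counter

# Assumed local path to the config file shown above.
CONFIG_PATH = "angelslim_config.json"

with open(CONFIG_PATH, encoding="utf-8") as f:
    cfg = json.load(f)

quant = cfg["compression_config"]["quantization"]
ignored = quant["ignore_layers"]

print(f"scheme                : {quant['name']} ({quant['bits']}-bit)")
print(f"weight granularity    : {quant['quant_method']['weight']}")
print(f"activation granularity: {quant['quant_method']['activation']}")
print(f"export format         : {quant['save_name']}")
print(f"deploy backend        : {cfg['global_config']['deploy_backend']}")

# Group the ignore list by its second path component to confirm that the
# vision tower and the language-model embeddings/head are excluded from
# quantization and stay in the original precision.
prefixes = Counter(
    name.split(".")[1] if "." in name else name for name in ignored
)
print(f"\n{len(ignored)} ignored modules:")
for prefix, count in prefixes.most_common():
    print(f"  {prefix:>16}: {count}")
```

Since `save_name` is `compressed-tensors` and `deploy_backend` is `vllm`, the exported checkpoint is intended to be served by vLLM, which can read FP8 scales from compressed-tensors checkpoints directly, so no separate conversion step should be needed in that setup.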