Commit ac3f40e · verified · 1 Parent(s): ba2f2a3
mlinmg committed

Add files using upload-large-folder tool
config.json CHANGED
@@ -43,7 +43,7 @@
   }
   },
   "format": "float-quantized",
-  "global_compression_ratio": 1.2225298370318187,
+  "global_compression_ratio": 1.2213219892928018,
   "ignore": [
   "audio_tower.layers.0.self_attn.k_proj",
   "audio_tower.layers.0.self_attn.v_proj",
@@ -237,6 +237,7 @@
   "audio_tower.layers.31.self_attn.out_proj",
   "audio_tower.layers.31.fc1",
   "audio_tower.layers.31.fc2",
+  "multi_modal_projector.linear",
   "language_model.lm_head"
   ],
   "kv_cache_scheme": null,
model-00001-of-00003.safetensors CHANGED
@@ -1,3 +1,3 @@
   version https://git-lfs.github.com/spec/v1
-  oid sha256:4fcb01ea001404317f93f1f862b7efb4dfef599613c1c832e2aed20a955b754a
-  size 4987511472
+  oid sha256:e24e21d15ba620564d61c906c24891623b0961b1287732860293dc016481be49
+  size 4992746040
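
Since the shard is stored via Git LFS, the pointer's oid is the SHA-256 of the actual file. A short sketch for verifying a downloaded shard against the new pointer (the local filename is an assumption):

import hashlib

expected = "e24e21d15ba620564d61c906c24891623b0961b1287732860293dc016481be49"

h = hashlib.sha256()
with open("model-00001-of-00003.safetensors", "rb") as f:
    # Hash the file in 1 MiB chunks to avoid loading ~5 GB into memory.
    for chunk in iter(lambda: f.read(1 << 20), b""):
        h.update(chunk)

assert h.hexdigest() == expected, "shard does not match the LFS pointer"
print("ok:", h.hexdigest())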
model.safetensors.index.json CHANGED
@@ -1,6 +1,6 @@
   {
   "metadata": {
-  "total_size": 10315669504
+  "total_size": 10320904192
   },
   "weight_map": {
   "audio_tower.conv1.bias": "model-00001-of-00003.safetensors",
@@ -1102,7 +1102,6 @@
   "language_model.model.layers.9.self_attn.v_proj.weight_scale": "model-00001-of-00003.safetensors",
   "language_model.model.norm.weight": "model-00002-of-00003.safetensors",
   "multi_modal_projector.linear.bias": "model-00001-of-00003.safetensors",
-  "multi_modal_projector.linear.weight": "model-00001-of-00003.safetensors",
-  "multi_modal_projector.linear.weight_scale": "model-00001-of-00003.safetensors"
+  "multi_modal_projector.linear.weight": "model-00001-of-00003.safetensors"
   }
   }
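
The index change mirrors the config change: with the projector excluded from quantization, its weight_scale tensor disappears from the weight_map and the total_size grows accordingly. A small sanity check against a local copy of the index (the path is an assumption):

import json

with open("model.safetensors.index.json") as f:
    index = json.load(f)

wm = index["weight_map"]
print(index["metadata"]["total_size"])                    # 10320904192 after this commit
print("multi_modal_projector.linear.weight" in wm)        # True
print("multi_modal_projector.linear.weight_scale" in wm)  # False, removed by this commit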
recipe.yaml CHANGED
@@ -1,6 +1,6 @@
 default_stage:
   default_modifiers:
     QuantizationModifier:
-      ignore: ['re:.*lm_head', 're:audio_tower.*']
+      ignore: ['re:.*lm_head', 're:audio_tower.*', 're:multi_modal_projector.*']
       targets: [Linear]
       scheme: FP8_DYNAMIC
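
For context, the updated recipe corresponds to a QuantizationModifier roughly like the sketch below, built with llm-compressor's Python API. This is an illustration only, not the exact command used to produce this commit, and import paths vary between llm-compressor versions:

from llmcompressor.modifiers.quantization import QuantizationModifier

# Mirrors the updated recipe.yaml: FP8 dynamic quantization of Linear layers,
# skipping lm_head, the audio tower, and (new in this commit) the multimodal projector.
recipe = QuantizationModifier(
    targets="Linear",
    scheme="FP8_DYNAMIC",
    ignore=["re:.*lm_head", "re:audio_tower.*", "re:multi_modal_projector.*"],
)

# The recipe would then be applied with llm-compressor's oneshot entry point,
# e.g. oneshot(model=model, recipe=recipe), where `model` is the loaded model.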