SekoiaTree committed
Commit fc35f27 · verified · 1 Parent(s): f2116a9

Upload Qwen3ForCausalLM

Files changed (2):
  1. config.json +13 -30
  2. model.safetensors +2 -2
config.json CHANGED
@@ -48,36 +48,19 @@
   "num_hidden_layers": 28,
   "num_key_value_heads": 8,
   "quantization_config": {
-    "config_groups": {
-      "group_0": {
-        "input_activations": null,
-        "output_activations": null,
-        "targets": [
-          "Linear"
-        ],
-        "weights": {
-          "actorder": null,
-          "block_structure": null,
-          "dynamic": false,
-          "group_size": null,
-          "num_bits": 8,
-          "observer": "minmax",
-          "observer_kwargs": {},
-          "strategy": "channel",
-          "symmetric": true,
-          "type": "int"
-        }
-      }
-    },
-    "format": "pack-quantized",
-    "global_compression_ratio": null,
-    "ignore": [
-      "lm_head"
-    ],
-    "kv_cache_scheme": null,
-    "quant_method": "compressed-tensors",
-    "quantization_status": "compressed",
-    "sparsity_config": {}
+    "_load_in_4bit": true,
+    "_load_in_8bit": false,
+    "bnb_4bit_compute_dtype": "float32",
+    "bnb_4bit_quant_storage": "uint8",
+    "bnb_4bit_quant_type": "fp4",
+    "bnb_4bit_use_double_quant": false,
+    "llm_int8_enable_fp32_cpu_offload": false,
+    "llm_int8_has_fp16_weight": false,
+    "llm_int8_skip_modules": null,
+    "llm_int8_threshold": 6.0,
+    "load_in_4bit": true,
+    "load_in_8bit": false,
+    "quant_method": "bitsandbytes"
   },
   "rms_norm_eps": 1e-06,
   "rope_scaling": null,
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:c2809587d85194d1a0c17bc2cb65e1377525b2caa028a68573952508eec9b0e4
-size 752467504
+oid sha256:29c472906a077e3bdfc9d392b4ec01b26b699ce093559c330e727d469e972354
+size 559155940
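The pointer swap reflects the requantized weights: the blob shrinks from 752,467,504 to 559,155,940 bytes, about 74% of the old size (less than the naive 2x reduction from int8 to 4-bit, presumably because embeddings, norms, and per-block quantization scales stay in higher precision). As a small sketch, assuming the blob has been downloaded locally (the path is a placeholder), the file can be checked against the new pointer's oid and size:

```python
import hashlib
from pathlib import Path

# Values taken from the new LFS pointer in this commit.
EXPECTED_OID = "29c472906a077e3bdfc9d392b4ec01b26b699ce093559c330e727d469e972354"
EXPECTED_SIZE = 559_155_940  # bytes

path = Path("model.safetensors")  # placeholder: local copy of the fetched blob
digest = hashlib.sha256()
with path.open("rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
        digest.update(chunk)

assert path.stat().st_size == EXPECTED_SIZE, "size mismatch with LFS pointer"
assert digest.hexdigest() == EXPECTED_OID, "sha256 mismatch with LFS pointer"
print("model.safetensors matches the LFS pointer")
```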