SekoiaTree committed on
Commit f2116a9 · verified · 1 Parent(s): f9a4f5b

Upload Qwen3ForCausalLM

Files changed (2)
  1. config.json +2 -13
  2. model.safetensors +2 -2
config.json CHANGED
@@ -50,18 +50,7 @@
   "quantization_config": {
     "config_groups": {
       "group_0": {
-        "input_activations": {
-          "actorder": null,
-          "block_structure": null,
-          "dynamic": true,
-          "group_size": null,
-          "num_bits": 8,
-          "observer": null,
-          "observer_kwargs": {},
-          "strategy": "token",
-          "symmetric": true,
-          "type": "int"
-        },
+        "input_activations": null,
         "output_activations": null,
         "targets": [
           "Linear"
@@ -80,7 +69,7 @@
         }
       }
     },
-    "format": "int-quantized",
+    "format": "pack-quantized",
     "global_compression_ratio": null,
     "ignore": [
       "lm_head"
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:21b5639e5e8586621c311d353193b0c86eaac44600f53234477054ebb0b00b4f
-size 752443408
+oid sha256:c2809587d85194d1a0c17bc2cb65e1377525b2caa028a68573952508eec9b0e4
+size 752467504
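
The config.json diff drops the dynamic int8 input-activation spec and switches the compressed-tensors serialization format from "int-quantized" to "pack-quantized", which suggests weight-only, packed integer storage. A minimal loading sketch, assuming the compressed-tensors package is installed alongside transformers; the repo id below is a placeholder, not this repository's actual id:

# Minimal loading sketch (assumptions: `compressed-tensors` is installed alongside
# transformers, and "SekoiaTree/<this-repo>" is a placeholder for the real repo id).
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "SekoiaTree/<this-repo>"  # placeholder repo id

tokenizer = AutoTokenizer.from_pretrained(model_id)
# transformers reads quantization_config from config.json, so the pack-quantized
# weights should load without extra arguments here.
model = AutoModelForCausalLM.from_pretrained(model_id)

inputs = tokenizer("Hello", return_tensors="pt")
output = model.generate(**inputs, max_new_tokens=20)
print(tokenizer.decode(output[0], skip_special_tokens=True))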