Add files using upload-large-folder tool
- Llama-4-Scout-17B-16E-Instruct-UD-Q2_K_XL.gguf +2 -2
- Llama-4-Scout-17B-16E-Instruct-UD-Q3_K_XL.gguf +2 -2
- Q4_K_M/Llama-4-Scout-17B-16E-Instruct-Q4_K_M-00001-of-00002.gguf +2 -2
- UD-Q4_K_XL/Llama-4-Scout-17B-16E-Instruct-UD-Q4_K_XL-00001-of-00002.gguf +2 -2
- config.json +1 -1
- mmproj-F16.gguf +1 -1
- mmproj-F32.gguf +1 -1
Llama-4-Scout-17B-16E-Instruct-UD-Q2_K_XL.gguf
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:ce0b9ca08c974ce4d2d1e9a405f49e7892423bab5e636fbc1bc1e3e38ac0ce0c
+size 42376957408
Llama-4-Scout-17B-16E-Instruct-UD-Q3_K_XL.gguf
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:c2925898c6b78d5fbcd0c6faf4425db1ff974d56d0ed2c40577520f140477b8f
+size 49019031008
Q4_K_M/Llama-4-Scout-17B-16E-Instruct-Q4_K_M-00001-of-00002.gguf
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:ebc59d1e1b2b5184322e625e31785d8b528caee0b06b47870f0796742162da67
+size 49848379744
UD-Q4_K_XL/Llama-4-Scout-17B-16E-Instruct-UD-Q4_K_XL-00001-of-00002.gguf
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:42dbf297393eb73b9aca6979aea2752b176793398f0472dbdcc09ffa28e3b6a4
+size 49647697664
config.json
CHANGED
@@ -156,7 +156,7 @@
 },
 "tie_word_embeddings": false,
 "torch_dtype": "bfloat16",
-"transformers_version": "4.52.
+"transformers_version": "4.52.3",
 "unsloth_fixed": true,
 "vision_config": {
 "attention_dropout": 0.0,
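The config.json change only updates the "transformers_version" field recorded in the checkpoint to 4.52.3. A minimal sketch (not part of this commit; it assumes a local copy of config.json and the packaging library) for comparing the installed Transformers build against that recorded version:

import json

from packaging import version
import transformers

# Read the version string recorded in the repository's config.json.
with open("config.json") as f:
    cfg = json.load(f)

required = cfg["transformers_version"]  # "4.52.3" after this commit

# Warn if the locally installed transformers is older than the recorded one.
if version.parse(transformers.__version__) < version.parse(required):
    print(f"transformers {transformers.__version__} is older than the "
          f"{required} recorded in config.json; consider upgrading.")
else:
    print(f"transformers {transformers.__version__} >= {required}: OK")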
mmproj-F16.gguf
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:cccf45eb0c1d980a2999780f68c01506f47a571b62f6bb9e17297cbe7ceaca83
 size 1746780928
mmproj-F32.gguf
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:329674145560ebffc2dfddbf3c5304159eaea24d19d2a9571b94b8b5e8dacffa
 size 3487762176
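Each pointer diff above only updates the SHA-256 and byte size that Git LFS records for the underlying GGUF object. A minimal sketch (not part of this commit; the local path is just an example, and the expected values are copied from the UD-Q2_K_XL pointer above) for verifying a downloaded file against those recorded values:

import hashlib

# Values taken from the Llama-4-Scout-17B-16E-Instruct-UD-Q2_K_XL.gguf pointer diff.
expected_sha256 = "ce0b9ca08c974ce4d2d1e9a405f49e7892423bab5e636fbc1bc1e3e38ac0ce0c"
expected_size = 42376957408

# Example local path to the downloaded file; adjust to wherever it was saved.
path = "Llama-4-Scout-17B-16E-Instruct-UD-Q2_K_XL.gguf"

h = hashlib.sha256()
size = 0
with open(path, "rb") as f:
    # Hash in 1 MiB chunks so the multi-GB file never has to fit in memory.
    for chunk in iter(lambda: f.read(1 << 20), b""):
        h.update(chunk)
        size += len(chunk)

assert size == expected_size, f"size mismatch: {size} != {expected_size}"
assert h.hexdigest() == expected_sha256, "sha256 mismatch"
print("OK: file matches the LFS pointer")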