Commit 7358c51 (verified) by ShantanuT01 · Parent(s): 1d7171e

Upload folder using huggingface_hub
config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "meta-llama/Llama-3.2-1B-Instruct",
+  "_name_or_path": "huihui-ai/Llama-3.2-1B-Instruct-abliterated",
   "architectures": [
     "LlamaForCausalLM"
   ],
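Context for the change above: `_name_or_path` is the checkpoint identifier that transformers records when a config is saved, so this diff suggests the export was produced from the abliterated checkpoint rather than the original Meta release. A minimal sketch of how the field typically ends up with that value (repo id taken from the diff; the output directory is hypothetical):

    from transformers import AutoModelForCausalLM

    # Loading stamps the source repo id into config._name_or_path ...
    model = AutoModelForCausalLM.from_pretrained(
        "huihui-ai/Llama-3.2-1B-Instruct-abliterated"
    )
    print(model.config._name_or_path)

    # ... and save_pretrained() writes that value back out to config.json.
    model.save_pretrained("./export")  # hypothetical local path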
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:8c6b7e25c877e97deef6cedb3a43231dbf357131b309768ca96eac83a2025aa7
+oid sha256:a1b8e70f9dc0f970d59a9454ce43771441ebdef87464d7b615ae35ddb7da1e33
 size 4943274328
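Only the Git LFS pointer changes here; the ~4.9 GB weights themselves live in LFS storage, addressed by SHA-256. A quick integrity check for a downloaded copy, with the hash and size copied from the new pointer (the local filename is an assumption):

    import hashlib
    from pathlib import Path

    EXPECTED_OID = "a1b8e70f9dc0f970d59a9454ce43771441ebdef87464d7b615ae35ddb7da1e33"
    EXPECTED_SIZE = 4943274328

    path = Path("model.safetensors")  # assumed download location
    assert path.stat().st_size == EXPECTED_SIZE, "size mismatch"

    sha = hashlib.sha256()
    with path.open("rb") as f:
        # Hash in 1 MiB chunks so the file never sits in memory at once.
        for chunk in iter(lambda: f.read(1 << 20), b""):
            sha.update(chunk)
    assert sha.hexdigest() == EXPECTED_OID, "checksum mismatch"
    print("model.safetensors matches the LFS pointer")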
special_tokens_map.json CHANGED
@@ -12,5 +12,12 @@
     "normalized": false,
     "rstrip": false,
     "single_word": false
-  }
+  },
+  "pad_token": {
+    "content": "<|eot_id|>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  }
 }
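The substantive change is a new pad_token entry that reuses <|eot_id|>, which is already the EOS token. Llama 3.2 ships without a pad token, and pointing the pad token at EOS is the usual workaround; a minimal sketch of how this map typically gets written (repo id from config.json above, output path hypothetical):

    from transformers import AutoTokenizer

    tok = AutoTokenizer.from_pretrained("huihui-ai/Llama-3.2-1B-Instruct-abliterated")

    # No pad token out of the box; reuse EOS ("<|eot_id|>") for padding.
    tok.pad_token = tok.eos_token

    # save_pretrained() serializes the token, with its lstrip/rstrip/
    # normalized/single_word flags, into special_tokens_map.json.
    tok.save_pretrained("./export")  # hypothetical local path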
tokenizer.json CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:6b9e4e7fb171f92fd137b777cc2714bf87d11576700a1dcd7a399e7bbe39537b
-size 17209920
+oid sha256:65ff5472d095ccd9332d9e723153d7bc7226cb6be9c1bffda738b5ba2e71bf26
+size 17210084
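The new tokenizer.json is 164 bytes larger (17210084 − 17209920), consistent with the pad-token metadata added elsewhere in this commit.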
tokenizer_config.json CHANGED
@@ -2054,10 +2054,15 @@
   "clean_up_tokenization_spaces": true,
   "eos_token": "<|eot_id|>",
   "extra_special_tokens": {},
+  "max_length": null,
   "model_input_names": [
     "input_ids",
     "attention_mask"
   ],
   "model_max_length": 131072,
+  "pad_to_multiple_of": null,
+  "pad_token": "<|eot_id|>",
+  "pad_token_type_id": 0,
+  "padding_side": "left",
   "tokenizer_class": "PreTrainedTokenizer"
 }
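The added keys pin down padding behaviour, most notably "padding_side": "left". Left padding matters for decoder-only models like Llama: with right padding, batched generation would have to continue from pad tokens rather than from the end of each prompt. A short usage sketch under the same assumed repo id (requires PyTorch for the tensor output):

    from transformers import AutoTokenizer

    tok = AutoTokenizer.from_pretrained("huihui-ai/Llama-3.2-1B-Instruct-abliterated")

    prompts = ["Hello", "A much longer prompt than the first one"]

    # With padding_side="left", pads go before each prompt, so the final
    # position of every row is real text and generation resumes correctly.
    batch = tok(prompts, padding=True, return_tensors="pt")
    print(batch["input_ids"].shape)    # (2, longest_prompt_len)
    print(batch["attention_mask"][0])  # leading zeros mark the left padding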