Upload all model files
- LICENSE +71 -0
- README.md +235 -0
- chat_template.jinja +4 -0
- config.json +50 -0
- generation_config.json +7 -0
- model.safetensors +3 -0
- modeling_lfm2.py +945 -0
- requirements.txt +2 -0
- special_tokens_map.json +23 -0
- tokenizer.json +0 -0
- tokenizer_config.json +4074 -0
LICENSE
ADDED
@@ -0,0 +1,71 @@
LFM Open License v1.0

TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION

1. Definitions.

"License" shall mean the terms and conditions for use, reproduction, and distribution as defined by this document.

"Licensor" shall mean Liquid AI, Inc.

"Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity.

"You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License.

"Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files.

"Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types.

"Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work.

"Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof.

"Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution."

"Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work.

"Commercial Use" shall mean any use of the Work for direct or indirect commercial advantage or monetary compensation.

"Qualified Non-Profit Organization" shall mean a Legal Entity that is organized and operated exclusively for religious, charitable, scientific, testing for public safety, literary, or educational purposes, and which is exempt from federal income tax under Section 501(c)(3) of the United States Internal Revenue Code of 1986, as amended, or any equivalent non-profit or charitable organization in a foreign jurisdiction.

"Non-Commercial or Research Purposes" shall mean purposes that do not involve any use of the Work or a Derivative Work for Commercial Use.

"Threshold" shall mean annual revenue of 10 million United States dollars ($10,000,000) or more.

2. Grant of Copyright License. Subject to the terms and conditions of this License, including the Commercial Use limitation set forth in Section 5, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form.

3. Grant of Patent License. Subject to the terms and conditions of this License, including the Commercial Use limitation set forth in Section 5, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed.

4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions:

(a) You must give any other recipients of the Work or Derivative Works a copy of this License; and

(b) You must cause any modified files to carry prominent notices stating that You changed the files; and

(c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and

(d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License.

You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License.

5. Commercial Use Limitation.

(a) The rights granted under this License for Commercial Use are conditioned upon You or Your Legal Entity not exceeding the Threshold.

(b) Any Commercial Use of the Work or a Derivative Work by a Legal Entity that exceeds the Threshold is not licensed under this Agreement.

(c) The Threshold shall not apply to a Qualified Non-Profit Organization's use of the Work or a Derivative Work for Non-Commercial or Research Purposes.

6. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions.

7. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except for the reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file.

8. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License.

9. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages.

10. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability.

11. Termination. This License will terminate automatically and immediately if You fail to comply with any of its terms and conditions. Upon termination, You must cease all use of the Work and any Derivative Works and delete all copies in Your possession.

END OF TERMS AND CONDITIONS
README.md
ADDED
@@ -0,0 +1,235 @@
---
library_name: transformers
license: other
license_name: lfm1.0
license_link: LICENSE
language:
- en
- ar
- zh
- fr
- de
- ja
- ko
- es
pipeline_tag: text-generation
tags:
- liquid
- lfm2
- edge
---

<center>
<div style="text-align: center;">
<img
src="https://cdn-uploads.huggingface.co/production/uploads/61b8e2ba285851687028d395/7_6D7rWrLxp2hb6OHSV1p.png"
alt="Liquid AI"
style="width: 100%; max-width: 66%; height: auto; display: inline-block; margin-bottom: 0.5em; margin-top: 0.5em;"
/>
</div>

<a href="https://playground.liquid.ai/chat">
<svg width="114.8" height="20" viewBox="0 0 1300 200" xmlns="http://www.w3.org/2000/svg" role="img" aria-label="Liquid Playground" style="margin-bottom: 1em;">
<title>Liquid: Playground</title>
<g>
<rect fill="#fff" width="600" height="200"></rect>
<rect fill="url(#x)" x="600" width="700" height="200"></rect>
</g>
<g transform="translate(20, 30) scale(0.4, 0.4)">
<path d="M172.314 129.313L172.219 129.367L206.125 188.18C210.671 195.154 213.324 203.457 213.324 212.382C213.324 220.834 210.956 228.739 206.839 235.479L275.924 213.178L167.853 33.6L141.827 76.9614L172.314 129.313Z" fill="black"/>
<path d="M114.217 302.4L168.492 257.003C168.447 257.003 168.397 257.003 168.352 257.003C143.515 257.003 123.385 237.027 123.385 212.387C123.385 203.487 126.023 195.204 130.55 188.24L162.621 132.503L135.966 86.7327L60.0762 213.183L114.127 302.4H114.217Z" fill="black"/>
<path d="M191.435 250.681C191.435 250.681 191.43 250.681 191.425 250.686L129.71 302.4H221.294L267.71 226.593L191.435 250.686V250.681Z" fill="black"/>
</g>
<g aria-hidden="true" fill="#fff" text-anchor="start" font-family="Verdana,DejaVu Sans,sans-serif" font-size="110">
<text x="200" y="148" textLength="329" fill="#000" opacity="0.1">Liquid</text>
<text x="190" y="138" textLength="329" fill="#000">Liquid</text>
<text x="655" y="148" textLength="619" fill="#000" opacity="0.1">Playground</text>
<text x="645" y="138" textLength="619">Playground</text>
</g>

<linearGradient id="x" x1="0%" y1="0%" x2="100%" y2="0%">
<stop offset="0%" style="stop-color:#000000"></stop>
<stop offset="100%" style="stop-color:#000000"></stop>
</linearGradient>
</svg>
</a>
</center>

# LFM2-1.2B

LFM2 is a new generation of hybrid models developed by [Liquid AI](https://www.liquid.ai/), specifically designed for edge AI and on-device deployment. It sets a new standard in terms of quality, speed, and memory efficiency.

We're releasing the weights of three post-trained checkpoints with 350M, 700M, and 1.2B parameters. They provide the following key features to create AI-powered edge applications:

* **Fast training & inference** – LFM2 achieves 3x faster training compared to its previous generation. It also benefits from 2x faster decode and prefill speed on CPU compared to Qwen3.
* **Best performance** – LFM2 outperforms similarly-sized models across multiple benchmark categories, including knowledge, mathematics, instruction following, and multilingual capabilities.
* **New architecture** – LFM2 is a new hybrid Liquid model with multiplicative gates and short convolutions.
* **Flexible deployment** – LFM2 runs efficiently on CPU, GPU, and NPU hardware for flexible deployment on smartphones, laptops, or vehicles.

Find more information about LFM2 in our [blog post](https://www.liquid.ai/blog/liquid-foundation-models-v2-our-second-series-of-generative-ai-models).

## 📄 Model details

Due to their small size, **we recommend fine-tuning LFM2 models on narrow use cases** to maximize performance.
They are particularly suited for agentic tasks, data extraction, RAG, creative writing, and multi-turn conversations.
However, we do not recommend using them for tasks that are knowledge-intensive or require programming skills.

| Property | Value |
| ------------------- | ----------------------------- |
| **Parameters** | 1.2B |
| **Layers** | 16 (10 conv + 6 attn) |
| **Context length** | 32,768 tokens |
| **Vocabulary size** | 65,536 |
| **Precision** | bfloat16 |
| **Training budget** | 10 trillion tokens |
| **License** | LFM Open License v1.0 |

**Supported languages**: English, Arabic, Chinese, French, German, Japanese, Korean, and Spanish.

**Generation parameters**: We recommend the following sampling parameters (see the snippet below for setting them as defaults):
* `temperature=0.3`
* `min_p=0.15`
* `repetition_penalty=1.05`

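As a minimal sketch using the standard `transformers` `GenerationConfig` API (these are stock generation arguments, not LFM2-specific ones), you can register the recommended values as the model's default generation settings instead of passing them to every `generate()` call:

```python
from transformers import GenerationConfig

# Store the recommended sampling parameters as defaults.
# Any of them can still be overridden per call to `model.generate()`.
generation_config = GenerationConfig(
    do_sample=True,
    temperature=0.3,
    min_p=0.15,
    repetition_penalty=1.05,
)
# After loading the model (see "How to run LFM2" below):
# model.generation_config = generation_config
```
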
**Chat template**: LFM2 uses a ChatML-like chat template as follows:

```
<|startoftext|><|im_start|>system
You are a helpful assistant trained by Liquid AI.<|im_end|>
<|im_start|>user
What is C. elegans?<|im_end|>
<|im_start|>assistant
It's a tiny nematode that lives in temperate soil environments.<|im_end|>
```

You can apply it using the dedicated [`.apply_chat_template()`](https://huggingface.co/docs/transformers/en/chat_templating#applychattemplate) function from Hugging Face transformers, as shown below.
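
For example, the following sketch renders the template for a single user message; the output comment matches the template file shipped in this repository:

```python
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("LiquidAI/LFM2-1.2B")

prompt = tokenizer.apply_chat_template(
    [{"role": "user", "content": "What is C. elegans?"}],
    add_generation_prompt=True,  # append the "<|im_start|>assistant" header
    tokenize=False,              # return the rendered string instead of token ids
)
print(prompt)
# <|startoftext|><|im_start|>user
# What is C. elegans?<|im_end|>
# <|im_start|>assistant
```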

**Tool use**: Tool use with LFM2 consists of four main steps:
1. **Function definition**: LFM2 takes JSON function definitions as input (JSON objects between `<|tool_list_start|>` and `<|tool_list_end|>` special tokens), usually in the system prompt.
2. **Function call**: LFM2 writes Pythonic function calls (a Python list between `<|tool_call_start|>` and `<|tool_call_end|>` special tokens) as the assistant answer.
3. **Function execution**: The function call is executed and the result is returned (string between `<|tool_response_start|>` and `<|tool_response_end|>` special tokens) as a "tool" role.
4. **Final answer**: LFM2 interprets the outcome of the function call to address the original user prompt in plain text.

Here is a simple example of a conversation using tool use, followed by a sketch of how to extract the call on the client side:

```
<|startoftext|><|im_start|>system
List of tools: <|tool_list_start|>[{"name": "get_candidate_status", "description": "Retrieves the current status of a candidate in the recruitment process", "parameters": {"type": "object", "properties": {"candidate_id": {"type": "string", "description": "Unique identifier for the candidate"}}, "required": ["candidate_id"]}}]<|tool_list_end|><|im_end|>
<|im_start|>user
What is the current status of candidate ID 12345?<|im_end|>
<|im_start|>assistant
<|tool_call_start|>[get_candidate_status(candidate_id="12345")]<|tool_call_end|>Checking the current status of candidate ID 12345.<|im_end|>
<|im_start|>tool
<|tool_response_start|>{"candidate_id": "12345", "status": "Interview Scheduled", "position": "Clinical Research Associate", "date": "2023-11-20"}<|tool_response_end|><|im_end|>
<|im_start|>assistant
The candidate with ID 12345 is currently in the "Interview Scheduled" stage for the position of Clinical Research Associate, with an interview date set for 2023-11-20.<|im_end|>
```
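
On the client side, the Pythonic call between the tool-call tokens can be extracted with the standard `ast` module. This is a minimal, illustrative sketch (the delimiter tokens come from the format above; the parsing strategy itself is not part of the model's API):

```python
import ast

TOOL_CALL_START, TOOL_CALL_END = "<|tool_call_start|>", "<|tool_call_end|>"

def extract_tool_calls(assistant_text: str) -> list[tuple[str, dict]]:
    """Return (function_name, kwargs) pairs found between the tool-call tokens."""
    if TOOL_CALL_START not in assistant_text:
        return []
    segment = assistant_text.split(TOOL_CALL_START, 1)[1].split(TOOL_CALL_END, 1)[0]
    calls = []
    # The model emits a Python list of calls, e.g. [get_candidate_status(candidate_id="12345")]
    for node in ast.parse(segment.strip(), mode="eval").body.elts:
        kwargs = {kw.arg: ast.literal_eval(kw.value) for kw in node.keywords}
        calls.append((node.func.id, kwargs))
    return calls

text = '<|tool_call_start|>[get_candidate_status(candidate_id="12345")]<|tool_call_end|>Checking.'
print(extract_tool_calls(text))  # [('get_candidate_status', {'candidate_id': '12345'})]
```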

**Architecture**: Hybrid model with multiplicative gates and short convolutions: 10 double-gated short-range LIV convolution blocks and 6 grouped query attention (GQA) blocks.

**Pre-training mixture**: Approximately 75% English, 20% multilingual, and 5% code data sourced from the web and licensed materials.

**Training approach**:
* Knowledge distillation using [LFM1-7B](https://www.liquid.ai/blog/introducing-lfm-7b-setting-new-standards-for-efficient-language-models) as teacher model
* Very large-scale SFT on 50% downstream tasks, 50% general domains
* Custom DPO with length normalization and semi-online datasets
* Iterative model merging

## 🏃 How to run LFM2

> [!WARNING]
> ⚠️ Until LFM2 support is merged into the transformers library, loading the model requires setting `trust_remote_code=True`.

To run LFM2, you need Hugging Face [`transformers`](https://github.com/huggingface/transformers) v4.53.0.
You can update or install it with the following command: `pip install transformers==4.53.0`

Here is an example of how to generate an answer with transformers in Python:

```python
from transformers import AutoModelForCausalLM, AutoTokenizer

# Load model and tokenizer
model_id = "LiquidAI/LFM2-1.2B"
model = AutoModelForCausalLM.from_pretrained(
    model_id,
    device_map="auto",
    torch_dtype="bfloat16",
    trust_remote_code=True,
    # attn_implementation="flash_attention_2" <- uncomment on compatible GPU
)
tokenizer = AutoTokenizer.from_pretrained(model_id)

# Generate answer
prompt = "What is C. elegans?"
input_ids = tokenizer.apply_chat_template(
    [{"role": "user", "content": prompt}],
    add_generation_prompt=True,
    return_tensors="pt",
    tokenize=True,
).to(model.device)

output = model.generate(
    input_ids,
    do_sample=True,
    temperature=0.3,
    min_p=0.15,
    repetition_penalty=1.05,
    max_new_tokens=512,
)

print(tokenizer.decode(output[0], skip_special_tokens=False))

# <|startoftext|><|im_start|>user
# What is C. elegans?<|im_end|>
# <|im_start|>assistant
# C. elegans, also known as Caenorhabditis elegans, is a small, free-living
# nematode worm (roundworm) that belongs to the phylum Nematoda.
```

You can directly run and test the model with this [Colab notebook](https://colab.research.google.com/drive/1_q3jQ6LtyiuPzFZv7Vw8xSfPU5FwkKZY?usp=sharing).

## 🔧 How to fine-tune LFM2

We recommend fine-tuning LFM2 models on your use cases to maximize performance. A minimal programmatic SFT sketch follows the table below.

| Notebook | Description | Link |
|-------|------|------|
| SFT + LoRA | Supervised Fine-Tuning (SFT) notebook with a LoRA adapter in TRL. | <a href="https://colab.research.google.com/drive/1j5Hk_SyBb2soUsuhU0eIEA9GwLNRnElF?usp=sharing"><img src="https://cdn-uploads.huggingface.co/production/uploads/61b8e2ba285851687028d395/vlOyMEjwHa_b_LXysEu2E.png" width="120" alt="Colab link"></a> |
| DPO | Preference alignment with Direct Preference Optimization (DPO) in TRL. | <a href="https://colab.research.google.com/drive/1MQdsPxFHeZweGsNx4RH7Ia8lG8PiGE1t?usp=sharing"><img src="https://cdn-uploads.huggingface.co/production/uploads/61b8e2ba285851687028d395/vlOyMEjwHa_b_LXysEu2E.png" width="120" alt="Colab link"></a> |

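As a rough sketch of what the SFT + LoRA recipe looks like in code (assuming recent `trl`, `peft`, and `datasets` releases; the dataset name and hyperparameters are illustrative placeholders, and the notebooks above remain the reference):

```python
from datasets import load_dataset
from peft import LoraConfig
from transformers import AutoModelForCausalLM
from trl import SFTConfig, SFTTrainer

# Load the model with its custom code (see the warning in "How to run LFM2").
model = AutoModelForCausalLM.from_pretrained("LiquidAI/LFM2-1.2B", trust_remote_code=True)

# Illustrative conversational dataset; replace with data from your narrow use case.
dataset = load_dataset("trl-lib/Capybara", split="train")

trainer = SFTTrainer(
    model=model,
    train_dataset=dataset,
    args=SFTConfig(output_dir="lfm2-1.2b-sft"),
    peft_config=LoraConfig(r=16, lora_alpha=32, target_modules="all-linear"),
)
trainer.train()
```
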
## 📈 Performance

LFM2 outperforms similar-sized models across different evaluation categories.

### 1. Automated benchmarks

![image/png](https://cdn-uploads.huggingface.co/production/uploads/61b8e2ba285851687028d395/Ne9qFpKGvkPQ2V6vdLlN_.png)

In the following table, the best score in each column is in **bold** and the second best in *italics*.

| Model | MMLU | GPQA | IFEval | IFBench | GSM8K | MGSM | MMMLU |
|-------|------|------|--------|---------|-------|------|-------|
| LFM2-350M | 43.43 | 27.46 | 65.12 | 16.41 | 30.1 | 29.52 | 37.99 |
| LFM2-700M | 49.9 | 28.48 | 72.23 | 20.56 | 46.4 | 45.36 | 43.28 |
| LFM2-1.2B | *55.23* | **31.47** | **74.89** | *20.7* | *58.3* | *55.04* | **46.73** |
| Qwen3-0.6B | 44.93 | 22.14 | 64.24 | 19.75 | 36.47 | 41.28 | 30.84 |
| Qwen3-1.7B | **59.11** | 27.72 | *73.98* | **21.27** | 51.4 | **66.56** | *46.51* |
| Llama-3.2-1B-Instruct | 46.6 | *28.84* | 52.39 | 16.86 | 35.71 | 29.12 | 38.15 |
| gemma-3-1b-it | 40.08 | 21.07 | 62.9 | 17.72 | **59.59** | 43.6 | 34.43 |

### 2. LLM-as-a-Judge

![image/png](https://cdn-uploads.huggingface.co/production/uploads/61b8e2ba285851687028d395/UxbQSPZY01CVGBN6olUntZ.png)
![image/png](https://cdn-uploads.huggingface.co/production/uploads/61b8e2ba285851687028d395/h2Ceecc8pDJyZrqLIJbPX.png)

### 3. Inference

#### Throughput comparison on CPU in ExecuTorch

![image/png](https://cdn-uploads.huggingface.co/production/uploads/61b8e2ba285851687028d395/Aus5qkrAGKkq7eYcTGLOI.png)

#### Throughput comparison on CPU in Llama.cpp

![image/png](https://cdn-uploads.huggingface.co/production/uploads/61b8e2ba285851687028d395/rWDsQVJioHWbVuyWkUa0N.png)

## 📬 Contact

If you are interested in custom solutions with edge deployment, please contact [our sales team](https://www.liquid.ai/contact).
chat_template.jinja
ADDED
@@ -0,0 +1,4 @@
{{bos_token}}{% for message in messages %}{{'<|im_start|>' + message['role'] + '
' + message['content'] + '<|im_end|>' + '
'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant
' }}{% endif %}
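
This is the raw template rendered by `apply_chat_template`. As a minimal standalone sketch using the `jinja2` package (the `<|startoftext|>` value for `bos_token` is taken from the README examples above):

```python
from jinja2 import Template

template = Template(open("chat_template.jinja").read())
print(template.render(
    bos_token="<|startoftext|>",
    messages=[{"role": "user", "content": "What is C. elegans?"}],
    add_generation_prompt=True,
))
# <|startoftext|><|im_start|>user
# What is C. elegans?<|im_end|>
# <|im_start|>assistant
```
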
config.json
ADDED
@@ -0,0 +1,50 @@
{
  "architectures": [
    "LFM2ForCausalLM"
  ],
  "block_auto_adjust_ff_dim": true,
  "block_dim": 2048,
  "block_ff_dim": 12288,
  "block_ffn_dim_multiplier": 1.0,
  "block_mlp_init_scale": 1.0,
  "block_multiple_of": 256,
  "block_norm_eps": 1e-05,
  "block_out_init_scale": 1.0,
  "block_use_swiglu": true,
  "block_use_xavier_init": true,
  "bos_token_id": 1,
  "conv_L_cache": 3,
  "conv_bias": false,
  "conv_dim": 2048,
  "conv_dim_out": 2048,
  "conv_use_xavier_init": true,
  "eos_token_id": 7,
  "full_attn_idxs": [
    2,
    5,
    8,
    10,
    12,
    14
  ],
  "hidden_size": 2048,
  "initializer_range": 0.02,
  "max_position_embeddings": 128000,
  "model_type": "lfm2",
  "norm_eps": 1e-05,
  "num_attention_heads": 32,
  "num_heads": 32,
  "num_hidden_layers": 16,
  "num_key_value_heads": 8,
  "pad_token_id": 0,
  "rope_theta": 1000000.0,
  "torch_dtype": "bfloat16",
  "transformers_version": "4.53.0.dev0",
  "use_cache": true,
  "use_pos_enc": true,
  "vocab_size": 65536,
  "auto_map": {
    "AutoConfig": "modeling_lfm2.LFM2Config",
    "AutoModelForCausalLM": "modeling_lfm2.LFM2ForCausalLM"
  }
}
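
The `auto_map` entry is what routes the `Auto*` classes to `modeling_lfm2.py` in this repository when `trust_remote_code=True` is passed. A quick way to inspect the resulting configuration (the printed values follow from the fields above):

```python
from transformers import AutoConfig

config = AutoConfig.from_pretrained("LiquidAI/LFM2-1.2B", trust_remote_code=True)
print(config.model_type)         # lfm2
print(config.layers_block_type)  # ['conv', 'conv', 'attention', 'conv', ...] (10 conv + 6 attention)
```
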
generation_config.json
ADDED
@@ -0,0 +1,7 @@
{
  "_from_model_config": true,
  "bos_token_id": 1,
  "eos_token_id": 7,
  "pad_token_id": 0,
  "transformers_version": "4.53.0.dev0"
}
model.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:60fef6ef4481c533ce7427793bed50200b55b3c68d0d00c52bc56f207a9acecd
size 2340697936
modeling_lfm2.py
ADDED
@@ -0,0 +1,945 @@
from typing import Any, Callable, ClassVar, Dict, List, Optional, Tuple, Union

import torch
import torch.nn as nn
import torch.nn.functional as F
from transformers.cache_utils import DynamicCache
from transformers.configuration_utils import PretrainedConfig
from transformers.generation import GenerationMixin
from transformers.masking_utils import create_causal_mask
from transformers.modeling_flash_attention_utils import FlashAttentionKwargs
from transformers.modeling_layers import GradientCheckpointingLayer
from transformers.modeling_outputs import (
    BaseModelOutputWithPast,
    CausalLMOutputWithPast,
)
from transformers.modeling_rope_utils import ROPE_INIT_FUNCTIONS, dynamic_rope_update
from transformers.modeling_utils import ALL_ATTENTION_FUNCTIONS, PreTrainedModel
from transformers.processing_utils import Unpack
from transformers.utils import LossKwargs, auto_docstring, can_return_tuple, logging
from transformers.utils.import_utils import is_causal_conv1d_available

if is_causal_conv1d_available():
    from causal_conv1d import causal_conv1d_fn, causal_conv1d_update
else:
    causal_conv1d_fn, causal_conv1d_update = None, None


kernel_modules = (causal_conv1d_fn, causal_conv1d_update)
is_fast_path_available = all(kernel_modules)

logger = logging.get_logger(__name__)


# ========================================================
# Config class (to be removed once integrated into
# `transformers`). For now, it allows dynamic importing.
# ========================================================
# from .configuration_lfm2 import LFM2Config


class LFM2Config(PretrainedConfig):
    model_type = "lfm2"
    keys_to_ignore_at_inference: ClassVar = ["past_key_values"]

    def __init__(
        self,
        vocab_size: int = 65536,
        hidden_size: int = 2560,
        num_hidden_layers: int = 32,
        pad_token_id: int = 0,
        bos_token_id: int = 1,
        eos_token_id: int = 2,
        tie_embedding: bool = True,
        theta: float = 1000000.0,
        max_position_embeddings: int = 128_000,
        use_cache: bool = True,
        norm_eps: float = 0.00001,
        initializer_range: float = 0.02,
        num_attention_heads: int = 32,
        num_key_value_heads: int = 8,
        conv_bias: bool = False,
        conv_dim: int = 2560,
        conv_L_cache: int = 3,
        block_dim: int = 2560,
        block_ff_dim: int = 12288,
        block_multiple_of: int = 256,
        block_ffn_dim_multiplier: float = 1.0,
        block_auto_adjust_ff_dim: bool = True,
        full_attn_idxs: Optional[list[int]] = None,
        **kwargs,
    ):
        self.vocab_size = vocab_size
        self.hidden_size = hidden_size
        self.num_hidden_layers = num_hidden_layers
        self.rope_theta = theta
        self.max_position_embeddings = max_position_embeddings
        self.use_cache = use_cache
        self.norm_eps = norm_eps
        self.initializer_range = initializer_range

        # attn operator config
        self.num_attention_heads = num_attention_heads
        self.num_key_value_heads = num_key_value_heads
        self.full_attn_idxs = full_attn_idxs

        # custom operator config
        self.conv_bias = conv_bias
        self.conv_dim = conv_dim
        self.conv_L_cache = conv_L_cache

        # block config
        self.block_dim = block_dim
        self.block_ff_dim = block_ff_dim
        self.block_multiple_of = block_multiple_of
        self.block_ffn_dim_multiplier = block_ffn_dim_multiplier
        self.block_auto_adjust_ff_dim = block_auto_adjust_ff_dim

        super().__init__(
            pad_token_id=pad_token_id,
            bos_token_id=bos_token_id,
            eos_token_id=eos_token_id,
            tie_word_embeddings=tie_embedding,
            **kwargs,
        )

    @property
    def layers_block_type(self):
        return [
            "attention" if i in self.full_attn_idxs else "conv"
            for i in range(self.num_hidden_layers)
        ]

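# For the shipped config.json (num_hidden_layers=16, full_attn_idxs=[2, 5, 8, 10, 12, 14]),
# `layers_block_type` yields 10 "conv" and 6 "attention" entries, matching the
# "16 (10 conv + 6 attn)" layout in the README:
# ['conv', 'conv', 'attention', 'conv', 'conv', 'attention', 'conv', 'conv', 'attention',
#  'conv', 'attention', 'conv', 'attention', 'conv', 'attention', 'conv']
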
class LFM2RMSNorm(torch.nn.Module):
    def __init__(self, dim: int, eps: float = 1e-6):
        super().__init__()
        self.eps = eps
        self.weight = nn.Parameter(torch.ones(dim))

    def _norm(self, x):
        return x * torch.rsqrt(x.pow(2).mean(-1, keepdim=True) + self.eps)

    def forward(self, x):
        output = self._norm(x.float())
        return output.type_as(x) * self.weight


def rotate_half(x):
    """Rotates half the hidden dims of the input."""
    x1 = x[..., : x.shape[-1] // 2]
    x2 = x[..., x.shape[-1] // 2 :]
    return torch.cat((-x2, x1), dim=-1)


def apply_rotary_pos_emb(q, k, cos, sin, position_ids=None, unsqueeze_dim=1):
    """Applies Rotary Position Embedding to the query and key tensors."""
    cos = cos.unsqueeze(unsqueeze_dim)
    sin = sin.unsqueeze(unsqueeze_dim)
    q_embed = (q * cos) + (rotate_half(q) * sin)
    k_embed = (k * cos) + (rotate_half(k) * sin)
    return q_embed, k_embed

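# Note: `(q * cos) + (rotate_half(q) * sin)` is the standard "half-rotation" RoPE
# formulation used throughout `transformers`: each dimension pair (i, i + d/2) is
# rotated by the angle position * inv_freq[i], so the attention score q.k depends
# only on the relative position between the two tokens.
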
class LFM2RotaryEmbedding(nn.Module):
    def __init__(self, config: LFM2Config, device=None):
        super().__init__()
        # BC: "rope_type" was originally "type"
        if hasattr(config, "rope_scaling") and config.rope_scaling is not None:
            self.rope_type = config.rope_scaling.get("rope_type", config.rope_scaling.get("type"))
        else:
            self.rope_type = "default"
        self.max_seq_len_cached = config.max_position_embeddings
        self.original_max_seq_len = config.max_position_embeddings

        self.config = config
        self.rope_init_fn = ROPE_INIT_FUNCTIONS[self.rope_type]

        inv_freq, self.attention_scaling = self.rope_init_fn(self.config, device)
        self.register_buffer("inv_freq", inv_freq, persistent=False)
        self.original_inv_freq = self.inv_freq

    @torch.no_grad()
    @dynamic_rope_update  # power user: used with advanced RoPE types (e.g. dynamic rope)
    def forward(self, x, position_ids):
        inv_freq_expanded = self.inv_freq[None, :, None].float().expand(position_ids.shape[0], -1, 1).to(x.device)
        position_ids_expanded = position_ids[:, None, :].float()

        device_type = x.device.type if isinstance(x.device.type, str) and x.device.type != "mps" else "cpu"
        with torch.autocast(device_type=device_type, enabled=False):  # Force float32
            freqs = (inv_freq_expanded.float() @ position_ids_expanded.float()).transpose(1, 2)
            emb = torch.cat((freqs, freqs), dim=-1)
            cos = emb.cos() * self.attention_scaling
            sin = emb.sin() * self.attention_scaling

        return cos.to(dtype=x.dtype), sin.to(dtype=x.dtype)


def repeat_kv(hidden_states: torch.Tensor, n_rep: int) -> torch.Tensor:
    """
    This is the equivalent of torch.repeat_interleave(x, dim=1, repeats=n_rep). The hidden states go from (batch,
    num_key_value_heads, seqlen, head_dim) to (batch, num_attention_heads, seqlen, head_dim)
    """
    batch, num_key_value_heads, slen, head_dim = hidden_states.shape
    if n_rep == 1:
        return hidden_states
    hidden_states = hidden_states[:, :, None, :, :].expand(batch, num_key_value_heads, n_rep, slen, head_dim)
    return hidden_states.reshape(batch, num_key_value_heads * n_rep, slen, head_dim)

def eager_attention_forward(
|
191 |
+
module: nn.Module,
|
192 |
+
query: torch.Tensor,
|
193 |
+
key: torch.Tensor,
|
194 |
+
value: torch.Tensor,
|
195 |
+
attention_mask: Optional[torch.Tensor],
|
196 |
+
scaling: float,
|
197 |
+
dropout: float = 0.0,
|
198 |
+
**kwargs,
|
199 |
+
):
|
200 |
+
num_key_value_groups = query.shape[1] // key.shape[1]
|
201 |
+
key_states = repeat_kv(key, num_key_value_groups)
|
202 |
+
value_states = repeat_kv(value, num_key_value_groups)
|
203 |
+
|
204 |
+
attn_weights = torch.matmul(query, key_states.transpose(2, 3)) * scaling
|
205 |
+
if attention_mask is not None:
|
206 |
+
causal_mask = attention_mask[:, :, :, : key_states.shape[-2]]
|
207 |
+
attn_weights = attn_weights + causal_mask
|
208 |
+
else:
|
209 |
+
seq_len = key_states.shape[-2]
|
210 |
+
causal_mask = torch.triu(
|
211 |
+
torch.full((seq_len, seq_len), float("-inf"), device=attn_weights.device),
|
212 |
+
diagonal=1,
|
213 |
+
)
|
214 |
+
attn_weights = attn_weights + causal_mask
|
215 |
+
|
216 |
+
attn_weights = nn.functional.softmax(attn_weights, dim=-1, dtype=torch.float32).to(query.dtype)
|
217 |
+
attn_weights = nn.functional.dropout(attn_weights, p=dropout, training=module.training)
|
218 |
+
attn_output = torch.matmul(attn_weights, value_states)
|
219 |
+
attn_output = attn_output.transpose(1, 2).contiguous()
|
220 |
+
|
221 |
+
return attn_output, attn_weights
|
222 |
+
|
223 |
+
|
224 |
+
class LFM2MLP(nn.Module):
|
225 |
+
def __init__(
|
226 |
+
self,
|
227 |
+
dim: int,
|
228 |
+
ff_dim: int,
|
229 |
+
multiple_of: int,
|
230 |
+
auto_adjust_ff_dim: bool,
|
231 |
+
ffn_dim_multiplier: Optional[float],
|
232 |
+
):
|
233 |
+
super().__init__()
|
234 |
+
if auto_adjust_ff_dim:
|
235 |
+
ff_dim = int(2 * ff_dim / 3)
|
236 |
+
# custom dim factor multiplier
|
237 |
+
if ffn_dim_multiplier is not None:
|
238 |
+
ff_dim = int(ffn_dim_multiplier * ff_dim)
|
239 |
+
ff_dim = multiple_of * ((ff_dim + multiple_of - 1) // multiple_of)
|
240 |
+
|
241 |
+
self.w1 = nn.Linear(dim, ff_dim, bias=False)
|
242 |
+
self.w3 = nn.Linear(dim, ff_dim, bias=False)
|
243 |
+
self.w2 = nn.Linear(ff_dim, dim, bias=False)
|
244 |
+
|
245 |
+
def forward(self, x):
|
246 |
+
return self.w2(F.silu(self.w1(x)) * self.w3(x))
|
247 |
+
|
248 |
+
|
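# Worked example with the shipped config (block_ff_dim=12288, block_multiple_of=256,
# block_ffn_dim_multiplier=1.0, block_auto_adjust_ff_dim=True): the SwiGLU adjustment
# gives int(2 * 12288 / 3) = 8192, the 1.0 multiplier leaves it unchanged, and 8192
# is already a multiple of 256, so the effective feed-forward width is 8192.
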
class LFM2Cache(DynamicCache):
    """
    Attention and conv cache for LFM2.

    It stores the Key and Value states as a list of tensors, one for each layer.
    Attention layer cache shape: `[batch_size, num_heads, seq_len, head_dim]`.
    Conv layer cache shape: `[batch_size, conv_dim, L_cache-1]`.
    """

    def __init__(
        self,
        config: LFM2Config,
        max_batch_size: int,
        dtype: torch.dtype = torch.float32,
        device: Union[torch.device, str, None] = None,
    ):
        super().__init__()  # initialize key and value cache
        self.max_batch_size = max_batch_size
        self.full_attn_idxs = config.full_attn_idxs
        self.conv_L_cache = config.conv_L_cache
        self._dtype = dtype

        self.conv_cache: List[torch.Tensor] = []
        device = torch.device(device) if device is not None else None

        for _ in range(config.num_hidden_layers):
            conv_state = torch.zeros(
                self.max_batch_size,
                config.conv_dim,
                self.conv_L_cache,
                dtype=self._dtype,
                device=device,
            )
            torch._dynamo.mark_static_address(conv_state)
            self.conv_cache.append(conv_state)

    def update(
        self,
        key_states: torch.Tensor,
        value_states: torch.Tensor,
        layer_idx: int,
        cache_kwargs: Optional[Dict[str, Any]] = None,
    ) -> Tuple[torch.Tensor, torch.Tensor]:
        """
        Updates the cache with the new `key_states` and `value_states` for the layer `layer_idx`.

        Parameters:
            key_states (`torch.Tensor`):
                The new key states to cache.
            value_states (`torch.Tensor`):
                The new value states to cache.
            layer_idx (`int`):
                The index of the layer to cache the states for.
            cache_kwargs (`Dict[str, Any]`, `optional`):
                Additional arguments for the cache subclass. No additional arguments are used in `DynamicCache`.

        Return:
            A tuple containing the updated key and value states.
        """
        # Update the number of seen tokens (on the first attention layer,
        # since conv layers do not call `update`)
        if layer_idx == self.full_attn_idxs[0]:
            self._seen_tokens += key_states.shape[-2]

        # Update the cache
        if key_states is not None:
            if len(self.key_cache) <= layer_idx:
                # There may be skipped layers, fill them with empty lists
                for _ in range(len(self.key_cache), layer_idx):
                    self.key_cache.append(torch.tensor([]))
                    self.value_cache.append(torch.tensor([]))
                self.key_cache.append(key_states)
                self.value_cache.append(value_states)
            elif (
                not self.key_cache[layer_idx].numel()  # prefer `not t.numel()` over `len(t) == 0` to export the model
            ):  # fills previously skipped layers; checking for tensor causes errors
                self.key_cache[layer_idx] = key_states
                self.value_cache[layer_idx] = value_states
            else:
                self.key_cache[layer_idx] = torch.cat([self.key_cache[layer_idx], key_states], dim=-2)
                self.value_cache[layer_idx] = torch.cat([self.value_cache[layer_idx], value_states], dim=-2)

        return self.key_cache[layer_idx], self.value_cache[layer_idx]

    def reorder_cache(self, beam_idx: torch.LongTensor):
        """Reorders the cache for beam search, given the selected beam indices."""
        for layer_idx in range(len(self.key_cache)):
            device = self.key_cache[layer_idx].device
            self.key_cache[layer_idx] = self.key_cache[layer_idx].index_select(0, beam_idx.to(device))
            device = self.value_cache[layer_idx].device
            self.value_cache[layer_idx] = self.value_cache[layer_idx].index_select(0, beam_idx.to(device))

            device = self.conv_cache[layer_idx].device
            self.conv_cache[layer_idx] = self.conv_cache[layer_idx].index_select(0, beam_idx.to(device))

    def get_seq_length(self, layer_idx: Optional[int] = 0) -> int:
        """Returns the sequence length of the cached states. A layer index can be optionally passed."""
        # take any layer that contains cache and not empty tensor
        layer_idx = self.full_attn_idxs[0] if layer_idx not in self.full_attn_idxs else layer_idx
        if len(self.key_cache) <= layer_idx or self.key_cache[layer_idx].numel() == 0:
            return 0
        return self.key_cache[layer_idx].shape[-2]

    def reset(self):
        for layer_idx in range(len(self.conv_cache)):
            # In-place ops prevent breaking the static address
            self.conv_cache[layer_idx].zero_()


class LFM2Attention(nn.Module):
    def __init__(self, config: LFM2Config, layer_idx: Optional[int] = None, **kwargs):
        super().__init__()
        self.config = config
        self.layer_idx = layer_idx
        if layer_idx is None:
            logger.warning_once(
                f"Instantiating {self.__class__.__name__} without passing a `layer_idx` is not recommended and "
                "will lead to errors during the forward call if caching is used. Please make sure to provide a "
                "`layer_idx` when creating this class."
            )
        self.head_dim = config.hidden_size // config.num_attention_heads
        self.num_key_value_heads = config.num_key_value_heads
        self.num_key_value_groups = config.num_attention_heads // config.num_key_value_heads
        self.scaling = self.head_dim**-0.5
        self.is_causal = True

        self.q_layernorm = LFM2RMSNorm(self.head_dim, eps=config.norm_eps)
        self.k_layernorm = LFM2RMSNorm(self.head_dim, eps=config.norm_eps)

        self.q_proj = nn.Linear(
            config.hidden_size, config.num_attention_heads * self.head_dim, bias=False
        )
        self.k_proj = nn.Linear(
            config.hidden_size, config.num_key_value_heads * self.head_dim, bias=False
        )
        self.v_proj = nn.Linear(
            config.hidden_size, config.num_key_value_heads * self.head_dim, bias=False
        )
        self.out_proj = nn.Linear(
            config.num_attention_heads * self.head_dim, config.hidden_size, bias=False
        )

    def forward(
        self,
        hidden_states: torch.Tensor,
        position_embeddings: tuple[torch.Tensor, torch.Tensor],
        attention_mask: Optional[torch.Tensor],
        past_key_value: Optional[LFM2Cache] = None,
        cache_position: Optional[torch.LongTensor] = None,
        **kwargs,
    ) -> tuple[torch.Tensor, Optional[torch.Tensor], Optional[tuple[torch.Tensor]]]:
        input_shape = hidden_states.shape[:-1]
        hidden_shape = (*input_shape, -1, self.head_dim)

        q = self.q_layernorm(self.q_proj(hidden_states).view(*hidden_shape)).transpose(1, 2)
        k = self.k_layernorm(self.k_proj(hidden_states).view(*hidden_shape)).transpose(1, 2)
        v = self.v_proj(hidden_states).view(*hidden_shape).transpose(1, 2)

        cos, sin = position_embeddings
        q, k = apply_rotary_pos_emb(q, k, cos, sin)

        if past_key_value is not None:
            cache_kwargs = {"sin": sin, "cos": cos, "cache_position": cache_position}
            k, v = past_key_value.update(key_states=k, value_states=v, layer_idx=self.layer_idx, cache_kwargs=cache_kwargs)

        attention_interface: Callable = eager_attention_forward
        if self.config._attn_implementation != "eager":
            attention_interface = ALL_ATTENTION_FUNCTIONS[self.config._attn_implementation]

        attn_output, attn_weights = attention_interface(
            self,
            q,
            k,
            v,
            attention_mask,
            dropout=0.0,
            scaling=self.scaling,
            **kwargs,
        )
        attn_output = attn_output.reshape(*input_shape, -1).contiguous()
        output = self.out_proj(attn_output)
        return output, attn_weights


class LFM2ShortConv(nn.Module):
    def __init__(
        self,
        config: LFM2Config,
        dim: int,
        layer_idx: int,
    ):
        super().__init__()
        self.config = config
        self.layer_idx = layer_idx
        self.L_cache = config.conv_L_cache
        self.bias = config.conv_bias

        self.conv = nn.Conv1d(
            in_channels=dim,
            out_channels=dim,
            kernel_size=self.L_cache,
            groups=dim,
            bias=self.bias,
            padding=self.L_cache - 1,
        )
        self.in_proj = nn.Linear(dim, 3 * dim, bias=self.bias)
        self.out_proj = nn.Linear(dim, dim, bias=self.bias)

    def cuda_kernels_forward(
        self,
        x: torch.Tensor,
        cache_params: Optional[LFM2Cache] = None,
        cache_position: Optional[torch.LongTensor] = None,
        attention_mask: Optional[torch.Tensor] = None,
    ):
        BCx = self.in_proj(x).transpose(-1, -2)
        B, C, x = BCx.chunk(3, dim=-2)

        Bx = B * x

        conv_weights = self.conv.weight.view(self.conv.weight.size(0), self.conv.weight.size(2))
        if cache_params is not None and cache_position[0] > 0:
            conv_out = causal_conv1d_update(
                Bx.squeeze(-1),
                cache_params.conv_cache[self.layer_idx],
                conv_weights,
                self.conv.bias,
                None,
            )
            conv_out = conv_out.unsqueeze(-1)
        else:
            if cache_params is not None:
                conv_state = nn.functional.pad(Bx, (self.L_cache - Bx.shape[-1], 0))
                cache_params.conv_cache[self.layer_idx].copy_(conv_state)

            conv_out = causal_conv1d_fn(Bx, conv_weights, self.conv.bias, activation=None)

        y = C * conv_out
        y = self.out_proj(y.transpose(-1, -2).contiguous())
        return y

    def slow_forward(
        self,
        x: torch.Tensor,
        cache_params: Optional[LFM2Cache] = None,
        cache_position: Optional[torch.LongTensor] = None,
        attention_mask: Optional[torch.Tensor] = None,
    ):
        seqlen = x.shape[1]
        BCx = self.in_proj(x).transpose(-1, -2)
        B, C, x = BCx.chunk(3, dim=-2)

        Bx = B * x

        if cache_params is not None and cache_position[0] > 0:
            conv_state = cache_params.conv_cache[self.layer_idx]
            cache_position = cache_position.clamp(0, self.L_cache - 1)
            conv_state = conv_state.roll(shifts=-1, dims=-1)
            conv_state[:, :, cache_position] = Bx.to(device=conv_state.device, dtype=conv_state.dtype)
            cache_params.conv_cache[self.layer_idx].copy_(conv_state)
            conv_out = torch.sum(conv_state.to(Bx.device) * self.conv.weight[:, 0, :], dim=-1)
            if self.bias:
                conv_out += self.conv.bias

            conv_out = conv_out.unsqueeze(-1)
        else:
            if cache_params is not None:
                conv_state = nn.functional.pad(Bx, (self.L_cache - Bx.shape[-1], 0))
                cache_params.conv_cache[self.layer_idx].copy_(conv_state)

            conv_out = self.conv(Bx)[..., :seqlen]

        y = C * conv_out
        y = y.transpose(-1, -2).contiguous()
        y = self.out_proj(y)
        return y

    def forward(
        self,
        x: torch.Tensor,
        cache_params: Optional[LFM2Cache] = None,
        cache_position: Optional[torch.LongTensor] = None,
        attention_mask: Optional[torch.Tensor] = None,
    ):
        if is_fast_path_available and "cuda" in x.device.type and not torch._dynamo.is_compiling():
            return self.cuda_kernels_forward(x, cache_params, cache_position, attention_mask)
        return self.slow_forward(x, cache_params, cache_position, attention_mask)

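# LFM2ShortConv is the double-gated short convolution from the model card:
# `in_proj` splits the input into three channel-wise streams (B, C, x); the input
# gate B multiplies x before a depthwise causal conv of kernel size L_cache (3 here);
# the output gate C multiplies the conv result before `out_proj`, i.e.
# y = out_proj(C * conv(B * x)). At decode time only the last few inputs are needed,
# so its state is the small fixed-size buffer in `LFM2Cache.conv_cache` rather than
# a growing KV cache.
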
545 |
+
class LFM2AttentionDecoderLayer(GradientCheckpointingLayer):
|
546 |
+
def __init__(self, config: LFM2Config, layer_idx: int):
|
547 |
+
super().__init__()
|
548 |
+
self.self_attn = LFM2Attention(config, layer_idx)
|
549 |
+
self.feed_forward = LFM2MLP(
|
550 |
+
dim=config.block_dim,
|
551 |
+
ff_dim=config.block_ff_dim,
|
552 |
+
multiple_of=config.block_multiple_of,
|
553 |
+
auto_adjust_ff_dim=config.block_auto_adjust_ff_dim,
|
554 |
+
ffn_dim_multiplier=config.block_ffn_dim_multiplier,
|
555 |
+
)
|
556 |
+
self.operator_norm = LFM2RMSNorm(config.hidden_size, eps=config.norm_eps)
|
557 |
+
self.ffn_norm = LFM2RMSNorm(config.hidden_size, eps=config.norm_eps)
|
558 |
+
|
559 |
+
def forward(
|
560 |
+
self,
|
561 |
+
hidden_states: torch.Tensor,
|
562 |
+
position_embeddings: tuple[torch.Tensor, torch.Tensor],
|
563 |
+
attention_mask: Optional[torch.Tensor] = None,
|
564 |
+
position_ids: Optional[torch.LongTensor] = None,
|
565 |
+
past_key_value: Optional[tuple[torch.Tensor]] = None,
|
566 |
+
output_attentions: Optional[bool] = False,
|
567 |
+
cache_position: Optional[torch.LongTensor] = None,
|
568 |
+
**kwargs,
|
569 |
+
) -> tuple[torch.FloatTensor, Optional[tuple[torch.FloatTensor, torch.FloatTensor]]]:
|
570 |
+
h, self_attn_weights = self.self_attn(
|
571 |
+
hidden_states=self.operator_norm(hidden_states),
|
572 |
+
position_embeddings=position_embeddings,
|
573 |
+
attention_mask=attention_mask,
|
574 |
+
position_ids=position_ids,
|
575 |
+
past_key_value=past_key_value,
|
576 |
+
cache_position=cache_position,
|
577 |
+
**kwargs,
|
578 |
+
)
|
579 |
+
h += hidden_states
|
580 |
+
out = h + self.feed_forward.forward(self.ffn_norm(h))
|
581 |
+
|
582 |
+
outputs = (out,)
|
583 |
+
if output_attentions:
|
584 |
+
outputs += (self_attn_weights,)
|
585 |
+
|
586 |
+
return outputs
|
587 |
+
|
588 |
+
|
589 |
+
class LFM2ShortConvDecoderLayer(GradientCheckpointingLayer):
    def __init__(self, config: LFM2Config, layer_idx: int):
        super().__init__()
        self.conv = LFM2ShortConv(
            config=config,
            dim=config.conv_dim,
            layer_idx=layer_idx,
        )
        self.feed_forward = LFM2MLP(
            dim=config.block_dim,
            ff_dim=config.block_ff_dim,
            multiple_of=config.block_multiple_of,
            auto_adjust_ff_dim=config.block_auto_adjust_ff_dim,
            ffn_dim_multiplier=config.block_ffn_dim_multiplier,
        )
        self.operator_norm = LFM2RMSNorm(config.hidden_size, eps=config.norm_eps)
        self.ffn_norm = LFM2RMSNorm(config.hidden_size, eps=config.norm_eps)

    def forward(
        self,
        hidden_states: torch.Tensor,
        past_key_value: Optional[LFM2Cache] = None,
        cache_position: Optional[torch.LongTensor] = None,
        attention_mask: Optional[torch.Tensor] = None,
        output_attentions: Optional[bool] = False,
        **kwargs,
    ) -> tuple[torch.FloatTensor, Optional[tuple[torch.FloatTensor, torch.FloatTensor]]]:
        h = self.conv(
            self.operator_norm(hidden_states),
            cache_params=past_key_value,
            cache_position=cache_position,
            attention_mask=attention_mask,
        )
        self_attn_weights = None

        h += hidden_states
        out = h + self.feed_forward.forward(self.ffn_norm(h))

        outputs = (out,)
        if output_attentions:
            outputs += (self_attn_weights,)

        return outputs


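# Note on the two block types above: both follow the same pre-norm residual
# recipe, h = x + Mixer(operator_norm(x)) followed by out = h + FFN(ffn_norm(h));
# only the mixer differs (softmax attention vs. the gated short convolution),
# which is what lets LFM2Model below interleave them freely by layer index.

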
@auto_docstring
class LFM2PretrainedModel(PreTrainedModel):
    config_class = LFM2Config
    base_model_prefix = "model"
    supports_gradient_checkpointing = True
    _no_split_modules: ClassVar = ["LFM2AttentionDecoderLayer", "LFM2ShortConvDecoderLayer"]
    _skip_keys_device_placement = "past_key_values"
    _supports_flash_attn_2 = True
    _supports_sdpa = True
    _supports_flex_attn = True
    _supports_cache_class = True
    _supports_quantized_cache = True
    _supports_static_cache = True
    _supports_attention_backend = True

    def _init_weights(self, module):
        std = self.config.initializer_range
        if isinstance(module, (nn.Linear, nn.Conv1d)):
            module.weight.data.normal_(mean=0.0, std=std)
            if module.bias is not None:
                module.bias.data.zero_()
        elif isinstance(module, nn.Embedding):
            module.weight.data.normal_(mean=0.0, std=std)
            if module.padding_idx is not None:
                module.weight.data[module.padding_idx].zero_()
        elif isinstance(module, LFM2RMSNorm):
            module.weight.data.fill_(1.0)


class LFM2Model(LFM2PretrainedModel):
    def __init__(self, config: LFM2Config):
        super().__init__(config)
        self.padding_idx = config.pad_token_id
        self.vocab_size = config.vocab_size

        self.embed_tokens = nn.Embedding(config.vocab_size, config.hidden_size, self.padding_idx)

        self.pos_emb = LFM2RotaryEmbedding(config)

        # Interleave the two block types: layers listed in full_attn_idxs use
        # softmax attention, all remaining layers use the short-convolution mixer.
        decoder_layers = []
        for i in range(config.num_hidden_layers):
            if i in config.full_attn_idxs:
                decoder_layers.append(LFM2AttentionDecoderLayer(config, layer_idx=i))
            else:
                decoder_layers.append(LFM2ShortConvDecoderLayer(config, layer_idx=i))
        self.layers = nn.ModuleList(decoder_layers)

        self.embedding_norm = LFM2RMSNorm(config.hidden_size, eps=config.norm_eps)

        self.gradient_checkpointing = False

        # Initialize weights and apply final processing
        self.post_init()

    def get_input_embeddings(self):
        return self.embed_tokens

    def set_input_embeddings(self, value):
        self.embed_tokens = value

    @can_return_tuple
    @auto_docstring
    def forward(
        self,
        input_ids: torch.LongTensor = None,
        attention_mask: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        past_key_values: Optional[LFM2Cache] = None,
        inputs_embeds: Optional[torch.FloatTensor] = None,
        use_cache: Optional[bool] = None,
        output_attentions: Optional[bool] = None,
        output_hidden_states: Optional[bool] = None,
        return_dict: Optional[bool] = None,
        cache_position: Optional[torch.LongTensor] = None,
        **flash_attn_kwargs: Unpack[FlashAttentionKwargs],
    ) -> BaseModelOutputWithPast:
        output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions
        output_hidden_states = (
            output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states
        )
        use_cache = use_cache if use_cache is not None else self.config.use_cache

        return_dict = return_dict if return_dict is not None else self.config.use_return_dict

        if (input_ids is None) ^ (inputs_embeds is not None):
            raise ValueError("You must specify exactly one of input_ids or inputs_embeds")

        if self.gradient_checkpointing and self.training and use_cache:
            logger.warning_once(
                "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..."
            )
            use_cache = False

        if inputs_embeds is None:
            inputs_embeds = self.embed_tokens(input_ids)

        if use_cache and past_key_values is None:
            batch_size = inputs_embeds.shape[0]
            past_key_values = LFM2Cache(
                config=self.config, max_batch_size=batch_size, dtype=self.dtype, device=self.device
            )

        if cache_position is None:
            past_seen_tokens = past_key_values.get_seq_length() if past_key_values is not None else 0
            cache_position = torch.arange(
                past_seen_tokens, past_seen_tokens + inputs_embeds.shape[1], device=inputs_embeds.device
            )

        if position_ids is None:
            position_ids = cache_position.unsqueeze(0)

        causal_mask = create_causal_mask(
            config=self.config,
            input_embeds=inputs_embeds,
            attention_mask=attention_mask,
            cache_position=cache_position,
            past_key_values=past_key_values,
        )
        hidden_states = inputs_embeds

        position_embeddings = self.pos_emb(hidden_states, position_ids)

        # decoder layers
        all_hidden_states = () if output_hidden_states else None
        all_self_attns = () if output_attentions else None
        for decoder_layer in self.layers:
            if output_hidden_states:
                all_hidden_states += (hidden_states,)

            layer_outputs = decoder_layer(
                hidden_states,
                attention_mask=causal_mask,
                position_ids=position_ids,
                past_key_value=past_key_values,
                output_attentions=output_attentions,
                use_cache=use_cache,
                cache_position=cache_position,
                position_embeddings=position_embeddings,
                **flash_attn_kwargs,
            )

            hidden_states = layer_outputs[0]

            if output_attentions:
                all_self_attns += (layer_outputs[1],)

        hidden_states = self.embedding_norm(hidden_states)

        # add hidden states from the last decoder layer
        if output_hidden_states:
            all_hidden_states += (hidden_states,)

        output = BaseModelOutputWithPast(
            last_hidden_state=hidden_states,
            past_key_values=past_key_values if use_cache else None,
            hidden_states=all_hidden_states,
            attentions=all_self_attns,
        )
        return output if return_dict else output.to_tuple()


class KwargsForCausalLM(FlashAttentionKwargs, LossKwargs): ...


@auto_docstring
class LFM2ForCausalLM(LFM2PretrainedModel, GenerationMixin):
    _tied_weights_keys = ["lm_head.weight"]

    def __init__(self, config: LFM2Config):
        super().__init__(config)
        self.model = LFM2Model(config)
        self.vocab_size = config.vocab_size
        self.lm_head = nn.Linear(config.hidden_size, config.vocab_size, bias=False)
        self.post_init()

    def get_input_embeddings(self):
        return self.model.embed_tokens

    def set_input_embeddings(self, value):
        self.model.embed_tokens = value

    def get_output_embeddings(self):
        return self.lm_head

    def set_output_embeddings(self, new_embeddings):
        self.lm_head = new_embeddings

    def set_decoder(self, decoder):
        self.model = decoder

    def get_decoder(self):
        return self.model

    def forward(
        self,
        input_ids: torch.LongTensor = None,
        attention_mask: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        past_key_values: Optional[LFM2Cache] = None,
        inputs_embeds: Optional[torch.FloatTensor] = None,
        labels: Optional[torch.LongTensor] = None,
        use_cache: Optional[bool] = None,
        output_attentions: Optional[bool] = None,
        output_hidden_states: Optional[bool] = None,
        return_dict: Optional[bool] = None,
        cache_position: Optional[torch.LongTensor] = None,
        logits_to_keep: Union[int, torch.Tensor] = 0,
        **kwargs: Unpack[KwargsForCausalLM],
    ) -> Union[tuple, CausalLMOutputWithPast]:
        output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions
        output_hidden_states = (
            output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states
        )
        return_dict = return_dict if return_dict is not None else self.config.use_return_dict

        outputs: BaseModelOutputWithPast = self.model(
            input_ids=input_ids,
            attention_mask=attention_mask,
            position_ids=position_ids,
            past_key_values=past_key_values,
            inputs_embeds=inputs_embeds,
            use_cache=use_cache,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            cache_position=cache_position,
            return_dict=return_dict,
            **kwargs,
        )

        hidden_states = outputs.last_hidden_state
        # Only compute the necessary logits, and do not upcast them to float if we are not computing the loss
        slice_indices = slice(-logits_to_keep, None) if isinstance(logits_to_keep, int) else logits_to_keep
        logits = self.lm_head(hidden_states[:, slice_indices, :])

        loss = None
        if labels is not None:
            loss = self.loss_function(logits=logits, labels=labels, vocab_size=self.config.vocab_size, **kwargs)

        if not return_dict:
            output = (logits,) + outputs[1:]
            return (loss,) + output if loss is not None else output

        return CausalLMOutputWithPast(
            loss=loss,
            logits=logits,
            past_key_values=outputs.past_key_values,
            hidden_states=outputs.hidden_states,
            attentions=outputs.attentions,
        )

    def prepare_inputs_for_generation(
        self,
        input_ids,
        past_key_values=None,
        attention_mask=None,
        inputs_embeds=None,
        cache_position=None,
        position_ids=None,
        use_cache=True,
        **kwargs,
    ):
        # Overwritten -- Support custom LFM2Cache.

        empty_past_kv = past_key_values is None or (
            isinstance(past_key_values, DynamicCache) and past_key_values._seen_tokens == 0
        )

        # Omit tokens covered by past_key_values.
        if not empty_past_kv:
            # If we have a cache, slice `input_ids` via `cache_position` to keep only the unprocessed tokens.
            # Exception 1: when passing inputs_embeds, input_ids may be missing entries
            # Exception 2: some generation methods do special slicing of input_ids, so we don't need to do it here
            # Exception 3: with synced GPUs cache_position may go out of bounds, but we only want a dummy token in
            # that case (we can't check exception 3 while compiling)
            if (
                inputs_embeds is not None  # Exception 1
                or cache_position[-1] >= input_ids.shape[1]  # Exception 3
            ):
                input_ids = input_ids[:, -cache_position.shape[0] :]
            elif (
                input_ids.shape[1] != cache_position.shape[0]
            ):  # Default case (the "else", a no-op, is Exception 2)
                input_ids = input_ids[:, cache_position]
        else:
            past_key_values = LFM2Cache(self.config, input_ids.shape[0], dtype=self.dtype, device=self.device)

        # if attention_mask is not None and position_ids is None:
        #     # create position_ids on the fly for batch generation
        #     position_ids = attention_mask.long().cumsum(-1) - 1
        #     position_ids.masked_fill_(attention_mask == 0, 1)
        #     if not empty_past_kv:
        #         position_ids = position_ids[:, -input_ids.shape[1] :]

        # if `inputs_embeds` are passed, we only want to use them in the 1st generation step
        if inputs_embeds is not None and empty_past_kv:
            model_inputs = {"inputs_embeds": inputs_embeds}
        else:
            model_inputs = {"input_ids": input_ids.contiguous()}  # `contiguous()` needed for compilation use cases

        model_inputs.update(
            {
                # "position_ids": position_ids,
                "past_key_values": past_key_values,
                "use_cache": use_cache,
                "attention_mask": attention_mask,
                "cache_position": cache_position,
            }
        )
        return model_inputs


__all__ = ["LFM2ForCausalLM", "LFM2Model", "LFM2PretrainedModel"]
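
With the custom modeling file in place, the checkpoint loads through the standard auto classes as long as `trust_remote_code=True` is passed. A minimal usage sketch, assuming the files in this commit are used as the checkpoint; the path below is a placeholder, not something defined by this repo:

from transformers import AutoModelForCausalLM, AutoTokenizer

repo = "./lfm2-checkpoint"  # hypothetical path to the files in this commit
tokenizer = AutoTokenizer.from_pretrained(repo)
model = AutoModelForCausalLM.from_pretrained(repo, trust_remote_code=True)  # picks up modeling_lfm2.py

messages = [{"role": "user", "content": "Give me a one-line summary of LFM2."}]
inputs = tokenizer.apply_chat_template(messages, add_generation_prompt=True, return_tensors="pt")
out = model.generate(inputs, max_new_tokens=64)
print(tokenizer.decode(out[0], skip_special_tokens=True))
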
requirements.txt
ADDED
@@ -0,0 +1,2 @@
transformers==4.53.0.dev0
tokenizers==0.21.1
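
Note that the pinned `transformers` version is a dev pre-release, so it generally has to be installed from source rather than from a PyPI release. A quick post-install sanity check (a sketch, not specific to this repo):

import tokenizers
import transformers

# The modeling code targets the exact pins from requirements.txt above.
assert transformers.__version__ == "4.53.0.dev0", transformers.__version__
assert tokenizers.__version__ == "0.21.1", tokenizers.__version__
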
special_tokens_map.json
ADDED
@@ -0,0 +1,23 @@
{
  "bos_token": {
    "content": "<|startoftext|>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "eos_token": {
    "content": "<|im_end|>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "pad_token": {
    "content": "<|pad|>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  }
}
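
These entries follow the ChatML convention: `<|startoftext|>` opens a sequence, `<|im_end|>` terminates each turn and doubles as the EOS token that stops generation, and `<|pad|>` fills batch padding. A small sketch of how they surface on a loaded tokenizer; the path is again a placeholder:

from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("./lfm2-checkpoint")  # hypothetical path
print(tok.bos_token, tok.eos_token, tok.pad_token)  # <|startoftext|> <|im_end|> <|pad|>
print(tok.eos_token_id)                             # 7, per tokenizer_config.json below
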
tokenizer.json
ADDED
The diff for this file is too large to render.
tokenizer_config.json
ADDED
@@ -0,0 +1,4074 @@
{
  "add_bos_token": true,
  "add_eos_token": false,
  "added_tokens_decoder": {
    "0": {"content": "<|pad|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "1": {"content": "<|startoftext|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "2": {"content": "<|endoftext|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "3": {"content": "<|fim_pre|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "4": {"content": "<|fim_mid|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "5": {"content": "<|fim_suf|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "6": {"content": "<|im_start|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "7": {"content": "<|im_end|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "8": {"content": "<|tool_list_start|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "9": {"content": "<|tool_list_end|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "10": {"content": "<|tool_call_start|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "11": {"content": "<|tool_call_end|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "12": {"content": "<|tool_response_start|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "13": {"content": "<|tool_response_end|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "16": {"content": "<|reserved_6|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "17": {"content": "<|reserved_7|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "18": {"content": "<|reserved_8|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "19": {"content": "<|reserved_9|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "20": {"content": "<|reserved_10|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "21": {"content": "<|reserved_11|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "22": {"content": "<|reserved_12|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "23": {"content": "<|reserved_13|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "24": {"content": "<|reserved_14|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "25": {"content": "<|reserved_15|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "26": {"content": "<|reserved_16|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "27": {"content": "<|reserved_17|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "28": {"content": "<|reserved_18|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "29": {"content": "<|reserved_19|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "30": {"content": "<|reserved_20|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "31": {"content": "<|reserved_21|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "32": {"content": "<|reserved_22|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "33": {"content": "<|reserved_23|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "34": {"content": "<|reserved_24|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "35": {"content": "<|reserved_25|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "36": {"content": "<|reserved_26|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "37": {"content": "<|reserved_27|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "38": {"content": "<|reserved_28|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "39": {"content": "<|reserved_29|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "40": {"content": "<|reserved_30|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "41": {"content": "<|reserved_31|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "42": {"content": "<|reserved_32|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "43": {"content": "<|reserved_33|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "44": {"content": "<|reserved_34|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "45": {"content": "<|reserved_35|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "46": {"content": "<|reserved_36|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "47": {"content": "<|reserved_37|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "48": {"content": "<|reserved_38|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "49": {"content": "<|reserved_39|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "50": {"content": "<|reserved_40|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "51": {"content": "<|reserved_41|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "52": {"content": "<|reserved_42|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "53": {"content": "<|reserved_43|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "54": {"content": "<|reserved_44|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "55": {"content": "<|reserved_45|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "56": {"content": "<|reserved_46|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "57": {"content": "<|reserved_47|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "58": {"content": "<|reserved_48|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "59": {"content": "<|reserved_49|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "60": {"content": "<|reserved_50|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "61": {"content": "<|reserved_51|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "62": {"content": "<|reserved_52|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "63": {"content": "<|reserved_53|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "64": {"content": "<|reserved_54|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "65": {"content": "<|reserved_55|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "66": {"content": "<|reserved_56|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "67": {"content": "<|reserved_57|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "68": {"content": "<|reserved_58|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "69": {"content": "<|reserved_59|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "70": {"content": "<|reserved_60|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "71": {"content": "<|reserved_61|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "72": {"content": "<|reserved_62|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "73": {"content": "<|reserved_63|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "74": {"content": "<|reserved_64|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "75": {"content": "<|reserved_65|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "76": {"content": "<|reserved_66|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "77": {"content": "<|reserved_67|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "78": {"content": "<|reserved_68|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "79": {"content": "<|reserved_69|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "80": {"content": "<|reserved_70|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "81": {"content": "<|reserved_71|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "82": {"content": "<|reserved_72|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "83": {"content": "<|reserved_73|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "84": {"content": "<|reserved_74|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "85": {"content": "<|reserved_75|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "86": {"content": "<|reserved_76|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "87": {"content": "<|reserved_77|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "88": {"content": "<|reserved_78|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "89": {"content": "<|reserved_79|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "90": {"content": "<|reserved_80|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "91": {"content": "<|reserved_81|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "92": {"content": "<|reserved_82|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "93": {"content": "<|reserved_83|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "94": {"content": "<|reserved_84|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "95": {"content": "<|reserved_85|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "96": {"content": "<|reserved_86|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "97": {"content": "<|reserved_87|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "98": {"content": "<|reserved_88|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "99": {"content": "<|reserved_89|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "100": {"content": "<|reserved_90|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "101": {"content": "<|reserved_91|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "102": {"content": "<|reserved_92|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "103": {"content": "<|reserved_93|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "104": {"content": "<|reserved_94|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "105": {"content": "<|reserved_95|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "106": {"content": "<|reserved_96|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "107": {"content": "<|reserved_97|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "108": {"content": "<|reserved_98|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "109": {"content": "<|reserved_99|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "110": {"content": "<|reserved_100|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "111": {"content": "<|reserved_101|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "112": {"content": "<|reserved_102|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "113": {"content": "<|reserved_103|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "114": {"content": "<|reserved_104|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "115": {"content": "<|reserved_105|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "116": {"content": "<|reserved_106|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "117": {"content": "<|reserved_107|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "118": {"content": "<|reserved_108|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "119": {"content": "<|reserved_109|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "120": {"content": "<|reserved_110|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "121": {"content": "<|reserved_111|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "122": {"content": "<|reserved_112|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "123": {"content": "<|reserved_113|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "124": {"content": "<|reserved_114|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "125": {"content": "<|reserved_115|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "126": {"content": "<|reserved_116|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "127": {"content": "<|reserved_117|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "128": {"content": "<|reserved_118|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "129": {"content": "<|reserved_119|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "130": {"content": "<|reserved_120|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "131": {"content": "<|reserved_121|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "132": {"content": "<|reserved_122|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "133": {"content": "<|reserved_123|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "134": {"content": "<|reserved_124|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "135": {"content": "<|reserved_125|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true}
|
1076 |
+
},
|
1077 |
+
"136": {
|
1078 |
+
"content": "<|reserved_126|>",
|
1079 |
+
"lstrip": false,
|
1080 |
+
"normalized": false,
|
1081 |
+
"rstrip": false,
|
1082 |
+
"single_word": false,
|
1083 |
+
"special": true
|
1084 |
+
},
|
1085 |
+
"137": {
|
1086 |
+
"content": "<|reserved_127|>",
|
1087 |
+
"lstrip": false,
|
1088 |
+
"normalized": false,
|
1089 |
+
"rstrip": false,
|
1090 |
+
"single_word": false,
|
1091 |
+
"special": true
|
1092 |
+
},
|
1093 |
+
"138": {
|
1094 |
+
"content": "<|reserved_128|>",
|
1095 |
+
"lstrip": false,
|
1096 |
+
"normalized": false,
|
1097 |
+
"rstrip": false,
|
1098 |
+
"single_word": false,
|
1099 |
+
"special": true
|
1100 |
+
},
|
1101 |
+
"139": {
|
1102 |
+
"content": "<|reserved_129|>",
|
1103 |
+
"lstrip": false,
|
1104 |
+
"normalized": false,
|
1105 |
+
"rstrip": false,
|
1106 |
+
"single_word": false,
|
1107 |
+
"special": true
|
1108 |
+
},
|
1109 |
+
"140": {
|
1110 |
+
"content": "<|reserved_130|>",
|
1111 |
+
"lstrip": false,
|
1112 |
+
"normalized": false,
|
1113 |
+
"rstrip": false,
|
1114 |
+
"single_word": false,
|
1115 |
+
"special": true
|
1116 |
+
},
|
1117 |
+
"141": {
|
1118 |
+
"content": "<|reserved_131|>",
|
1119 |
+
"lstrip": false,
|
1120 |
+
"normalized": false,
|
1121 |
+
"rstrip": false,
|
1122 |
+
"single_word": false,
|
1123 |
+
"special": true
|
1124 |
+
},
|
1125 |
+
"142": {
|
1126 |
+
"content": "<|reserved_132|>",
|
1127 |
+
"lstrip": false,
|
1128 |
+
"normalized": false,
|
1129 |
+
"rstrip": false,
|
1130 |
+
"single_word": false,
|
1131 |
+
"special": true
|
1132 |
+
},
|
1133 |
+
"143": {
|
1134 |
+
"content": "<|reserved_133|>",
|
1135 |
+
"lstrip": false,
|
1136 |
+
"normalized": false,
|
1137 |
+
"rstrip": false,
|
1138 |
+
"single_word": false,
|
1139 |
+
"special": true
|
1140 |
+
},
|
1141 |
+
"144": {
|
1142 |
+
"content": "<|reserved_134|>",
|
1143 |
+
"lstrip": false,
|
1144 |
+
"normalized": false,
|
1145 |
+
"rstrip": false,
|
1146 |
+
"single_word": false,
|
1147 |
+
"special": true
|
1148 |
+
},
|
1149 |
+
"145": {
|
1150 |
+
"content": "<|reserved_135|>",
|
1151 |
+
"lstrip": false,
|
1152 |
+
"normalized": false,
|
1153 |
+
"rstrip": false,
|
1154 |
+
"single_word": false,
|
1155 |
+
"special": true
|
1156 |
+
},
|
1157 |
+
"146": {
|
1158 |
+
"content": "<|reserved_136|>",
|
1159 |
+
"lstrip": false,
|
1160 |
+
"normalized": false,
|
1161 |
+
"rstrip": false,
|
1162 |
+
"single_word": false,
|
1163 |
+
"special": true
|
1164 |
+
},
|
1165 |
+
"147": {
|
1166 |
+
"content": "<|reserved_137|>",
|
1167 |
+
"lstrip": false,
|
1168 |
+
"normalized": false,
|
1169 |
+
"rstrip": false,
|
1170 |
+
"single_word": false,
|
1171 |
+
"special": true
|
1172 |
+
},
|
1173 |
+
"148": {
|
1174 |
+
"content": "<|reserved_138|>",
|
1175 |
+
"lstrip": false,
|
1176 |
+
"normalized": false,
|
1177 |
+
"rstrip": false,
|
1178 |
+
"single_word": false,
|
1179 |
+
"special": true
|
1180 |
+
},
|
1181 |
+
"149": {
|
1182 |
+
"content": "<|reserved_139|>",
|
1183 |
+
"lstrip": false,
|
1184 |
+
"normalized": false,
|
1185 |
+
"rstrip": false,
|
1186 |
+
"single_word": false,
|
1187 |
+
"special": true
|
1188 |
+
},
|
1189 |
+
"150": {
|
1190 |
+
"content": "<|reserved_140|>",
|
1191 |
+
"lstrip": false,
|
1192 |
+
"normalized": false,
|
1193 |
+
"rstrip": false,
|
1194 |
+
"single_word": false,
|
1195 |
+
"special": true
|
1196 |
+
},
|
1197 |
+
"151": {
|
1198 |
+
"content": "<|reserved_141|>",
|
1199 |
+
"lstrip": false,
|
1200 |
+
"normalized": false,
|
1201 |
+
"rstrip": false,
|
1202 |
+
"single_word": false,
|
1203 |
+
"special": true
|
1204 |
+
},
|
1205 |
+
"152": {
|
1206 |
+
"content": "<|reserved_142|>",
|
1207 |
+
"lstrip": false,
|
1208 |
+
"normalized": false,
|
1209 |
+
"rstrip": false,
|
1210 |
+
"single_word": false,
|
1211 |
+
"special": true
|
1212 |
+
},
|
1213 |
+
"153": {
|
1214 |
+
"content": "<|reserved_143|>",
|
1215 |
+
"lstrip": false,
|
1216 |
+
"normalized": false,
|
1217 |
+
"rstrip": false,
|
1218 |
+
"single_word": false,
|
1219 |
+
"special": true
|
1220 |
+
},
|
1221 |
+
"154": {
|
1222 |
+
"content": "<|reserved_144|>",
|
1223 |
+
"lstrip": false,
|
1224 |
+
"normalized": false,
|
1225 |
+
"rstrip": false,
|
1226 |
+
"single_word": false,
|
1227 |
+
"special": true
|
1228 |
+
},
|
1229 |
+
"155": {
|
1230 |
+
"content": "<|reserved_145|>",
|
1231 |
+
"lstrip": false,
|
1232 |
+
"normalized": false,
|
1233 |
+
"rstrip": false,
|
1234 |
+
"single_word": false,
|
1235 |
+
"special": true
|
1236 |
+
},
|
1237 |
+
"156": {
|
1238 |
+
"content": "<|reserved_146|>",
|
1239 |
+
"lstrip": false,
|
1240 |
+
"normalized": false,
|
1241 |
+
"rstrip": false,
|
1242 |
+
"single_word": false,
|
1243 |
+
"special": true
|
1244 |
+
},
|
1245 |
+
"157": {
|
1246 |
+
"content": "<|reserved_147|>",
|
1247 |
+
"lstrip": false,
|
1248 |
+
"normalized": false,
|
1249 |
+
"rstrip": false,
|
1250 |
+
"single_word": false,
|
1251 |
+
"special": true
|
1252 |
+
},
|
1253 |
+
"158": {
|
1254 |
+
"content": "<|reserved_148|>",
|
1255 |
+
"lstrip": false,
|
1256 |
+
"normalized": false,
|
1257 |
+
"rstrip": false,
|
1258 |
+
"single_word": false,
|
1259 |
+
"special": true
|
1260 |
+
},
|
1261 |
+
"159": {
|
1262 |
+
"content": "<|reserved_149|>",
|
1263 |
+
"lstrip": false,
|
1264 |
+
"normalized": false,
|
1265 |
+
"rstrip": false,
|
1266 |
+
"single_word": false,
|
1267 |
+
"special": true
|
1268 |
+
},
|
1269 |
+
"160": {
|
1270 |
+
"content": "<|reserved_150|>",
|
1271 |
+
"lstrip": false,
|
1272 |
+
"normalized": false,
|
1273 |
+
"rstrip": false,
|
1274 |
+
"single_word": false,
|
1275 |
+
"special": true
|
1276 |
+
},
|
1277 |
+
"161": {
|
1278 |
+
"content": "<|reserved_151|>",
|
1279 |
+
"lstrip": false,
|
1280 |
+
"normalized": false,
|
1281 |
+
"rstrip": false,
|
1282 |
+
"single_word": false,
|
1283 |
+
"special": true
|
1284 |
+
},
|
1285 |
+
"162": {
|
1286 |
+
"content": "<|reserved_152|>",
|
1287 |
+
"lstrip": false,
|
1288 |
+
"normalized": false,
|
1289 |
+
"rstrip": false,
|
1290 |
+
"single_word": false,
|
1291 |
+
"special": true
|
1292 |
+
},
|
1293 |
+
"163": {
|
1294 |
+
"content": "<|reserved_153|>",
|
1295 |
+
"lstrip": false,
|
1296 |
+
"normalized": false,
|
1297 |
+
"rstrip": false,
|
1298 |
+
"single_word": false,
|
1299 |
+
"special": true
|
1300 |
+
},
|
1301 |
+
"164": {
|
1302 |
+
"content": "<|reserved_154|>",
|
1303 |
+
"lstrip": false,
|
1304 |
+
"normalized": false,
|
1305 |
+
"rstrip": false,
|
1306 |
+
"single_word": false,
|
1307 |
+
"special": true
|
1308 |
+
},
|
1309 |
+
"165": {
|
1310 |
+
"content": "<|reserved_155|>",
|
1311 |
+
"lstrip": false,
|
1312 |
+
"normalized": false,
|
1313 |
+
"rstrip": false,
|
1314 |
+
"single_word": false,
|
1315 |
+
"special": true
|
1316 |
+
},
|
1317 |
+
"166": {
|
1318 |
+
"content": "<|reserved_156|>",
|
1319 |
+
"lstrip": false,
|
1320 |
+
"normalized": false,
|
1321 |
+
"rstrip": false,
|
1322 |
+
"single_word": false,
|
1323 |
+
"special": true
|
1324 |
+
},
|
1325 |
+
"167": {
|
1326 |
+
"content": "<|reserved_157|>",
|
1327 |
+
"lstrip": false,
|
1328 |
+
"normalized": false,
|
1329 |
+
"rstrip": false,
|
1330 |
+
"single_word": false,
|
1331 |
+
"special": true
|
1332 |
+
},
|
1333 |
+
"168": {
|
1334 |
+
"content": "<|reserved_158|>",
|
1335 |
+
"lstrip": false,
|
1336 |
+
"normalized": false,
|
1337 |
+
"rstrip": false,
|
1338 |
+
"single_word": false,
|
1339 |
+
"special": true
|
1340 |
+
},
|
1341 |
+
"169": {
|
1342 |
+
"content": "<|reserved_159|>",
|
1343 |
+
"lstrip": false,
|
1344 |
+
"normalized": false,
|
1345 |
+
"rstrip": false,
|
1346 |
+
"single_word": false,
|
1347 |
+
"special": true
|
1348 |
+
},
|
1349 |
+
"170": {
|
1350 |
+
"content": "<|reserved_160|>",
|
1351 |
+
"lstrip": false,
|
1352 |
+
"normalized": false,
|
1353 |
+
"rstrip": false,
|
1354 |
+
"single_word": false,
|
1355 |
+
"special": true
|
1356 |
+
},
|
1357 |
+
"171": {
|
1358 |
+
"content": "<|reserved_161|>",
|
1359 |
+
"lstrip": false,
|
1360 |
+
"normalized": false,
|
1361 |
+
"rstrip": false,
|
1362 |
+
"single_word": false,
|
1363 |
+
"special": true
|
1364 |
+
},
|
1365 |
+
"172": {
|
1366 |
+
"content": "<|reserved_162|>",
|
1367 |
+
"lstrip": false,
|
1368 |
+
"normalized": false,
|
1369 |
+
"rstrip": false,
|
1370 |
+
"single_word": false,
|
1371 |
+
"special": true
|
1372 |
+
},
|
1373 |
+
"173": {
|
1374 |
+
"content": "<|reserved_163|>",
|
1375 |
+
"lstrip": false,
|
1376 |
+
"normalized": false,
|
1377 |
+
"rstrip": false,
|
1378 |
+
"single_word": false,
|
1379 |
+
"special": true
|
1380 |
+
},
|
1381 |
+
"174": {
|
1382 |
+
"content": "<|reserved_164|>",
|
1383 |
+
"lstrip": false,
|
1384 |
+
"normalized": false,
|
1385 |
+
"rstrip": false,
|
1386 |
+
"single_word": false,
|
1387 |
+
"special": true
|
1388 |
+
},
|
1389 |
+
"175": {
|
1390 |
+
"content": "<|reserved_165|>",
|
1391 |
+
"lstrip": false,
|
1392 |
+
"normalized": false,
|
1393 |
+
"rstrip": false,
|
1394 |
+
"single_word": false,
|
1395 |
+
"special": true
|
1396 |
+
},
|
1397 |
+
"176": {
|
1398 |
+
"content": "<|reserved_166|>",
|
1399 |
+
"lstrip": false,
|
1400 |
+
"normalized": false,
|
1401 |
+
"rstrip": false,
|
1402 |
+
"single_word": false,
|
1403 |
+
"special": true
|
1404 |
+
},
|
1405 |
+
"177": {
|
1406 |
+
"content": "<|reserved_167|>",
|
1407 |
+
"lstrip": false,
|
1408 |
+
"normalized": false,
|
1409 |
+
"rstrip": false,
|
1410 |
+
"single_word": false,
|
1411 |
+
"special": true
|
1412 |
+
},
|
1413 |
+
"178": {
|
1414 |
+
"content": "<|reserved_168|>",
|
1415 |
+
"lstrip": false,
|
1416 |
+
"normalized": false,
|
1417 |
+
"rstrip": false,
|
1418 |
+
"single_word": false,
|
1419 |
+
"special": true
|
1420 |
+
},
|
1421 |
+
"179": {
|
1422 |
+
"content": "<|reserved_169|>",
|
1423 |
+
"lstrip": false,
|
1424 |
+
"normalized": false,
|
1425 |
+
"rstrip": false,
|
1426 |
+
"single_word": false,
|
1427 |
+
"special": true
|
1428 |
+
},
|
1429 |
+
"180": {
|
1430 |
+
"content": "<|reserved_170|>",
|
1431 |
+
"lstrip": false,
|
1432 |
+
"normalized": false,
|
1433 |
+
"rstrip": false,
|
1434 |
+
"single_word": false,
|
1435 |
+
"special": true
|
1436 |
+
},
|
1437 |
+
"181": {
|
1438 |
+
"content": "<|reserved_171|>",
|
1439 |
+
"lstrip": false,
|
1440 |
+
"normalized": false,
|
1441 |
+
"rstrip": false,
|
1442 |
+
"single_word": false,
|
1443 |
+
"special": true
|
1444 |
+
},
|
1445 |
+
"182": {
|
1446 |
+
"content": "<|reserved_172|>",
|
1447 |
+
"lstrip": false,
|
1448 |
+
"normalized": false,
|
1449 |
+
"rstrip": false,
|
1450 |
+
"single_word": false,
|
1451 |
+
"special": true
|
1452 |
+
},
|
1453 |
+
"183": {
|
1454 |
+
"content": "<|reserved_173|>",
|
1455 |
+
"lstrip": false,
|
1456 |
+
"normalized": false,
|
1457 |
+
"rstrip": false,
|
1458 |
+
"single_word": false,
|
1459 |
+
"special": true
|
1460 |
+
},
|
1461 |
+
"184": {
|
1462 |
+
"content": "<|reserved_174|>",
|
1463 |
+
"lstrip": false,
|
1464 |
+
"normalized": false,
|
1465 |
+
"rstrip": false,
|
1466 |
+
"single_word": false,
|
1467 |
+
"special": true
|
1468 |
+
},
|
1469 |
+
"185": {
|
1470 |
+
"content": "<|reserved_175|>",
|
1471 |
+
"lstrip": false,
|
1472 |
+
"normalized": false,
|
1473 |
+
"rstrip": false,
|
1474 |
+
"single_word": false,
|
1475 |
+
"special": true
|
1476 |
+
},
|
1477 |
+
"186": {
|
1478 |
+
"content": "<|reserved_176|>",
|
1479 |
+
"lstrip": false,
|
1480 |
+
"normalized": false,
|
1481 |
+
"rstrip": false,
|
1482 |
+
"single_word": false,
|
1483 |
+
"special": true
|
1484 |
+
},
|
1485 |
+
"187": {
|
1486 |
+
"content": "<|reserved_177|>",
|
1487 |
+
"lstrip": false,
|
1488 |
+
"normalized": false,
|
1489 |
+
"rstrip": false,
|
1490 |
+
"single_word": false,
|
1491 |
+
"special": true
|
1492 |
+
},
|
1493 |
+
"188": {
|
1494 |
+
"content": "<|reserved_178|>",
|
1495 |
+
"lstrip": false,
|
1496 |
+
"normalized": false,
|
1497 |
+
"rstrip": false,
|
1498 |
+
"single_word": false,
|
1499 |
+
"special": true
|
1500 |
+
},
|
1501 |
+
"189": {
|
1502 |
+
"content": "<|reserved_179|>",
|
1503 |
+
"lstrip": false,
|
1504 |
+
"normalized": false,
|
1505 |
+
"rstrip": false,
|
1506 |
+
"single_word": false,
|
1507 |
+
"special": true
|
1508 |
+
},
|
1509 |
+
"190": {
|
1510 |
+
"content": "<|reserved_180|>",
|
1511 |
+
"lstrip": false,
|
1512 |
+
"normalized": false,
|
1513 |
+
"rstrip": false,
|
1514 |
+
"single_word": false,
|
1515 |
+
"special": true
|
1516 |
+
},
|
1517 |
+
"191": {
|
1518 |
+
"content": "<|reserved_181|>",
|
1519 |
+
"lstrip": false,
|
1520 |
+
"normalized": false,
|
1521 |
+
"rstrip": false,
|
1522 |
+
"single_word": false,
|
1523 |
+
"special": true
|
1524 |
+
},
|
1525 |
+
"192": {
|
1526 |
+
"content": "<|reserved_182|>",
|
1527 |
+
"lstrip": false,
|
1528 |
+
"normalized": false,
|
1529 |
+
"rstrip": false,
|
1530 |
+
"single_word": false,
|
1531 |
+
"special": true
|
1532 |
+
},
|
1533 |
+
"193": {
|
1534 |
+
"content": "<|reserved_183|>",
|
1535 |
+
"lstrip": false,
|
1536 |
+
"normalized": false,
|
1537 |
+
"rstrip": false,
|
1538 |
+
"single_word": false,
|
1539 |
+
"special": true
|
1540 |
+
},
|
1541 |
+
"194": {
|
1542 |
+
"content": "<|reserved_184|>",
|
1543 |
+
"lstrip": false,
|
1544 |
+
"normalized": false,
|
1545 |
+
"rstrip": false,
|
1546 |
+
"single_word": false,
|
1547 |
+
"special": true
|
1548 |
+
},
|
1549 |
+
"195": {
|
1550 |
+
"content": "<|reserved_185|>",
|
1551 |
+
"lstrip": false,
|
1552 |
+
"normalized": false,
|
1553 |
+
"rstrip": false,
|
1554 |
+
"single_word": false,
|
1555 |
+
"special": true
|
1556 |
+
},
|
1557 |
+
"196": {
|
1558 |
+
"content": "<|reserved_186|>",
|
1559 |
+
"lstrip": false,
|
1560 |
+
"normalized": false,
|
1561 |
+
"rstrip": false,
|
1562 |
+
"single_word": false,
|
1563 |
+
"special": true
|
1564 |
+
},
|
1565 |
+
"197": {
|
1566 |
+
"content": "<|reserved_187|>",
|
1567 |
+
"lstrip": false,
|
1568 |
+
"normalized": false,
|
1569 |
+
"rstrip": false,
|
1570 |
+
"single_word": false,
|
1571 |
+
"special": true
|
1572 |
+
},
|
1573 |
+
"198": {
|
1574 |
+
"content": "<|reserved_188|>",
|
1575 |
+
"lstrip": false,
|
1576 |
+
"normalized": false,
|
1577 |
+
"rstrip": false,
|
1578 |
+
"single_word": false,
|
1579 |
+
"special": true
|
1580 |
+
},
|
1581 |
+
"199": {
|
1582 |
+
"content": "<|reserved_189|>",
|
1583 |
+
"lstrip": false,
|
1584 |
+
"normalized": false,
|
1585 |
+
"rstrip": false,
|
1586 |
+
"single_word": false,
|
1587 |
+
"special": true
|
1588 |
+
},
|
1589 |
+
"200": {
|
1590 |
+
"content": "<|reserved_190|>",
|
1591 |
+
"lstrip": false,
|
1592 |
+
"normalized": false,
|
1593 |
+
"rstrip": false,
|
1594 |
+
"single_word": false,
|
1595 |
+
"special": true
|
1596 |
+
},
|
1597 |
+
"201": {
|
1598 |
+
"content": "<|reserved_191|>",
|
1599 |
+
"lstrip": false,
|
1600 |
+
"normalized": false,
|
1601 |
+
"rstrip": false,
|
1602 |
+
"single_word": false,
|
1603 |
+
"special": true
|
1604 |
+
},
|
1605 |
+
"202": {
|
1606 |
+
"content": "<|reserved_192|>",
|
1607 |
+
"lstrip": false,
|
1608 |
+
"normalized": false,
|
1609 |
+
"rstrip": false,
|
1610 |
+
"single_word": false,
|
1611 |
+
"special": true
|
1612 |
+
},
|
1613 |
+
"203": {
|
1614 |
+
"content": "<|reserved_193|>",
|
1615 |
+
"lstrip": false,
|
1616 |
+
"normalized": false,
|
1617 |
+
"rstrip": false,
|
1618 |
+
"single_word": false,
|
1619 |
+
"special": true
|
1620 |
+
},
|
1621 |
+
"204": {
|
1622 |
+
"content": "<|reserved_194|>",
|
1623 |
+
"lstrip": false,
|
1624 |
+
"normalized": false,
|
1625 |
+
"rstrip": false,
|
1626 |
+
"single_word": false,
|
1627 |
+
"special": true
|
1628 |
+
},
|
1629 |
+
"205": {
|
1630 |
+
"content": "<|reserved_195|>",
|
1631 |
+
"lstrip": false,
|
1632 |
+
"normalized": false,
|
1633 |
+
"rstrip": false,
|
1634 |
+
"single_word": false,
|
1635 |
+
"special": true
|
1636 |
+
},
|
1637 |
+
"206": {
|
1638 |
+
"content": "<|reserved_196|>",
|
1639 |
+
"lstrip": false,
|
1640 |
+
"normalized": false,
|
1641 |
+
"rstrip": false,
|
1642 |
+
"single_word": false,
|
1643 |
+
"special": true
|
1644 |
+
},
|
1645 |
+
"207": {
|
1646 |
+
"content": "<|reserved_197|>",
|
1647 |
+
"lstrip": false,
|
1648 |
+
"normalized": false,
|
1649 |
+
"rstrip": false,
|
1650 |
+
"single_word": false,
|
1651 |
+
"special": true
|
1652 |
+
},
|
1653 |
+
"208": {
|
1654 |
+
"content": "<|reserved_198|>",
|
1655 |
+
"lstrip": false,
|
1656 |
+
"normalized": false,
|
1657 |
+
"rstrip": false,
|
1658 |
+
"single_word": false,
|
1659 |
+
"special": true
|
1660 |
+
},
|
1661 |
+
"209": {
|
1662 |
+
"content": "<|reserved_199|>",
|
1663 |
+
"lstrip": false,
|
1664 |
+
"normalized": false,
|
1665 |
+
"rstrip": false,
|
1666 |
+
"single_word": false,
|
1667 |
+
"special": true
|
1668 |
+
},
|
1669 |
+
"210": {
|
1670 |
+
"content": "<|reserved_200|>",
|
1671 |
+
"lstrip": false,
|
1672 |
+
"normalized": false,
|
1673 |
+
"rstrip": false,
|
1674 |
+
"single_word": false,
|
1675 |
+
"special": true
|
1676 |
+
},
|
1677 |
+
"211": {
|
1678 |
+
"content": "<|reserved_201|>",
|
1679 |
+
"lstrip": false,
|
1680 |
+
"normalized": false,
|
1681 |
+
"rstrip": false,
|
1682 |
+
"single_word": false,
|
1683 |
+
"special": true
|
1684 |
+
},
|
1685 |
+
"212": {
|
1686 |
+
"content": "<|reserved_202|>",
|
1687 |
+
"lstrip": false,
|
1688 |
+
"normalized": false,
|
1689 |
+
"rstrip": false,
|
1690 |
+
"single_word": false,
|
1691 |
+
"special": true
|
1692 |
+
},
|
1693 |
+
"213": {
|
1694 |
+
"content": "<|reserved_203|>",
|
1695 |
+
"lstrip": false,
|
1696 |
+
"normalized": false,
|
1697 |
+
"rstrip": false,
|
1698 |
+
"single_word": false,
|
1699 |
+
"special": true
|
1700 |
+
},
|
1701 |
+
"214": {
|
1702 |
+
"content": "<|reserved_204|>",
|
1703 |
+
"lstrip": false,
|
1704 |
+
"normalized": false,
|
1705 |
+
"rstrip": false,
|
1706 |
+
"single_word": false,
|
1707 |
+
"special": true
|
1708 |
+
},
|
1709 |
+
"215": {
|
1710 |
+
"content": "<|reserved_205|>",
|
1711 |
+
"lstrip": false,
|
1712 |
+
"normalized": false,
|
1713 |
+
"rstrip": false,
|
1714 |
+
"single_word": false,
|
1715 |
+
"special": true
|
1716 |
+
},
|
1717 |
+
"216": {
|
1718 |
+
"content": "<|reserved_206|>",
|
1719 |
+
"lstrip": false,
|
1720 |
+
"normalized": false,
|
1721 |
+
"rstrip": false,
|
1722 |
+
"single_word": false,
|
1723 |
+
"special": true
|
1724 |
+
},
|
1725 |
+
"217": {
|
1726 |
+
"content": "<|reserved_207|>",
|
1727 |
+
"lstrip": false,
|
1728 |
+
"normalized": false,
|
1729 |
+
"rstrip": false,
|
1730 |
+
"single_word": false,
|
1731 |
+
"special": true
|
1732 |
+
},
|
1733 |
+
"218": {
|
1734 |
+
"content": "<|reserved_208|>",
|
1735 |
+
"lstrip": false,
|
1736 |
+
"normalized": false,
|
1737 |
+
"rstrip": false,
|
1738 |
+
"single_word": false,
|
1739 |
+
"special": true
|
1740 |
+
},
|
1741 |
+
"219": {
|
1742 |
+
"content": "<|reserved_209|>",
|
1743 |
+
"lstrip": false,
|
1744 |
+
"normalized": false,
|
1745 |
+
"rstrip": false,
|
1746 |
+
"single_word": false,
|
1747 |
+
"special": true
|
1748 |
+
},
|
1749 |
+
"220": {
|
1750 |
+
"content": "<|reserved_210|>",
|
1751 |
+
"lstrip": false,
|
1752 |
+
"normalized": false,
|
1753 |
+
"rstrip": false,
|
1754 |
+
"single_word": false,
|
1755 |
+
"special": true
|
1756 |
+
},
|
1757 |
+
"221": {
|
1758 |
+
"content": "<|reserved_211|>",
|
1759 |
+
"lstrip": false,
|
1760 |
+
"normalized": false,
|
1761 |
+
"rstrip": false,
|
1762 |
+
"single_word": false,
|
1763 |
+
"special": true
|
1764 |
+
},
|
1765 |
+
"222": {
|
1766 |
+
"content": "<|reserved_212|>",
|
1767 |
+
"lstrip": false,
|
1768 |
+
"normalized": false,
|
1769 |
+
"rstrip": false,
|
1770 |
+
"single_word": false,
|
1771 |
+
"special": true
|
1772 |
+
},
|
1773 |
+
"223": {
|
1774 |
+
"content": "<|reserved_213|>",
|
1775 |
+
"lstrip": false,
|
1776 |
+
"normalized": false,
|
1777 |
+
"rstrip": false,
|
1778 |
+
"single_word": false,
|
1779 |
+
"special": true
|
1780 |
+
},
|
1781 |
+
"224": {
|
1782 |
+
"content": "<|reserved_214|>",
|
1783 |
+
"lstrip": false,
|
1784 |
+
"normalized": false,
|
1785 |
+
"rstrip": false,
|
1786 |
+
"single_word": false,
|
1787 |
+
"special": true
|
1788 |
+
},
|
1789 |
+
"225": {
|
1790 |
+
"content": "<|reserved_215|>",
|
1791 |
+
"lstrip": false,
|
1792 |
+
"normalized": false,
|
1793 |
+
"rstrip": false,
|
1794 |
+
"single_word": false,
|
1795 |
+
"special": true
|
1796 |
+
},
|
1797 |
+
"226": {
|
1798 |
+
"content": "<|reserved_216|>",
|
1799 |
+
"lstrip": false,
|
1800 |
+
"normalized": false,
|
1801 |
+
"rstrip": false,
|
1802 |
+
"single_word": false,
|
1803 |
+
"special": true
|
1804 |
+
},
|
1805 |
+
"227": {
|
1806 |
+
"content": "<|reserved_217|>",
|
1807 |
+
"lstrip": false,
|
1808 |
+
"normalized": false,
|
1809 |
+
"rstrip": false,
|
1810 |
+
"single_word": false,
|
1811 |
+
"special": true
|
1812 |
+
},
|
1813 |
+
"228": {
|
1814 |
+
"content": "<|reserved_218|>",
|
1815 |
+
"lstrip": false,
|
1816 |
+
"normalized": false,
|
1817 |
+
"rstrip": false,
|
1818 |
+
"single_word": false,
|
1819 |
+
"special": true
|
1820 |
+
},
|
1821 |
+
"229": {
|
1822 |
+
"content": "<|reserved_219|>",
|
1823 |
+
"lstrip": false,
|
1824 |
+
"normalized": false,
|
1825 |
+
"rstrip": false,
|
1826 |
+
"single_word": false,
|
1827 |
+
"special": true
|
1828 |
+
},
|
1829 |
+
"230": {
|
1830 |
+
"content": "<|reserved_220|>",
|
1831 |
+
"lstrip": false,
|
1832 |
+
"normalized": false,
|
1833 |
+
"rstrip": false,
|
1834 |
+
"single_word": false,
|
1835 |
+
"special": true
|
1836 |
+
},
|
1837 |
+
"231": {
|
1838 |
+
"content": "<|reserved_221|>",
|
1839 |
+
"lstrip": false,
|
1840 |
+
"normalized": false,
|
1841 |
+
"rstrip": false,
|
1842 |
+
"single_word": false,
|
1843 |
+
"special": true
|
1844 |
+
},
|
1845 |
+
"232": {
|
1846 |
+
"content": "<|reserved_222|>",
|
1847 |
+
"lstrip": false,
|
1848 |
+
"normalized": false,
|
1849 |
+
"rstrip": false,
|
1850 |
+
"single_word": false,
|
1851 |
+
"special": true
|
1852 |
+
},
|
1853 |
+
"233": {
|
1854 |
+
"content": "<|reserved_223|>",
|
1855 |
+
"lstrip": false,
|
1856 |
+
"normalized": false,
|
1857 |
+
"rstrip": false,
|
1858 |
+
"single_word": false,
|
1859 |
+
"special": true
|
1860 |
+
},
|
1861 |
+
"234": {
|
1862 |
+
"content": "<|reserved_224|>",
|
1863 |
+
"lstrip": false,
|
1864 |
+
"normalized": false,
|
1865 |
+
"rstrip": false,
|
1866 |
+
"single_word": false,
|
1867 |
+
"special": true
|
1868 |
+
},
|
1869 |
+
"235": {
|
1870 |
+
"content": "<|reserved_225|>",
|
1871 |
+
"lstrip": false,
|
1872 |
+
"normalized": false,
|
1873 |
+
"rstrip": false,
|
1874 |
+
"single_word": false,
|
1875 |
+
"special": true
|
1876 |
+
},
|
1877 |
+
"236": {
|
1878 |
+
"content": "<|reserved_226|>",
|
1879 |
+
"lstrip": false,
|
1880 |
+
"normalized": false,
|
1881 |
+
"rstrip": false,
|
1882 |
+
"single_word": false,
|
1883 |
+
"special": true
|
1884 |
+
},
|
1885 |
+
"237": {
|
1886 |
+
"content": "<|reserved_227|>",
|
1887 |
+
"lstrip": false,
|
1888 |
+
"normalized": false,
|
1889 |
+
"rstrip": false,
|
1890 |
+
"single_word": false,
|
1891 |
+
"special": true
|
1892 |
+
},
|
1893 |
+
"238": {
|
1894 |
+
"content": "<|reserved_228|>",
|
1895 |
+
"lstrip": false,
|
1896 |
+
"normalized": false,
|
1897 |
+
"rstrip": false,
|
1898 |
+
"single_word": false,
|
1899 |
+
"special": true
|
1900 |
+
},
|
1901 |
+
"239": {
|
1902 |
+
"content": "<|reserved_229|>",
|
1903 |
+
"lstrip": false,
|
1904 |
+
"normalized": false,
|
1905 |
+
"rstrip": false,
|
1906 |
+
"single_word": false,
|
1907 |
+
"special": true
|
1908 |
+
},
|
1909 |
+
"240": {
|
1910 |
+
"content": "<|reserved_230|>",
|
1911 |
+
"lstrip": false,
|
1912 |
+
"normalized": false,
|
1913 |
+
"rstrip": false,
|
1914 |
+
"single_word": false,
|
1915 |
+
"special": true
|
1916 |
+
},
|
1917 |
+
"241": {
|
1918 |
+
"content": "<|reserved_231|>",
|
1919 |
+
"lstrip": false,
|
1920 |
+
"normalized": false,
|
1921 |
+
"rstrip": false,
|
1922 |
+
"single_word": false,
|
1923 |
+
"special": true
|
1924 |
+
},
|
1925 |
+
"242": {
|
1926 |
+
"content": "<|reserved_232|>",
|
1927 |
+
"lstrip": false,
|
1928 |
+
"normalized": false,
|
1929 |
+
"rstrip": false,
|
1930 |
+
"single_word": false,
|
1931 |
+
"special": true
|
1932 |
+
},
|
1933 |
+
"243": {
|
1934 |
+
"content": "<|reserved_233|>",
|
1935 |
+
"lstrip": false,
|
1936 |
+
"normalized": false,
|
1937 |
+
"rstrip": false,
|
1938 |
+
"single_word": false,
|
1939 |
+
"special": true
|
1940 |
+
},
|
1941 |
+
"244": {
|
1942 |
+
"content": "<|reserved_234|>",
|
1943 |
+
"lstrip": false,
|
1944 |
+
"normalized": false,
|
1945 |
+
"rstrip": false,
|
1946 |
+
"single_word": false,
|
1947 |
+
"special": true
|
1948 |
+
},
|
1949 |
+
"245": {
|
1950 |
+
"content": "<|reserved_235|>",
|
1951 |
+
"lstrip": false,
|
1952 |
+
"normalized": false,
|
1953 |
+
"rstrip": false,
|
1954 |
+
"single_word": false,
|
1955 |
+
"special": true
|
1956 |
+
},
|
1957 |
+
"246": {
|
1958 |
+
"content": "<|reserved_236|>",
|
1959 |
+
"lstrip": false,
|
1960 |
+
"normalized": false,
|
1961 |
+
"rstrip": false,
|
1962 |
+
"single_word": false,
|
1963 |
+
"special": true
|
1964 |
+
},
|
1965 |
+
"247": {
|
1966 |
+
"content": "<|reserved_237|>",
|
1967 |
+
"lstrip": false,
|
1968 |
+
"normalized": false,
|
1969 |
+
"rstrip": false,
|
1970 |
+
"single_word": false,
|
1971 |
+
"special": true
|
1972 |
+
},
|
1973 |
+
"248": {
|
1974 |
+
"content": "<|reserved_238|>",
|
1975 |
+
"lstrip": false,
|
1976 |
+
"normalized": false,
|
1977 |
+
"rstrip": false,
|
1978 |
+
"single_word": false,
|
1979 |
+
"special": true
|
1980 |
+
},
|
1981 |
+
"249": {
|
1982 |
+
"content": "<|reserved_239|>",
|
1983 |
+
"lstrip": false,
|
1984 |
+
"normalized": false,
|
1985 |
+
"rstrip": false,
|
1986 |
+
"single_word": false,
|
1987 |
+
"special": true
|
1988 |
+
},
|
1989 |
+
"250": {
|
1990 |
+
"content": "<|reserved_240|>",
|
1991 |
+
"lstrip": false,
|
1992 |
+
"normalized": false,
|
1993 |
+
"rstrip": false,
|
1994 |
+
"single_word": false,
|
1995 |
+
"special": true
|
1996 |
+
},
|
1997 |
+
"251": {
|
1998 |
+
"content": "<|reserved_241|>",
|
1999 |
+
"lstrip": false,
|
2000 |
+
"normalized": false,
|
2001 |
+
"rstrip": false,
|
2002 |
+
"single_word": false,
|
2003 |
+
"special": true
|
2004 |
+
},
|
2005 |
+
"252": {
|
2006 |
+
"content": "<|reserved_242|>",
|
2007 |
+
"lstrip": false,
|
2008 |
+
"normalized": false,
|
2009 |
+
"rstrip": false,
|
2010 |
+
"single_word": false,
|
2011 |
+
"special": true
|
2012 |
+
},
|
2013 |
+
"253": {
|
2014 |
+
"content": "<|reserved_243|>",
|
2015 |
+
"lstrip": false,
|
2016 |
+
"normalized": false,
|
2017 |
+
"rstrip": false,
|
2018 |
+
"single_word": false,
|
2019 |
+
"special": true
|
2020 |
+
},
|
2021 |
+
"254": {
|
2022 |
+
"content": "<|reserved_244|>",
|
2023 |
+
"lstrip": false,
|
2024 |
+
"normalized": false,
|
2025 |
+
"rstrip": false,
|
2026 |
+
"single_word": false,
|
2027 |
+
"special": true
|
2028 |
+
},
|
2029 |
+
"255": {
|
2030 |
+
"content": "<|reserved_245|>",
|
2031 |
+
"lstrip": false,
|
2032 |
+
"normalized": false,
|
2033 |
+
"rstrip": false,
|
2034 |
+
"single_word": false,
|
2035 |
+
"special": true
|
2036 |
+
},
|
2037 |
+
"256": {
|
2038 |
+
"content": "<|reserved_246|>",
|
2039 |
+
"lstrip": false,
|
2040 |
+
"normalized": false,
|
2041 |
+
"rstrip": false,
|
2042 |
+
"single_word": false,
|
2043 |
+
"special": true
|
2044 |
+
},
|
2045 |
+
"257": {
|
2046 |
+
"content": "<|reserved_247|>",
|
2047 |
+
"lstrip": false,
|
2048 |
+
"normalized": false,
|
2049 |
+
"rstrip": false,
|
2050 |
+
"single_word": false,
|
2051 |
+
"special": true
|
2052 |
+
},
|
2053 |
+
"258": {
|
2054 |
+
"content": "<|reserved_248|>",
|
2055 |
+
"lstrip": false,
|
2056 |
+
"normalized": false,
|
2057 |
+
"rstrip": false,
|
2058 |
+
"single_word": false,
|
2059 |
+
"special": true
|
2060 |
+
},
|
2061 |
+
"259": {
|
2062 |
+
"content": "<|reserved_249|>",
|
2063 |
+
"lstrip": false,
|
2064 |
+
"normalized": false,
|
2065 |
+
"rstrip": false,
|
2066 |
+
"single_word": false,
|
2067 |
+
"special": true
|
2068 |
+
},
|
2069 |
+
"260": {
|
2070 |
+
"content": "<|reserved_250|>",
|
2071 |
+
"lstrip": false,
|
2072 |
+
"normalized": false,
|
2073 |
+
"rstrip": false,
|
2074 |
+
"single_word": false,
|
2075 |
+
"special": true
|
2076 |
+
},
|
2077 |
+
"261": {
|
2078 |
+
"content": "<|reserved_251|>",
|
2079 |
+
"lstrip": false,
|
2080 |
+
"normalized": false,
|
2081 |
+
"rstrip": false,
|
2082 |
+
"single_word": false,
|
2083 |
+
"special": true
|
2084 |
+
},
|
2085 |
+
"262": {
|
2086 |
+
"content": "<|reserved_252|>",
|
2087 |
+
"lstrip": false,
|
2088 |
+
"normalized": false,
|
2089 |
+
"rstrip": false,
|
2090 |
+
"single_word": false,
|
2091 |
+
"special": true
|
2092 |
+
},
|
2093 |
+
"263": {
|
2094 |
+
"content": "<|reserved_253|>",
|
2095 |
+
"lstrip": false,
|
2096 |
+
"normalized": false,
|
2097 |
+
"rstrip": false,
|
2098 |
+
"single_word": false,
|
2099 |
+
"special": true
|
2100 |
+
},
|
2101 |
+
"264": {
|
2102 |
+
"content": "<|reserved_254|>",
|
2103 |
+
"lstrip": false,
|
2104 |
+
"normalized": false,
|
2105 |
+
"rstrip": false,
|
2106 |
+
"single_word": false,
|
2107 |
+
"special": true
|
2108 |
+
},
|
2109 |
+
"265": {
|
2110 |
+
"content": "<|reserved_255|>",
|
2111 |
+
"lstrip": false,
|
2112 |
+
"normalized": false,
|
2113 |
+
"rstrip": false,
|
2114 |
+
"single_word": false,
|
2115 |
+
"special": true
|
2116 |
+
},
|
2117 |
+
"266": {
|
2118 |
+
"content": "<|reserved_256|>",
|
2119 |
+
"lstrip": false,
|
2120 |
+
"normalized": false,
|
2121 |
+
"rstrip": false,
|
2122 |
+
"single_word": false,
|
2123 |
+
"special": true
|
2124 |
+
},
|
2125 |
+
"267": {
|
2126 |
+
"content": "<|reserved_257|>",
|
2127 |
+
"lstrip": false,
|
2128 |
+
"normalized": false,
|
2129 |
+
"rstrip": false,
|
2130 |
+
"single_word": false,
|
2131 |
+
"special": true
|
2132 |
+
},
|
2133 |
+
"268": {
|
2134 |
+
"content": "<|reserved_258|>",
|
2135 |
+
"lstrip": false,
|
2136 |
+
"normalized": false,
|
2137 |
+
"rstrip": false,
|
2138 |
+
"single_word": false,
|
2139 |
+
"special": true
|
2140 |
+
},
|
2141 |
+
"269": {
|
2142 |
+
"content": "<|reserved_259|>",
|
2143 |
+
"lstrip": false,
|
2144 |
+
"normalized": false,
|
2145 |
+
"rstrip": false,
|
2146 |
+
"single_word": false,
|
2147 |
+
"special": true
|
2148 |
+
},
|
2149 |
+
"270": {
|
2150 |
+
"content": "<|reserved_260|>",
|
2151 |
+
"lstrip": false,
|
2152 |
+
"normalized": false,
|
2153 |
+
"rstrip": false,
|
2154 |
+
"single_word": false,
|
2155 |
+
"special": true
|
2156 |
+
},
|
2157 |
+
"271": {
|
2158 |
+
"content": "<|reserved_261|>",
|
2159 |
+
"lstrip": false,
|
2160 |
+
"normalized": false,
|
2161 |
+
"rstrip": false,
|
2162 |
+
"single_word": false,
|
2163 |
+
"special": true
|
2164 |
+
},
|
2165 |
+
"272": {
|
2166 |
+
"content": "<|reserved_262|>",
|
2167 |
+
"lstrip": false,
|
2168 |
+
"normalized": false,
|
2169 |
+
"rstrip": false,
|
2170 |
+
"single_word": false,
|
2171 |
+
"special": true
|
2172 |
+
},
|
2173 |
+
"273": {
|
2174 |
+
"content": "<|reserved_263|>",
|
2175 |
+
"lstrip": false,
|
2176 |
+
"normalized": false,
|
2177 |
+
"rstrip": false,
|
2178 |
+
"single_word": false,
|
2179 |
+
"special": true
|
2180 |
+
},
|
2181 |
+
"274": {
|
2182 |
+
"content": "<|reserved_264|>",
|
2183 |
+
"lstrip": false,
|
2184 |
+
"normalized": false,
|
2185 |
+
"rstrip": false,
|
2186 |
+
"single_word": false,
|
2187 |
+
"special": true
|
2188 |
+
},
|
2189 |
+
"275": {
|
2190 |
+
"content": "<|reserved_265|>",
|
2191 |
+
"lstrip": false,
|
2192 |
+
"normalized": false,
|
2193 |
+
"rstrip": false,
|
2194 |
+
"single_word": false,
|
2195 |
+
"special": true
|
2196 |
+
},
|
2197 |
+
"276": {
|
2198 |
+
"content": "<|reserved_266|>",
|
2199 |
+
"lstrip": false,
|
2200 |
+
"normalized": false,
|
2201 |
+
"rstrip": false,
|
2202 |
+
"single_word": false,
|
2203 |
+
"special": true
|
2204 |
+
},
|
2205 |
+
"277": {
|
2206 |
+
"content": "<|reserved_267|>",
|
2207 |
+
"lstrip": false,
|
2208 |
+
"normalized": false,
|
2209 |
+
"rstrip": false,
|
2210 |
+
"single_word": false,
|
2211 |
+
"special": true
|
2212 |
+
},
|
2213 |
+
"278": {
|
2214 |
+
"content": "<|reserved_268|>",
|
2215 |
+
"lstrip": false,
|
2216 |
+
"normalized": false,
|
2217 |
+
"rstrip": false,
|
2218 |
+
"single_word": false,
|
2219 |
+
"special": true
|
2220 |
+
},
|
2221 |
+
"279": {
|
2222 |
+
"content": "<|reserved_269|>",
|
2223 |
+
"lstrip": false,
|
2224 |
+
"normalized": false,
|
2225 |
+
"rstrip": false,
|
2226 |
+
"single_word": false,
|
2227 |
+
"special": true
|
2228 |
+
},
|
2229 |
+
"280": {
|
2230 |
+
"content": "<|reserved_270|>",
|
2231 |
+
"lstrip": false,
|
2232 |
+
"normalized": false,
|
2233 |
+
"rstrip": false,
|
2234 |
+
"single_word": false,
|
2235 |
+
"special": true
|
2236 |
+
},
|
2237 |
+
"281": {
|
2238 |
+
"content": "<|reserved_271|>",
|
2239 |
+
"lstrip": false,
|
2240 |
+
"normalized": false,
|
2241 |
+
"rstrip": false,
|
2242 |
+
"single_word": false,
|
2243 |
+
"special": true
|
2244 |
+
},
|
2245 |
+
"282": {
|
2246 |
+
"content": "<|reserved_272|>",
|
2247 |
+
"lstrip": false,
|
2248 |
+
"normalized": false,
|
2249 |
+
"rstrip": false,
|
2250 |
+
"single_word": false,
|
2251 |
+
"special": true
|
2252 |
+
},
|
2253 |
+
"283": {
|
2254 |
+
"content": "<|reserved_273|>",
|
2255 |
+
"lstrip": false,
|
2256 |
+
"normalized": false,
|
2257 |
+
"rstrip": false,
|
2258 |
+
"single_word": false,
|
2259 |
+
"special": true
|
2260 |
+
},
|
2261 |
+
"284": {
|
2262 |
+
"content": "<|reserved_274|>",
|
2263 |
+
"lstrip": false,
|
2264 |
+
"normalized": false,
|
2265 |
+
"rstrip": false,
|
2266 |
+
"single_word": false,
|
2267 |
+
"special": true
|
2268 |
+
},
|
2269 |
+
"285": {
|
2270 |
+
"content": "<|reserved_275|>",
|
2271 |
+
"lstrip": false,
|
2272 |
+
"normalized": false,
|
2273 |
+
"rstrip": false,
|
2274 |
+
"single_word": false,
|
2275 |
+
"special": true
|
2276 |
+
},
|
2277 |
+
"286": {
|
2278 |
+
"content": "<|reserved_276|>",
|
2279 |
+
"lstrip": false,
|
2280 |
+
"normalized": false,
|
2281 |
+
"rstrip": false,
|
2282 |
+
"single_word": false,
|
2283 |
+
"special": true
|
2284 |
+
},
|
2285 |
+
"287": {
|
2286 |
+
"content": "<|reserved_277|>",
|
2287 |
+
"lstrip": false,
|
2288 |
+
"normalized": false,
|
2289 |
+
"rstrip": false,
|
2290 |
+
"single_word": false,
|
2291 |
+
"special": true
|
2292 |
+
},
|
2293 |
+
"288": {
|
2294 |
+
"content": "<|reserved_278|>",
|
2295 |
+
"lstrip": false,
|
2296 |
+
"normalized": false,
|
2297 |
+
"rstrip": false,
|
2298 |
+
"single_word": false,
|
2299 |
+
"special": true
|
2300 |
+
},
|
2301 |
+
"289": {
|
2302 |
+
"content": "<|reserved_279|>",
|
2303 |
+
"lstrip": false,
|
2304 |
+
"normalized": false,
|
2305 |
+
"rstrip": false,
|
2306 |
+
"single_word": false,
|
2307 |
+
"special": true
|
2308 |
+
},
|
2309 |
+
"290": {
|
2310 |
+
"content": "<|reserved_280|>",
|
2311 |
+
"lstrip": false,
|
2312 |
+
"normalized": false,
|
2313 |
+
"rstrip": false,
|
2314 |
+
"single_word": false,
|
2315 |
+
"special": true
|
2316 |
+
},
|
2317 |
+
"291": {
|
2318 |
+
"content": "<|reserved_281|>",
|
2319 |
+
"lstrip": false,
|
2320 |
+
"normalized": false,
|
2321 |
+
"rstrip": false,
|
2322 |
+
"single_word": false,
|
2323 |
+
"special": true
|
2324 |
+
},
|
2325 |
+
"292": {
|
2326 |
+
"content": "<|reserved_282|>",
|
2327 |
+
"lstrip": false,
|
2328 |
+
"normalized": false,
|
2329 |
+
"rstrip": false,
|
2330 |
+
"single_word": false,
|
2331 |
+
"special": true
|
2332 |
+
},
|
2333 |
+
"293": {
|
2334 |
+
"content": "<|reserved_283|>",
|
2335 |
+
"lstrip": false,
|
2336 |
+
"normalized": false,
|
2337 |
+
"rstrip": false,
|
2338 |
+
"single_word": false,
|
2339 |
+
"special": true
|
2340 |
+
},
|
2341 |
+
"294": {
|
2342 |
+
"content": "<|reserved_284|>",
|
2343 |
+
"lstrip": false,
|
2344 |
+
"normalized": false,
|
2345 |
+
"rstrip": false,
|
2346 |
+
"single_word": false,
|
2347 |
+
"special": true
|
2348 |
+
},
|
2349 |
+
"295": {
|
2350 |
+
"content": "<|reserved_285|>",
|
2351 |
+
"lstrip": false,
|
2352 |
+
"normalized": false,
|
2353 |
+
"rstrip": false,
|
2354 |
+
"single_word": false,
|
2355 |
+
"special": true
|
2356 |
+
},
|
2357 |
+
"296": {
|
2358 |
+
"content": "<|reserved_286|>",
|
2359 |
+
"lstrip": false,
|
2360 |
+
"normalized": false,
|
2361 |
+
"rstrip": false,
|
2362 |
+
"single_word": false,
|
2363 |
+
"special": true
|
2364 |
+
},
|
2365 |
+
"297": {
|
2366 |
+
"content": "<|reserved_287|>",
|
2367 |
+
"lstrip": false,
|
2368 |
+
"normalized": false,
|
2369 |
+
"rstrip": false,
|
2370 |
+
"single_word": false,
|
2371 |
+
"special": true
|
2372 |
+
},
|
2373 |
+
"298": {
|
2374 |
+
"content": "<|reserved_288|>",
|
2375 |
+
"lstrip": false,
|
2376 |
+
"normalized": false,
|
2377 |
+
"rstrip": false,
|
2378 |
+
"single_word": false,
|
2379 |
+
"special": true
|
2380 |
+
},
|
2381 |
+
"299": {
|
2382 |
+
"content": "<|reserved_289|>",
|
2383 |
+
"lstrip": false,
|
2384 |
+
"normalized": false,
|
2385 |
+
"rstrip": false,
|
2386 |
+
"single_word": false,
|
2387 |
+
"special": true
|
2388 |
+
},
|
2389 |
+
"300": {
|
2390 |
+
"content": "<|reserved_290|>",
|
2391 |
+
"lstrip": false,
|
2392 |
+
"normalized": false,
|
2393 |
+
"rstrip": false,
|
2394 |
+
"single_word": false,
|
2395 |
+
"special": true
|
2396 |
+
},
|
2397 |
+
"301": {
|
2398 |
+
"content": "<|reserved_291|>",
|
2399 |
+
"lstrip": false,
|
2400 |
+
"normalized": false,
|
2401 |
+
"rstrip": false,
|
2402 |
+
"single_word": false,
|
2403 |
+
"special": true
|
2404 |
+
},
|
2405 |
+
"302": {
|
2406 |
+
"content": "<|reserved_292|>",
|
2407 |
+
"lstrip": false,
|
2408 |
+
"normalized": false,
|
2409 |
+
"rstrip": false,
|
2410 |
+
"single_word": false,
|
2411 |
+
"special": true
|
2412 |
+
},
|
2413 |
+
"303": {
|
2414 |
+
"content": "<|reserved_293|>",
|
2415 |
+
"lstrip": false,
|
2416 |
+
"normalized": false,
|
2417 |
+
"rstrip": false,
|
2418 |
+
"single_word": false,
|
2419 |
+
"special": true
|
2420 |
+
},
|
2421 |
+
"304": {
|
2422 |
+
"content": "<|reserved_294|>",
|
2423 |
+
"lstrip": false,
|
2424 |
+
"normalized": false,
|
2425 |
+
"rstrip": false,
|
2426 |
+
"single_word": false,
|
2427 |
+
"special": true
|
2428 |
+
},
|
2429 |
+
"305": {
|
2430 |
+
"content": "<|reserved_295|>",
|
2431 |
+
"lstrip": false,
|
2432 |
+
"normalized": false,
|
2433 |
+
"rstrip": false,
|
2434 |
+
"single_word": false,
|
2435 |
+
"special": true
|
2436 |
+
},
|
2437 |
+
"306": {
|
2438 |
+
"content": "<|reserved_296|>",
|
2439 |
+
"lstrip": false,
|
2440 |
+
"normalized": false,
|
2441 |
+
"rstrip": false,
|
2442 |
+
"single_word": false,
|
2443 |
+
"special": true
|
2444 |
+
},
|
2445 |
+
"307": {
|
2446 |
+
"content": "<|reserved_297|>",
|
2447 |
+
"lstrip": false,
|
2448 |
+
"normalized": false,
|
2449 |
+
"rstrip": false,
|
2450 |
+
"single_word": false,
|
2451 |
+
"special": true
|
2452 |
+
},
|
2453 |
+
"308": {
|
2454 |
+
"content": "<|reserved_298|>",
|
2455 |
+
"lstrip": false,
|
2456 |
+
"normalized": false,
|
2457 |
+
"rstrip": false,
|
2458 |
+
"single_word": false,
|
2459 |
+
"special": true
|
2460 |
+
},
|
2461 |
+
"309": {
|
2462 |
+
"content": "<|reserved_299|>",
|
2463 |
+
"lstrip": false,
|
2464 |
+
"normalized": false,
|
2465 |
+
"rstrip": false,
|
2466 |
+
"single_word": false,
|
2467 |
+
"special": true
|
2468 |
+
},
|
2469 |
+
"310": {
|
2470 |
+
"content": "<|reserved_300|>",
|
2471 |
+
"lstrip": false,
|
2472 |
+
"normalized": false,
|
2473 |
+
"rstrip": false,
|
2474 |
+
"single_word": false,
|
2475 |
+
"special": true
|
2476 |
+
},
|
2477 |
+
"311": {
|
2478 |
+
"content": "<|reserved_301|>",
|
2479 |
+
"lstrip": false,
|
2480 |
+
"normalized": false,
|
2481 |
+
"rstrip": false,
|
2482 |
+
"single_word": false,
|
2483 |
+
"special": true
|
2484 |
+
},
|
2485 |
+
"312": {
|
2486 |
+
"content": "<|reserved_302|>",
|
2487 |
+
"lstrip": false,
|
2488 |
+
"normalized": false,
|
2489 |
+
"rstrip": false,
|
2490 |
+
"single_word": false,
|
2491 |
+
"special": true
|
2492 |
+
},
|
2493 |
+
"313": {
|
2494 |
+
"content": "<|reserved_303|>",
|
2495 |
+
"lstrip": false,
|
2496 |
+
"normalized": false,
|
2497 |
+
"rstrip": false,
|
2498 |
+
"single_word": false,
|
2499 |
+
"special": true
|
2500 |
+
},
|
2501 |
+
"314": {
|
2502 |
+
"content": "<|reserved_304|>",
|
2503 |
+
"lstrip": false,
|
2504 |
+
"normalized": false,
|
2505 |
+
"rstrip": false,
|
2506 |
+
"single_word": false,
|
2507 |
+
"special": true
|
2508 |
+
},
|
2509 |
+
"315": {
|
2510 |
+
"content": "<|reserved_305|>",
|
2511 |
+
"lstrip": false,
|
2512 |
+
"normalized": false,
|
2513 |
+
"rstrip": false,
|
2514 |
+
"single_word": false,
|
2515 |
+
"special": true
|
2516 |
+
},
|
2517 |
+
"316": {
|
2518 |
+
"content": "<|reserved_306|>",
|
2519 |
+
"lstrip": false,
|
2520 |
+
"normalized": false,
|
2521 |
+
"rstrip": false,
|
2522 |
+
"single_word": false,
|
2523 |
+
"special": true
|
2524 |
+
},
|
2525 |
+
"317": {
|
2526 |
+
"content": "<|reserved_307|>",
|
2527 |
+
"lstrip": false,
|
2528 |
+
"normalized": false,
|
2529 |
+
"rstrip": false,
|
2530 |
+
"single_word": false,
|
2531 |
+
"special": true
|
2532 |
+
},
|
2533 |
+
"318": {
|
2534 |
+
"content": "<|reserved_308|>",
|
2535 |
+
"lstrip": false,
|
2536 |
+
"normalized": false,
|
2537 |
+
"rstrip": false,
|
2538 |
+
"single_word": false,
|
2539 |
+
"special": true
|
2540 |
+
},
|
2541 |
+
"319": {
|
2542 |
+
"content": "<|reserved_309|>",
|
2543 |
+
"lstrip": false,
|
2544 |
+
"normalized": false,
|
2545 |
+
"rstrip": false,
|
2546 |
+
"single_word": false,
|
2547 |
+
"special": true
|
2548 |
+
},
|
2549 |
+
"320": {
|
2550 |
+
"content": "<|reserved_310|>",
|
2551 |
+
"lstrip": false,
|
2552 |
+
"normalized": false,
|
2553 |
+
"rstrip": false,
|
2554 |
+
"single_word": false,
|
2555 |
+
"special": true
|
2556 |
+
},
|
2557 |
+
"321": {
|
2558 |
+
"content": "<|reserved_311|>",
|
2559 |
+
"lstrip": false,
|
2560 |
+
"normalized": false,
|
2561 |
+
"rstrip": false,
|
2562 |
+
"single_word": false,
|
2563 |
+
"special": true
|
2564 |
+
},
|
2565 |
+
"322": {
|
2566 |
+
"content": "<|reserved_312|>",
|
2567 |
+
"lstrip": false,
|
2568 |
+
"normalized": false,
|
2569 |
+
"rstrip": false,
|
2570 |
+
"single_word": false,
|
2571 |
+
"special": true
|
2572 |
+
},
|
2573 |
+
"323": {
|
2574 |
+
"content": "<|reserved_313|>",
|
2575 |
+
"lstrip": false,
|
2576 |
+
"normalized": false,
|
2577 |
+
"rstrip": false,
|
2578 |
+
"single_word": false,
|
2579 |
+
"special": true
|
2580 |
+
},
|
2581 |
+
"324": {
|
2582 |
+
"content": "<|reserved_314|>",
|
2583 |
+
"lstrip": false,
|
2584 |
+
"normalized": false,
|
2585 |
+
"rstrip": false,
|
2586 |
+
"single_word": false,
|
2587 |
+
"special": true
|
2588 |
+
},
|
2589 |
+
"325": {
|
2590 |
+
"content": "<|reserved_315|>",
|
2591 |
+
"lstrip": false,
|
2592 |
+
"normalized": false,
|
2593 |
+
"rstrip": false,
|
2594 |
+
"single_word": false,
|
2595 |
+
"special": true
|
2596 |
+
},
|
2597 |
+
"326": {
|
2598 |
+
"content": "<|reserved_316|>",
|
2599 |
+
"lstrip": false,
|
2600 |
+
"normalized": false,
|
2601 |
+
"rstrip": false,
|
2602 |
+
"single_word": false,
|
2603 |
+
"special": true
|
2604 |
+
},
|
2605 |
+
"327": {
|
2606 |
+
"content": "<|reserved_317|>",
|
2607 |
+
"lstrip": false,
|
2608 |
+
"normalized": false,
|
2609 |
+
"rstrip": false,
|
2610 |
+
"single_word": false,
|
2611 |
+
"special": true
|
2612 |
+
},
|
2613 |
+
"328": {
|
2614 |
+
"content": "<|reserved_318|>",
|
2615 |
+
"lstrip": false,
|
2616 |
+
"normalized": false,
|
2617 |
+
"rstrip": false,
|
2618 |
+
"single_word": false,
|
2619 |
+
"special": true
|
2620 |
+
},
|
2621 |
+
"329": {
|
2622 |
+
"content": "<|reserved_319|>",
|
2623 |
+
"lstrip": false,
|
2624 |
+
"normalized": false,
|
2625 |
+
"rstrip": false,
|
2626 |
+
"single_word": false,
|
2627 |
+
"special": true
|
2628 |
+
},
|
2629 |
+
"330": {
|
2630 |
+
"content": "<|reserved_320|>",
|
2631 |
+
"lstrip": false,
|
2632 |
+
"normalized": false,
|
2633 |
+
"rstrip": false,
|
2634 |
+
"single_word": false,
|
2635 |
+
"special": true
|
2636 |
+
},
|
2637 |
+
"331": {
|
2638 |
+
"content": "<|reserved_321|>",
|
2639 |
+
"lstrip": false,
|
2640 |
+
"normalized": false,
|
2641 |
+
"rstrip": false,
|
2642 |
+
"single_word": false,
|
2643 |
+
"special": true
|
2644 |
+
},
|
2645 |
+
"332": {
|
2646 |
+
"content": "<|reserved_322|>",
|
2647 |
+
"lstrip": false,
|
2648 |
+
"normalized": false,
|
2649 |
+
"rstrip": false,
|
2650 |
+
"single_word": false,
|
2651 |
+
"special": true
|
2652 |
+
},
|
2653 |
+
"333": {
|
2654 |
+
"content": "<|reserved_323|>",
|
2655 |
+
"lstrip": false,
|
2656 |
+
"normalized": false,
|
2657 |
+
"rstrip": false,
|
2658 |
+
"single_word": false,
|
2659 |
+
"special": true
|
2660 |
+
},
|
2661 |
+
"334": {
|
2662 |
+
"content": "<|reserved_324|>",
|
2663 |
+
"lstrip": false,
|
2664 |
+
"normalized": false,
|
2665 |
+
"rstrip": false,
|
2666 |
+
"single_word": false,
|
2667 |
+
"special": true
|
2668 |
+
},
|
2669 |
+
"335": {
|
2670 |
+
"content": "<|reserved_325|>",
|
2671 |
+
"lstrip": false,
|
2672 |
+
"normalized": false,
|
2673 |
+
"rstrip": false,
|
2674 |
+
"single_word": false,
|
2675 |
+
"special": true
|
2676 |
+
},
|
2677 |
+
"336": {
|
2678 |
+
"content": "<|reserved_326|>",
|
2679 |
+
"lstrip": false,
|
2680 |
+
"normalized": false,
|
2681 |
+
"rstrip": false,
|
2682 |
+
"single_word": false,
|
2683 |
+
"special": true
|
2684 |
+
},
|
2685 |
+
"337": {
|
2686 |
+
"content": "<|reserved_327|>",
|
2687 |
+
"lstrip": false,
|
2688 |
+
"normalized": false,
|
2689 |
+
"rstrip": false,
|
2690 |
+
"single_word": false,
|
2691 |
+
"special": true
|
2692 |
+
},
|
2693 |
+
"338": {
|
2694 |
+
"content": "<|reserved_328|>",
|
2695 |
+
"lstrip": false,
|
2696 |
+
"normalized": false,
|
2697 |
+
"rstrip": false,
|
2698 |
+
"single_word": false,
|
2699 |
+
"special": true
|
2700 |
+
},
|
2701 |
+
"339": {
|
2702 |
+
"content": "<|reserved_329|>",
|
2703 |
+
"lstrip": false,
|
2704 |
+
"normalized": false,
|
2705 |
+
"rstrip": false,
|
2706 |
+
"single_word": false,
|
2707 |
+
"special": true
|
2708 |
+
},
|
2709 |
+
"340": {
|
2710 |
+
"content": "<|reserved_330|>",
|
2711 |
+
"lstrip": false,
|
2712 |
+
"normalized": false,
|
2713 |
+
"rstrip": false,
|
2714 |
+
"single_word": false,
|
2715 |
+
"special": true
|
2716 |
+
},
|
2717 |
+
"341": {
|
2718 |
+
"content": "<|reserved_331|>",
|
2719 |
+
"lstrip": false,
|
2720 |
+
"normalized": false,
|
2721 |
+
"rstrip": false,
|
2722 |
+
"single_word": false,
|
2723 |
+
"special": true
|
2724 |
+
},
|
2725 |
+
"342": {
|
2726 |
+
"content": "<|reserved_332|>",
|
2727 |
+
"lstrip": false,
|
2728 |
+
"normalized": false,
|
2729 |
+
"rstrip": false,
|
2730 |
+
"single_word": false,
|
2731 |
+
"special": true
|
2732 |
+
},
|
2733 |
+
"343": {
|
2734 |
+
"content": "<|reserved_333|>",
|
2735 |
+
"lstrip": false,
|
2736 |
+
"normalized": false,
|
2737 |
+
"rstrip": false,
|
2738 |
+
"single_word": false,
|
2739 |
+
"special": true
|
2740 |
+
},
|
2741 |
+
"344": {
|
2742 |
+
"content": "<|reserved_334|>",
|
2743 |
+
"lstrip": false,
|
2744 |
+
"normalized": false,
|
2745 |
+
"rstrip": false,
|
2746 |
+
"single_word": false,
|
2747 |
+
"special": true
|
2748 |
+
},
|
2749 |
+
"345": {
|
2750 |
+
"content": "<|reserved_335|>",
|
2751 |
+
"lstrip": false,
|
2752 |
+
"normalized": false,
|
2753 |
+
"rstrip": false,
|
2754 |
+
"single_word": false,
|
2755 |
+
"special": true
|
2756 |
+
},
|
2757 |
+
"346": {
|
2758 |
+
"content": "<|reserved_336|>",
|
2759 |
+
"lstrip": false,
|
2760 |
+
"normalized": false,
|
2761 |
+
"rstrip": false,
|
2762 |
+
"single_word": false,
|
2763 |
+
"special": true
|
2764 |
+
},
|
2765 |
+
"347": {
|
2766 |
+
"content": "<|reserved_337|>",
|
2767 |
+
"lstrip": false,
|
2768 |
+
"normalized": false,
|
2769 |
+
"rstrip": false,
|
2770 |
+
"single_word": false,
|
2771 |
+
"special": true
|
2772 |
+
},
|
2773 |
+
"348": {
|
2774 |
+
"content": "<|reserved_338|>",
|
2775 |
+
"lstrip": false,
|
2776 |
+
"normalized": false,
|
2777 |
+
"rstrip": false,
|
2778 |
+
"single_word": false,
|
2779 |
+
"special": true
|
2780 |
+
},
|
2781 |
+
"349": {
|
2782 |
+
"content": "<|reserved_339|>",
|
2783 |
+
"lstrip": false,
|
2784 |
+
"normalized": false,
|
2785 |
+
"rstrip": false,
|
2786 |
+
"single_word": false,
|
2787 |
+
"special": true
|
2788 |
+
},
|
2789 |
+
"350": {
|
2790 |
+
"content": "<|reserved_340|>",
|
2791 |
+
"lstrip": false,
|
2792 |
+
"normalized": false,
|
2793 |
+
"rstrip": false,
|
2794 |
+
"single_word": false,
|
2795 |
+
"special": true
|
2796 |
+
},
|
2797 |
+
"351": {
|
2798 |
+
"content": "<|reserved_341|>",
|
2799 |
+
"lstrip": false,
|
2800 |
+
"normalized": false,
|
2801 |
+
"rstrip": false,
|
2802 |
+
"single_word": false,
|
2803 |
+
"special": true
|
2804 |
+
},
|
2805 |
+
"352": {
|
2806 |
+
"content": "<|reserved_342|>",
|
2807 |
+
"lstrip": false,
|
2808 |
+
"normalized": false,
|
2809 |
+
"rstrip": false,
|
2810 |
+
"single_word": false,
|
2811 |
+
"special": true
|
2812 |
+
},
|
2813 |
+
"353": {
|
2814 |
+
"content": "<|reserved_343|>",
|
2815 |
+
"lstrip": false,
|
2816 |
+
"normalized": false,
|
2817 |
+
"rstrip": false,
|
2818 |
+
"single_word": false,
|
2819 |
+
"special": true
|
2820 |
+
},
|
2821 |
+
"354": {
|
2822 |
+
"content": "<|reserved_344|>",
|
2823 |
+
"lstrip": false,
|
2824 |
+
"normalized": false,
|
2825 |
+
"rstrip": false,
|
2826 |
+
"single_word": false,
|
2827 |
+
"special": true
|
2828 |
+
},
|
2829 |
+
"355": {
|
2830 |
+
"content": "<|reserved_345|>",
|
2831 |
+
"lstrip": false,
|
2832 |
+
"normalized": false,
|
2833 |
+
"rstrip": false,
|
2834 |
+
"single_word": false,
|
2835 |
+
"special": true
|
2836 |
+
},
|
2837 |
+
"356": {
|
2838 |
+
"content": "<|reserved_346|>",
|
2839 |
+
"lstrip": false,
|
2840 |
+
"normalized": false,
|
2841 |
+
"rstrip": false,
|
2842 |
+
"single_word": false,
|
2843 |
+
"special": true
|
2844 |
+
},
|
2845 |
+
"357": {
|
2846 |
+
"content": "<|reserved_347|>",
|
2847 |
+
"lstrip": false,
|
2848 |
+
"normalized": false,
|
2849 |
+
"rstrip": false,
|
2850 |
+
"single_word": false,
|
2851 |
+
"special": true
|
2852 |
+
},
|
2853 |
+
"358": {
|
2854 |
+
"content": "<|reserved_348|>",
|
2855 |
+
"lstrip": false,
|
2856 |
+
"normalized": false,
|
2857 |
+
"rstrip": false,
|
2858 |
+
"single_word": false,
|
2859 |
+
"special": true
|
2860 |
+
},
|
2861 |
+
"359": {
|
2862 |
+
"content": "<|reserved_349|>",
|
2863 |
+
"lstrip": false,
|
2864 |
+
"normalized": false,
|
2865 |
+
"rstrip": false,
|
2866 |
+
"single_word": false,
|
2867 |
+
"special": true
|
2868 |
+
},
|
2869 |
+
"360": {
|
2870 |
+
"content": "<|reserved_350|>",
|
2871 |
+
"lstrip": false,
|
2872 |
+
"normalized": false,
|
2873 |
+
"rstrip": false,
|
2874 |
+
"single_word": false,
|
2875 |
+
"special": true
|
2876 |
+
},
|
2877 |
+
"361": {
|
2878 |
+
"content": "<|reserved_351|>",
|
2879 |
+
"lstrip": false,
|
2880 |
+
"normalized": false,
|
2881 |
+
"rstrip": false,
|
2882 |
+
"single_word": false,
|
2883 |
+
"special": true
|
2884 |
+
},
|
2885 |
+
"362": {
|
2886 |
+
"content": "<|reserved_352|>",
|
2887 |
+
"lstrip": false,
|
2888 |
+
"normalized": false,
|
2889 |
+
"rstrip": false,
|
2890 |
+
"single_word": false,
|
2891 |
+
"special": true
|
2892 |
+
},
|
2893 |
+
"363": {
|
2894 |
+
"content": "<|reserved_353|>",
|
2895 |
+
"lstrip": false,
|
2896 |
+
"normalized": false,
|
2897 |
+
"rstrip": false,
|
2898 |
+
"single_word": false,
|
2899 |
+
"special": true
|
2900 |
+
},
|
2901 |
+
"364": {
|
2902 |
+
"content": "<|reserved_354|>",
|
2903 |
+
"lstrip": false,
|
2904 |
+
"normalized": false,
|
2905 |
+
"rstrip": false,
|
2906 |
+
"single_word": false,
|
2907 |
+
"special": true
|
2908 |
+
},
|
2909 |
+
"365": {
|
2910 |
+
"content": "<|reserved_355|>",
|
2911 |
+
"lstrip": false,
|
2912 |
+
"normalized": false,
|
2913 |
+
"rstrip": false,
|
2914 |
+
"single_word": false,
|
2915 |
+
"special": true
|
2916 |
+
},
|
2917 |
+
"366": {
|
2918 |
+
"content": "<|reserved_356|>",
|
2919 |
+
"lstrip": false,
|
2920 |
+
"normalized": false,
|
2921 |
+
"rstrip": false,
|
2922 |
+
"single_word": false,
|
2923 |
+
"special": true
|
2924 |
+
},
|
2925 |
+
"367": {
|
2926 |
+
"content": "<|reserved_357|>",
|
2927 |
+
"lstrip": false,
|
2928 |
+
"normalized": false,
|
2929 |
+
"rstrip": false,
|
2930 |
+
"single_word": false,
|
2931 |
+
"special": true
|
2932 |
+
},
|
2933 |
+
"368": {
|
2934 |
+
"content": "<|reserved_358|>",
|
2935 |
+
"lstrip": false,
|
2936 |
+
"normalized": false,
|
2937 |
+
"rstrip": false,
|
2938 |
+
"single_word": false,
|
2939 |
+
"special": true
|
2940 |
+
},
|
2941 |
+
"369": {
|
2942 |
+
"content": "<|reserved_359|>",
|
2943 |
+
"lstrip": false,
|
2944 |
+
"normalized": false,
|
2945 |
+
"rstrip": false,
|
2946 |
+
"single_word": false,
|
2947 |
+
"special": true
|
2948 |
+
},
|
2949 |
+
"370": {
|
2950 |
+
"content": "<|reserved_360|>",
|
2951 |
+
"lstrip": false,
|
2952 |
+
"normalized": false,
|
2953 |
+
"rstrip": false,
|
2954 |
+
"single_word": false,
|
2955 |
+
"special": true
|
2956 |
+
},
|
2957 |
+
"371": {
|
2958 |
+
"content": "<|reserved_361|>",
|
2959 |
+
"lstrip": false,
|
2960 |
+
"normalized": false,
|
2961 |
+
"rstrip": false,
|
2962 |
+
"single_word": false,
|
2963 |
+
"special": true
|
2964 |
+
},
|
2965 |
+
"372": {
|
2966 |
+
"content": "<|reserved_362|>",
|
2967 |
+
"lstrip": false,
|
2968 |
+
"normalized": false,
|
2969 |
+
"rstrip": false,
|
2970 |
+
"single_word": false,
|
2971 |
+
"special": true
|
2972 |
+
},
|
2973 |
+
"373": {
|
2974 |
+
"content": "<|reserved_363|>",
|
2975 |
+
"lstrip": false,
|
2976 |
+
"normalized": false,
|
2977 |
+
"rstrip": false,
|
2978 |
+
"single_word": false,
|
2979 |
+
"special": true
|
2980 |
+
},
|
2981 |
+
"374": {
|
2982 |
+
"content": "<|reserved_364|>",
|
2983 |
+
"lstrip": false,
|
2984 |
+
"normalized": false,
|
2985 |
+
"rstrip": false,
|
2986 |
+
"single_word": false,
|
2987 |
+
"special": true
|
2988 |
+
},
|
2989 |
+
"375": {
|
2990 |
+
"content": "<|reserved_365|>",
|
2991 |
+
"lstrip": false,
|
2992 |
+
"normalized": false,
|
2993 |
+
"rstrip": false,
|
2994 |
+
"single_word": false,
|
2995 |
+
"special": true
|
2996 |
+
},
|
2997 |
+
"376": {
|
2998 |
+
"content": "<|reserved_366|>",
|
2999 |
+
"lstrip": false,
|
3000 |
+
"normalized": false,
|
3001 |
+
"rstrip": false,
|
3002 |
+
"single_word": false,
|
3003 |
+
"special": true
|
3004 |
+
},
|
3005 |
+
"377": {
|
3006 |
+
"content": "<|reserved_367|>",
|
3007 |
+
"lstrip": false,
|
3008 |
+
"normalized": false,
|
3009 |
+
"rstrip": false,
|
3010 |
+
"single_word": false,
|
3011 |
+
"special": true
|
3012 |
+
},
|
3013 |
+
"378": {
|
3014 |
+
"content": "<|reserved_368|>",
|
3015 |
+
"lstrip": false,
|
3016 |
+
"normalized": false,
|
3017 |
+
"rstrip": false,
|
3018 |
+
"single_word": false,
|
3019 |
+
"special": true
|
3020 |
+
},
|
3021 |
+
"379": {
|
3022 |
+
"content": "<|reserved_369|>",
|
3023 |
+
"lstrip": false,
|
3024 |
+
"normalized": false,
|
3025 |
+
"rstrip": false,
|
3026 |
+
"single_word": false,
|
3027 |
+
"special": true
|
3028 |
+
},
|
3029 |
+
"380": {
|
3030 |
+
"content": "<|reserved_370|>",
|
3031 |
+
"lstrip": false,
|
3032 |
+
"normalized": false,
|
3033 |
+
"rstrip": false,
|
3034 |
+
"single_word": false,
|
3035 |
+
"special": true
|
3036 |
+
},
|
3037 |
+
"381": {
|
3038 |
+
"content": "<|reserved_371|>",
|
3039 |
+
"lstrip": false,
|
3040 |
+
"normalized": false,
|
3041 |
+
"rstrip": false,
|
3042 |
+
"single_word": false,
|
3043 |
+
"special": true
|
3044 |
+
},
|
3045 |
+
"382": {
|
3046 |
+
"content": "<|reserved_372|>",
|
3047 |
+
"lstrip": false,
|
3048 |
+
"normalized": false,
|
3049 |
+
"rstrip": false,
|
3050 |
+
"single_word": false,
|
3051 |
+
"special": true
|
3052 |
+
},
|
3053 |
+
"383": {
|
3054 |
+
"content": "<|reserved_373|>",
|
3055 |
+
"lstrip": false,
|
3056 |
+
"normalized": false,
|
3057 |
+
"rstrip": false,
|
3058 |
+
"single_word": false,
|
3059 |
+
"special": true
|
3060 |
+
},
|
3061 |
+
"384": {
|
3062 |
+
"content": "<|reserved_374|>",
|
3063 |
+
"lstrip": false,
|
3064 |
+
"normalized": false,
|
3065 |
+
"rstrip": false,
|
3066 |
+
"single_word": false,
|
3067 |
+
"special": true
|
3068 |
+
},
|
3069 |
+
"385": {
|
3070 |
+
"content": "<|reserved_375|>",
|
3071 |
+
"lstrip": false,
|
3072 |
+
"normalized": false,
|
3073 |
+
"rstrip": false,
|
3074 |
+
"single_word": false,
|
3075 |
+
"special": true
|
3076 |
+
},
|
3077 |
+
"386": {
|
3078 |
+
"content": "<|reserved_376|>",
|
3079 |
+
"lstrip": false,
|
3080 |
+
"normalized": false,
|
3081 |
+
"rstrip": false,
|
3082 |
+
"single_word": false,
|
3083 |
+
"special": true
|
3084 |
+
},
|
3085 |
+
"387": {
|
3086 |
+
"content": "<|reserved_377|>",
|
3087 |
+
"lstrip": false,
|
3088 |
+
"normalized": false,
|
3089 |
+
"rstrip": false,
|
3090 |
+
"single_word": false,
|
3091 |
+
"special": true
|
3092 |
+
},
|
3093 |
+
"388": {
|
3094 |
+
"content": "<|reserved_378|>",
|
3095 |
+
"lstrip": false,
|
3096 |
+
"normalized": false,
|
3097 |
+
"rstrip": false,
|
3098 |
+
"single_word": false,
|
3099 |
+
"special": true
|
3100 |
+
},
|
3101 |
+
"389": {
|
3102 |
+
"content": "<|reserved_379|>",
|
3103 |
+
"lstrip": false,
|
3104 |
+
"normalized": false,
|
3105 |
+
"rstrip": false,
|
3106 |
+
"single_word": false,
|
3107 |
+
"special": true
|
3108 |
+
},
|
3109 |
+
"390": {
|
3110 |
+
"content": "<|reserved_380|>",
|
3111 |
+
"lstrip": false,
|
3112 |
+
"normalized": false,
|
3113 |
+
"rstrip": false,
|
3114 |
+
"single_word": false,
|
3115 |
+
"special": true
|
3116 |
+
},
|
3117 |
+
"391": {
|
3118 |
+
"content": "<|reserved_381|>",
|
3119 |
+
"lstrip": false,
|
3120 |
+
"normalized": false,
|
3121 |
+
"rstrip": false,
|
3122 |
+
"single_word": false,
|
3123 |
+
"special": true
|
3124 |
+
},
|
3125 |
+
"392": {
|
3126 |
+
"content": "<|reserved_382|>",
|
3127 |
+
"lstrip": false,
|
3128 |
+
"normalized": false,
|
3129 |
+
"rstrip": false,
|
3130 |
+
"single_word": false,
|
3131 |
+
"special": true
|
3132 |
+
},
|
3133 |
+
"393": {
|
3134 |
+
"content": "<|reserved_383|>",
|
3135 |
+
"lstrip": false,
|
3136 |
+
"normalized": false,
|
3137 |
+
"rstrip": false,
|
3138 |
+
"single_word": false,
|
3139 |
+
"special": true
|
3140 |
+
},
|
3141 |
+
"394": {
|
3142 |
+
"content": "<|reserved_384|>",
|
3143 |
+
"lstrip": false,
|
3144 |
+
"normalized": false,
|
3145 |
+
"rstrip": false,
|
3146 |
+
"single_word": false,
|
3147 |
+
"special": true
|
3148 |
+
},
|
3149 |
+
"395": {
|
3150 |
+
"content": "<|reserved_385|>",
|
3151 |
+
"lstrip": false,
|
3152 |
+
"normalized": false,
|
3153 |
+
"rstrip": false,
|
3154 |
+
"single_word": false,
|
3155 |
+
"special": true
|
3156 |
+
},
|
3157 |
+
"396": {
|
3158 |
+
"content": "<|reserved_386|>",
|
3159 |
+
"lstrip": false,
|
3160 |
+
"normalized": false,
|
3161 |
+
"rstrip": false,
|
3162 |
+
"single_word": false,
|
3163 |
+
"special": true
|
3164 |
+
},
|
3165 |
+
"397": {
|
3166 |
+
"content": "<|reserved_387|>",
|
3167 |
+
"lstrip": false,
|
3168 |
+
"normalized": false,
|
3169 |
+
"rstrip": false,
|
3170 |
+
"single_word": false,
|
3171 |
+
"special": true
|
3172 |
+
},
|
3173 |
+
"398": {
|
3174 |
+
"content": "<|reserved_388|>",
|
3175 |
+
"lstrip": false,
|
3176 |
+
"normalized": false,
|
3177 |
+
"rstrip": false,
|
3178 |
+
"single_word": false,
|
3179 |
+
"special": true
|
3180 |
+
},
|
3181 |
+
"399": {
|
3182 |
+
"content": "<|reserved_389|>",
|
3183 |
+
"lstrip": false,
|
3184 |
+
"normalized": false,
|
3185 |
+
"rstrip": false,
|
3186 |
+
"single_word": false,
|
3187 |
+
"special": true
|
3188 |
+
},
|
3189 |
+
"400": {
|
3190 |
+
"content": "<|reserved_390|>",
|
3191 |
+
"lstrip": false,
|
3192 |
+
"normalized": false,
|
3193 |
+
"rstrip": false,
|
3194 |
+
"single_word": false,
|
3195 |
+
"special": true
|
3196 |
+
},
|
3197 |
+
"401": {
|
3198 |
+
"content": "<|reserved_391|>",
|
3199 |
+
"lstrip": false,
|
3200 |
+
"normalized": false,
|
3201 |
+
"rstrip": false,
|
3202 |
+
"single_word": false,
|
3203 |
+
"special": true
|
3204 |
+
},
|
3205 |
+
"402": {
|
3206 |
+
"content": "<|reserved_392|>",
|
3207 |
+
"lstrip": false,
|
3208 |
+
"normalized": false,
|
3209 |
+
"rstrip": false,
|
3210 |
+
"single_word": false,
|
3211 |
+
"special": true
|
3212 |
+
},
|
3213 |
+
"403": {
|
3214 |
+
"content": "<|reserved_393|>",
|
3215 |
+
"lstrip": false,
|
3216 |
+
"normalized": false,
|
3217 |
+
"rstrip": false,
|
3218 |
+
"single_word": false,
|
3219 |
+
"special": true
|
3220 |
+
},
|
3221 |
+
"404": {
|
3222 |
+
"content": "<|reserved_394|>",
|
3223 |
+
"lstrip": false,
|
3224 |
+
"normalized": false,
|
3225 |
+
"rstrip": false,
|
3226 |
+
"single_word": false,
|
3227 |
+
"special": true
|
3228 |
+
},
|
3229 |
+
"405": {
|
3230 |
+
"content": "<|reserved_395|>",
|
3231 |
+
"lstrip": false,
|
3232 |
+
"normalized": false,
|
3233 |
+
"rstrip": false,
|
3234 |
+
"single_word": false,
|
3235 |
+
"special": true
|
3236 |
+
},
|
3237 |
+
"406": {
|
3238 |
+
"content": "<|reserved_396|>",
|
3239 |
+
"lstrip": false,
|
3240 |
+
"normalized": false,
|
3241 |
+
"rstrip": false,
|
3242 |
+
"single_word": false,
|
3243 |
+
"special": true
|
3244 |
+
},
|
3245 |
+
"407": {
|
3246 |
+
"content": "<|reserved_397|>",
|
3247 |
+
"lstrip": false,
|
3248 |
+
"normalized": false,
|
3249 |
+
"rstrip": false,
|
3250 |
+
"single_word": false,
|
3251 |
+
"special": true
|
3252 |
+
},
|
3253 |
+
"408": {
|
3254 |
+
"content": "<|reserved_398|>",
|
3255 |
+
"lstrip": false,
|
3256 |
+
"normalized": false,
|
3257 |
+
"rstrip": false,
|
3258 |
+
"single_word": false,
|
3259 |
+
"special": true
|
3260 |
+
},
|
3261 |
+
"409": {
|
3262 |
+
"content": "<|reserved_399|>",
|
3263 |
+
"lstrip": false,
|
3264 |
+
"normalized": false,
|
3265 |
+
"rstrip": false,
|
3266 |
+
"single_word": false,
|
3267 |
+
"special": true
|
3268 |
+
},
|
3269 |
+
"410": {
|
3270 |
+
"content": "<|reserved_400|>",
|
3271 |
+
"lstrip": false,
|
3272 |
+
"normalized": false,
|
3273 |
+
"rstrip": false,
|
3274 |
+
"single_word": false,
|
3275 |
+
"special": true
|
3276 |
+
},
|
3277 |
+
"411": {
|
3278 |
+
"content": "<|reserved_401|>",
|
3279 |
+
"lstrip": false,
|
3280 |
+
"normalized": false,
|
3281 |
+
"rstrip": false,
|
3282 |
+
"single_word": false,
|
3283 |
+
"special": true
|
3284 |
+
},
|
3285 |
+
"412": {
|
3286 |
+
"content": "<|reserved_402|>",
|
3287 |
+
"lstrip": false,
|
3288 |
+
"normalized": false,
|
3289 |
+
"rstrip": false,
|
3290 |
+
"single_word": false,
|
3291 |
+
"special": true
|
3292 |
+
},
|
3293 |
+
"413": {
|
3294 |
+
"content": "<|reserved_403|>",
|
3295 |
+
"lstrip": false,
|
3296 |
+
"normalized": false,
|
3297 |
+
"rstrip": false,
|
3298 |
+
"single_word": false,
|
3299 |
+
"special": true
|
3300 |
+
},
|
3301 |
+
"414": {
|
3302 |
+
"content": "<|reserved_404|>",
|
3303 |
+
"lstrip": false,
|
3304 |
+
"normalized": false,
|
3305 |
+
"rstrip": false,
|
3306 |
+
"single_word": false,
|
3307 |
+
"special": true
|
3308 |
+
},
|
3309 |
+
"415": {
|
3310 |
+
"content": "<|reserved_405|>",
|
3311 |
+
"lstrip": false,
|
3312 |
+
"normalized": false,
|
3313 |
+
"rstrip": false,
|
3314 |
+
"single_word": false,
|
3315 |
+
"special": true
|
3316 |
+
},
|
3317 |
+
"416": {
|
3318 |
+
"content": "<|reserved_406|>",
|
3319 |
+
"lstrip": false,
|
3320 |
+
"normalized": false,
|
3321 |
+
"rstrip": false,
|
3322 |
+
"single_word": false,
|
3323 |
+
"special": true
|
3324 |
+
},
|
3325 |
+
"417": {
|
3326 |
+
"content": "<|reserved_407|>",
|
3327 |
+
"lstrip": false,
|
3328 |
+
"normalized": false,
|
3329 |
+
"rstrip": false,
|
3330 |
+
"single_word": false,
|
3331 |
+
"special": true
|
3332 |
+
},
|
3333 |
+
"418": {
|
3334 |
+
"content": "<|reserved_408|>",
|
3335 |
+
"lstrip": false,
|
3336 |
+
"normalized": false,
|
3337 |
+
"rstrip": false,
|
3338 |
+
"single_word": false,
|
3339 |
+
"special": true
|
3340 |
+
},
|
3341 |
+
"419": {
|
3342 |
+
"content": "<|reserved_409|>",
|
3343 |
+
"lstrip": false,
|
3344 |
+
"normalized": false,
|
3345 |
+
"rstrip": false,
|
3346 |
+
"single_word": false,
|
3347 |
+
"special": true
|
3348 |
+
},
|
3349 |
+
"420": {
|
3350 |
+
"content": "<|reserved_410|>",
|
3351 |
+
"lstrip": false,
|
3352 |
+
"normalized": false,
|
3353 |
+
"rstrip": false,
|
3354 |
+
"single_word": false,
|
3355 |
+
"special": true
|
3356 |
+
},
|
3357 |
+
"421": {
|
3358 |
+
"content": "<|reserved_411|>",
|
3359 |
+
"lstrip": false,
|
3360 |
+
"normalized": false,
|
3361 |
+
"rstrip": false,
|
3362 |
+
"single_word": false,
|
3363 |
+
"special": true
|
3364 |
+
},
|
3365 |
+
"422": {
|
3366 |
+
"content": "<|reserved_412|>",
|
3367 |
+
"lstrip": false,
|
3368 |
+
"normalized": false,
|
3369 |
+
"rstrip": false,
|
3370 |
+
"single_word": false,
|
3371 |
+
"special": true
|
3372 |
+
},
|
3373 |
+
"423": {
|
3374 |
+
"content": "<|reserved_413|>",
|
3375 |
+
"lstrip": false,
|
3376 |
+
"normalized": false,
|
3377 |
+
"rstrip": false,
|
3378 |
+
"single_word": false,
|
3379 |
+
"special": true
|
3380 |
+
},
|
3381 |
+
"424": {
|
3382 |
+
"content": "<|reserved_414|>",
|
3383 |
+
"lstrip": false,
|
3384 |
+
"normalized": false,
|
3385 |
+
"rstrip": false,
|
3386 |
+
"single_word": false,
|
3387 |
+
"special": true
|
3388 |
+
},
|
3389 |
+
"425": {
|
3390 |
+
"content": "<|reserved_415|>",
|
3391 |
+
"lstrip": false,
|
3392 |
+
"normalized": false,
|
3393 |
+
"rstrip": false,
|
3394 |
+
"single_word": false,
|
3395 |
+
"special": true
|
3396 |
+
},
|
3397 |
+
"426": {
|
3398 |
+
"content": "<|reserved_416|>",
|
3399 |
+
"lstrip": false,
|
3400 |
+
"normalized": false,
|
3401 |
+
"rstrip": false,
|
3402 |
+
"single_word": false,
|
3403 |
+
"special": true
|
3404 |
+
},
|
3405 |
+
"427": {
|
3406 |
+
"content": "<|reserved_417|>",
|
3407 |
+
"lstrip": false,
|
3408 |
+
"normalized": false,
|
3409 |
+
"rstrip": false,
|
3410 |
+
"single_word": false,
|
3411 |
+
"special": true
|
3412 |
+
},
|
3413 |
+
"428": {
|
3414 |
+
"content": "<|reserved_418|>",
|
3415 |
+
"lstrip": false,
|
3416 |
+
"normalized": false,
|
3417 |
+
"rstrip": false,
|
3418 |
+
"single_word": false,
|
3419 |
+
"special": true
|
3420 |
+
},
|
3421 |
+
"429": {
|
3422 |
+
"content": "<|reserved_419|>",
|
3423 |
+
"lstrip": false,
|
3424 |
+
"normalized": false,
|
3425 |
+
"rstrip": false,
|
3426 |
+
"single_word": false,
|
3427 |
+
"special": true
|
3428 |
+
},
|
3429 |
+
"430": {
|
3430 |
+
"content": "<|reserved_420|>",
|
3431 |
+
"lstrip": false,
|
3432 |
+
"normalized": false,
|
3433 |
+
"rstrip": false,
|
3434 |
+
"single_word": false,
|
3435 |
+
"special": true
|
3436 |
+
},
|
3437 |
+
"431": {
|
3438 |
+
"content": "<|reserved_421|>",
|
3439 |
+
"lstrip": false,
|
3440 |
+
"normalized": false,
|
3441 |
+
"rstrip": false,
|
3442 |
+
"single_word": false,
|
3443 |
+
"special": true
|
3444 |
+
},
|
3445 |
+
"432": {
|
3446 |
+
"content": "<|reserved_422|>",
|
3447 |
+
"lstrip": false,
|
3448 |
+
"normalized": false,
|
3449 |
+
"rstrip": false,
|
3450 |
+
"single_word": false,
|
3451 |
+
"special": true
|
3452 |
+
},
|
3453 |
+
"433": {
|
3454 |
+
"content": "<|reserved_423|>",
|
3455 |
+
"lstrip": false,
|
3456 |
+
"normalized": false,
|
3457 |
+
"rstrip": false,
|
3458 |
+
"single_word": false,
|
3459 |
+
"special": true
|
3460 |
+
},
|
3461 |
+
"434": {
|
3462 |
+
"content": "<|reserved_424|>",
|
3463 |
+
"lstrip": false,
|
3464 |
+
"normalized": false,
|
3465 |
+
"rstrip": false,
|
3466 |
+
"single_word": false,
|
3467 |
+
"special": true
|
3468 |
+
},
|
3469 |
+
"435": {
|
3470 |
+
"content": "<|reserved_425|>",
|
3471 |
+
"lstrip": false,
|
3472 |
+
"normalized": false,
|
3473 |
+
"rstrip": false,
|
3474 |
+
"single_word": false,
|
3475 |
+
"special": true
|
3476 |
+
},
|
3477 |
+
"436": {
|
3478 |
+
"content": "<|reserved_426|>",
|
3479 |
+
"lstrip": false,
|
3480 |
+
"normalized": false,
|
3481 |
+
"rstrip": false,
|
3482 |
+
"single_word": false,
|
3483 |
+
"special": true
|
3484 |
+
},
|
3485 |
+
"437": {
|
3486 |
+
"content": "<|reserved_427|>",
|
3487 |
+
"lstrip": false,
|
3488 |
+
"normalized": false,
|
3489 |
+
"rstrip": false,
|
3490 |
+
"single_word": false,
|
3491 |
+
"special": true
|
3492 |
+
},
|
3493 |
+
"438": {
|
3494 |
+
"content": "<|reserved_428|>",
|
3495 |
+
"lstrip": false,
|
3496 |
+
"normalized": false,
|
3497 |
+
"rstrip": false,
|
3498 |
+
"single_word": false,
|
3499 |
+
"special": true
|
3500 |
+
},
|
3501 |
+
"439": {
|
3502 |
+
"content": "<|reserved_429|>",
|
3503 |
+
"lstrip": false,
|
3504 |
+
"normalized": false,
|
3505 |
+
"rstrip": false,
|
3506 |
+
"single_word": false,
|
3507 |
+
"special": true
|
3508 |
+
},
|
3509 |
+
"440": {
|
3510 |
+
"content": "<|reserved_430|>",
|
3511 |
+
"lstrip": false,
|
3512 |
+
"normalized": false,
|
3513 |
+
"rstrip": false,
|
3514 |
+
"single_word": false,
|
3515 |
+
"special": true
|
3516 |
+
},
|
3517 |
+
"441": {
|
3518 |
+
"content": "<|reserved_431|>",
|
3519 |
+
"lstrip": false,
|
3520 |
+
"normalized": false,
|
3521 |
+
"rstrip": false,
|
3522 |
+
"single_word": false,
|
3523 |
+
"special": true
|
3524 |
+
},
|
3525 |
+
"442": {
|
3526 |
+
"content": "<|reserved_432|>",
|
3527 |
+
"lstrip": false,
|
3528 |
+
"normalized": false,
|
3529 |
+
"rstrip": false,
|
3530 |
+
"single_word": false,
|
3531 |
+
"special": true
|
3532 |
+
},
|
3533 |
+
"443": {
|
3534 |
+
"content": "<|reserved_433|>",
|
3535 |
+
"lstrip": false,
|
3536 |
+
"normalized": false,
|
3537 |
+
"rstrip": false,
|
3538 |
+
"single_word": false,
|
3539 |
+
"special": true
|
3540 |
+
},
|
3541 |
+
"444": {
|
3542 |
+
"content": "<|reserved_434|>",
|
3543 |
+
"lstrip": false,
|
3544 |
+
"normalized": false,
|
3545 |
+
"rstrip": false,
|
3546 |
+
"single_word": false,
|
3547 |
+
"special": true
|
3548 |
+
},
|
3549 |
+
"445": {
|
3550 |
+
"content": "<|reserved_435|>",
|
3551 |
+
"lstrip": false,
|
3552 |
+
"normalized": false,
|
3553 |
+
"rstrip": false,
|
3554 |
+
"single_word": false,
|
3555 |
+
"special": true
|
3556 |
+
},
|
3557 |
+
"446": {
|
3558 |
+
"content": "<|reserved_436|>",
|
3559 |
+
"lstrip": false,
|
3560 |
+
"normalized": false,
|
3561 |
+
"rstrip": false,
|
3562 |
+
"single_word": false,
|
3563 |
+
"special": true
|
3564 |
+
},
|
3565 |
+
"447": {
|
3566 |
+
"content": "<|reserved_437|>",
|
3567 |
+
"lstrip": false,
|
3568 |
+
"normalized": false,
|
3569 |
+
"rstrip": false,
|
3570 |
+
"single_word": false,
|
3571 |
+
"special": true
|
3572 |
+
},
|
3573 |
+
"448": {
|
3574 |
+
"content": "<|reserved_438|>",
|
3575 |
+
"lstrip": false,
|
3576 |
+
"normalized": false,
|
3577 |
+
"rstrip": false,
|
3578 |
+
"single_word": false,
|
3579 |
+
"special": true
|
3580 |
+
},
|
3581 |
+
"449": {
|
3582 |
+
"content": "<|reserved_439|>",
|
3583 |
+
"lstrip": false,
|
3584 |
+
"normalized": false,
|
3585 |
+
"rstrip": false,
|
3586 |
+
"single_word": false,
|
3587 |
+
"special": true
|
3588 |
+
},
|
3589 |
+
"450": {
|
3590 |
+
"content": "<|reserved_440|>",
|
3591 |
+
"lstrip": false,
|
3592 |
+
"normalized": false,
|
3593 |
+
"rstrip": false,
|
3594 |
+
"single_word": false,
|
3595 |
+
"special": true
|
3596 |
+
},
|
3597 |
+
"451": {
|
3598 |
+
"content": "<|reserved_441|>",
|
3599 |
+
"lstrip": false,
|
3600 |
+
"normalized": false,
|
3601 |
+
"rstrip": false,
|
3602 |
+
"single_word": false,
|
3603 |
+
"special": true
|
3604 |
+
},
|
3605 |
+
"452": {
|
3606 |
+
"content": "<|reserved_442|>",
|
3607 |
+
"lstrip": false,
|
3608 |
+
"normalized": false,
|
3609 |
+
"rstrip": false,
|
3610 |
+
"single_word": false,
|
3611 |
+
"special": true
|
3612 |
+
},
|
3613 |
+
"453": {
|
3614 |
+
"content": "<|reserved_443|>",
|
3615 |
+
"lstrip": false,
|
3616 |
+
"normalized": false,
|
3617 |
+
"rstrip": false,
|
3618 |
+
"single_word": false,
|
3619 |
+
"special": true
|
3620 |
+
},
|
3621 |
+
"454": {
|
3622 |
+
"content": "<|reserved_444|>",
|
3623 |
+
"lstrip": false,
|
3624 |
+
"normalized": false,
|
3625 |
+
"rstrip": false,
|
3626 |
+
"single_word": false,
|
3627 |
+
"special": true
|
3628 |
+
},
|
3629 |
+
"455": {
|
3630 |
+
"content": "<|reserved_445|>",
|
3631 |
+
"lstrip": false,
|
3632 |
+
"normalized": false,
|
3633 |
+
"rstrip": false,
|
3634 |
+
"single_word": false,
|
3635 |
+
"special": true
|
3636 |
+
},
|
3637 |
+
"456": {
|
3638 |
+
"content": "<|reserved_446|>",
|
3639 |
+
"lstrip": false,
|
3640 |
+
"normalized": false,
|
3641 |
+
"rstrip": false,
|
3642 |
+
"single_word": false,
|
3643 |
+
"special": true
|
3644 |
+
},
|
3645 |
+
"457": {
|
3646 |
+
"content": "<|reserved_447|>",
|
3647 |
+
"lstrip": false,
|
3648 |
+
"normalized": false,
|
3649 |
+
"rstrip": false,
|
3650 |
+
"single_word": false,
|
3651 |
+
"special": true
|
3652 |
+
},
|
3653 |
+
"458": {
|
3654 |
+
"content": "<|reserved_448|>",
|
3655 |
+
"lstrip": false,
|
3656 |
+
"normalized": false,
|
3657 |
+
"rstrip": false,
|
3658 |
+
"single_word": false,
|
3659 |
+
"special": true
|
3660 |
+
},
|
3661 |
+
"459": {
|
3662 |
+
"content": "<|reserved_449|>",
|
3663 |
+
"lstrip": false,
|
3664 |
+
"normalized": false,
|
3665 |
+
"rstrip": false,
|
3666 |
+
"single_word": false,
|
3667 |
+
"special": true
|
3668 |
+
},
|
3669 |
+
"460": {
|
3670 |
+
"content": "<|reserved_450|>",
|
3671 |
+
"lstrip": false,
|
3672 |
+
"normalized": false,
|
3673 |
+
"rstrip": false,
|
3674 |
+
"single_word": false,
|
3675 |
+
"special": true
|
3676 |
+
},
|
3677 |
+
"461": {
|
3678 |
+
"content": "<|reserved_451|>",
|
3679 |
+
"lstrip": false,
|
3680 |
+
"normalized": false,
|
3681 |
+
"rstrip": false,
|
3682 |
+
"single_word": false,
|
3683 |
+
"special": true
|
3684 |
+
},
|
3685 |
+
"462": {
|
3686 |
+
"content": "<|reserved_452|>",
|
3687 |
+
"lstrip": false,
|
3688 |
+
"normalized": false,
|
3689 |
+
"rstrip": false,
|
3690 |
+
"single_word": false,
|
3691 |
+
"special": true
|
3692 |
+
},
|
3693 |
+
"463": {
|
3694 |
+
"content": "<|reserved_453|>",
|
3695 |
+
"lstrip": false,
|
3696 |
+
"normalized": false,
|
3697 |
+
"rstrip": false,
|
3698 |
+
"single_word": false,
|
3699 |
+
"special": true
|
3700 |
+
},
|
3701 |
+
"464": {
|
3702 |
+
"content": "<|reserved_454|>",
|
3703 |
+
"lstrip": false,
|
3704 |
+
"normalized": false,
|
3705 |
+
"rstrip": false,
|
3706 |
+
"single_word": false,
|
3707 |
+
"special": true
|
3708 |
+
},
|
3709 |
+
"465": {
|
3710 |
+
"content": "<|reserved_455|>",
|
3711 |
+
"lstrip": false,
|
3712 |
+
"normalized": false,
|
3713 |
+
"rstrip": false,
|
3714 |
+
"single_word": false,
|
3715 |
+
"special": true
|
3716 |
+
},
|
3717 |
+
"466": {
|
3718 |
+
"content": "<|reserved_456|>",
|
3719 |
+
"lstrip": false,
|
3720 |
+
"normalized": false,
|
3721 |
+
"rstrip": false,
|
3722 |
+
"single_word": false,
|
3723 |
+
"special": true
|
3724 |
+
},
|
3725 |
+
"467": {
|
3726 |
+
"content": "<|reserved_457|>",
|
3727 |
+
"lstrip": false,
|
3728 |
+
"normalized": false,
|
3729 |
+
"rstrip": false,
|
3730 |
+
"single_word": false,
|
3731 |
+
"special": true
|
3732 |
+
},
|
3733 |
+
"468": {
|
3734 |
+
"content": "<|reserved_458|>",
|
3735 |
+
"lstrip": false,
|
3736 |
+
"normalized": false,
|
3737 |
+
"rstrip": false,
|
3738 |
+
"single_word": false,
|
3739 |
+
"special": true
|
3740 |
+
},
|
3741 |
+
"469": {
|
3742 |
+
"content": "<|reserved_459|>",
|
3743 |
+
"lstrip": false,
|
3744 |
+
"normalized": false,
|
3745 |
+
"rstrip": false,
|
3746 |
+
"single_word": false,
|
3747 |
+
"special": true
|
3748 |
+
},
|
3749 |
+
"470": {
|
3750 |
+
"content": "<|reserved_460|>",
|
3751 |
+
"lstrip": false,
|
3752 |
+
"normalized": false,
|
3753 |
+
"rstrip": false,
|
3754 |
+
"single_word": false,
|
3755 |
+
"special": true
|
3756 |
+
},
|
3757 |
+
"471": {
|
3758 |
+
"content": "<|reserved_461|>",
|
3759 |
+
"lstrip": false,
|
3760 |
+
"normalized": false,
|
3761 |
+
"rstrip": false,
|
3762 |
+
"single_word": false,
|
3763 |
+
"special": true
|
3764 |
+
},
|
3765 |
+
"472": {
|
3766 |
+
"content": "<|reserved_462|>",
|
3767 |
+
"lstrip": false,
|
3768 |
+
"normalized": false,
|
3769 |
+
"rstrip": false,
|
3770 |
+
"single_word": false,
|
3771 |
+
"special": true
|
3772 |
+
},
|
3773 |
+
"473": {
|
3774 |
+
"content": "<|reserved_463|>",
|
3775 |
+
"lstrip": false,
|
3776 |
+
"normalized": false,
|
3777 |
+
"rstrip": false,
|
3778 |
+
"single_word": false,
|
3779 |
+
"special": true
|
3780 |
+
},
|
3781 |
+
"474": {
|
3782 |
+
"content": "<|reserved_464|>",
|
3783 |
+
"lstrip": false,
|
3784 |
+
"normalized": false,
|
3785 |
+
"rstrip": false,
|
3786 |
+
"single_word": false,
|
3787 |
+
"special": true
|
3788 |
+
},
|
3789 |
+
"475": {
|
3790 |
+
"content": "<|reserved_465|>",
|
3791 |
+
"lstrip": false,
|
3792 |
+
"normalized": false,
|
3793 |
+
"rstrip": false,
|
3794 |
+
"single_word": false,
|
3795 |
+
"special": true
|
3796 |
+
},
|
3797 |
+
"476": {
|
3798 |
+
"content": "<|reserved_466|>",
|
3799 |
+
"lstrip": false,
|
3800 |
+
"normalized": false,
|
3801 |
+
"rstrip": false,
|
3802 |
+
"single_word": false,
|
3803 |
+
"special": true
|
3804 |
+
},
|
3805 |
+
"477": {
|
3806 |
+
"content": "<|reserved_467|>",
|
3807 |
+
"lstrip": false,
|
3808 |
+
"normalized": false,
|
3809 |
+
"rstrip": false,
|
3810 |
+
"single_word": false,
|
3811 |
+
"special": true
|
3812 |
+
},
|
3813 |
+
"478": {
|
3814 |
+
"content": "<|reserved_468|>",
|
3815 |
+
"lstrip": false,
|
3816 |
+
"normalized": false,
|
3817 |
+
"rstrip": false,
|
3818 |
+
"single_word": false,
|
3819 |
+
"special": true
|
3820 |
+
},
|
3821 |
+
"479": {
|
3822 |
+
"content": "<|reserved_469|>",
|
3823 |
+
"lstrip": false,
|
3824 |
+
"normalized": false,
|
3825 |
+
"rstrip": false,
|
3826 |
+
"single_word": false,
|
3827 |
+
"special": true
|
3828 |
+
},
|
3829 |
+
"480": {
|
3830 |
+
"content": "<|reserved_470|>",
|
3831 |
+
"lstrip": false,
|
3832 |
+
"normalized": false,
|
3833 |
+
"rstrip": false,
|
3834 |
+
"single_word": false,
|
3835 |
+
"special": true
|
3836 |
+
},
|
3837 |
+
"481": {
|
3838 |
+
"content": "<|reserved_471|>",
|
3839 |
+
"lstrip": false,
|
3840 |
+
"normalized": false,
|
3841 |
+
"rstrip": false,
|
3842 |
+
"single_word": false,
|
3843 |
+
"special": true
|
3844 |
+
},
|
3845 |
+
"482": {
|
3846 |
+
"content": "<|reserved_472|>",
|
3847 |
+
"lstrip": false,
|
3848 |
+
"normalized": false,
|
3849 |
+
"rstrip": false,
|
3850 |
+
"single_word": false,
|
3851 |
+
"special": true
|
3852 |
+
},
|
3853 |
+
"483": {
|
3854 |
+
"content": "<|reserved_473|>",
|
3855 |
+
"lstrip": false,
|
3856 |
+
"normalized": false,
|
3857 |
+
"rstrip": false,
|
3858 |
+
"single_word": false,
|
3859 |
+
"special": true
|
3860 |
+
},
|
3861 |
+
"484": {
|
3862 |
+
"content": "<|reserved_474|>",
|
3863 |
+
"lstrip": false,
|
3864 |
+
"normalized": false,
|
3865 |
+
"rstrip": false,
|
3866 |
+
"single_word": false,
|
3867 |
+
"special": true
|
3868 |
+
},
|
3869 |
+
"485": {
|
3870 |
+
"content": "<|reserved_475|>",
|
3871 |
+
"lstrip": false,
|
3872 |
+
"normalized": false,
|
3873 |
+
"rstrip": false,
|
3874 |
+
"single_word": false,
|
3875 |
+
"special": true
|
3876 |
+
},
|
3877 |
+
"486": {
|
3878 |
+
"content": "<|reserved_476|>",
|
3879 |
+
"lstrip": false,
|
3880 |
+
"normalized": false,
|
3881 |
+
"rstrip": false,
|
3882 |
+
"single_word": false,
|
3883 |
+
"special": true
|
3884 |
+
},
|
3885 |
+
"487": {
|
3886 |
+
"content": "<|reserved_477|>",
|
3887 |
+
"lstrip": false,
|
3888 |
+
"normalized": false,
|
3889 |
+
"rstrip": false,
|
3890 |
+
"single_word": false,
|
3891 |
+
"special": true
|
3892 |
+
},
|
3893 |
+
"488": {
|
3894 |
+
"content": "<|reserved_478|>",
|
3895 |
+
"lstrip": false,
|
3896 |
+
"normalized": false,
|
3897 |
+
"rstrip": false,
|
3898 |
+
"single_word": false,
|
3899 |
+
"special": true
|
3900 |
+
},
|
3901 |
+
"489": {
|
3902 |
+
"content": "<|reserved_479|>",
|
3903 |
+
"lstrip": false,
|
3904 |
+
"normalized": false,
|
3905 |
+
"rstrip": false,
|
3906 |
+
"single_word": false,
|
3907 |
+
"special": true
|
3908 |
+
},
|
3909 |
+
"490": {
|
3910 |
+
"content": "<|reserved_480|>",
|
3911 |
+
"lstrip": false,
|
3912 |
+
"normalized": false,
|
3913 |
+
"rstrip": false,
|
3914 |
+
"single_word": false,
|
3915 |
+
"special": true
|
3916 |
+
},
|
3917 |
+
"491": {
|
3918 |
+
"content": "<|reserved_481|>",
|
3919 |
+
"lstrip": false,
|
3920 |
+
"normalized": false,
|
3921 |
+
"rstrip": false,
|
3922 |
+
"single_word": false,
|
3923 |
+
"special": true
|
3924 |
+
},
|
3925 |
+
"492": {
|
3926 |
+
"content": "<|reserved_482|>",
|
3927 |
+
"lstrip": false,
|
3928 |
+
"normalized": false,
|
3929 |
+
"rstrip": false,
|
3930 |
+
"single_word": false,
|
3931 |
+
"special": true
|
3932 |
+
},
|
3933 |
+
"493": {
|
3934 |
+
"content": "<|reserved_483|>",
|
3935 |
+
"lstrip": false,
|
3936 |
+
"normalized": false,
|
3937 |
+
"rstrip": false,
|
3938 |
+
"single_word": false,
|
3939 |
+
"special": true
|
3940 |
+
},
|
3941 |
+
"494": {
|
3942 |
+
"content": "<|reserved_484|>",
|
3943 |
+
"lstrip": false,
|
3944 |
+
"normalized": false,
|
3945 |
+
"rstrip": false,
|
3946 |
+
"single_word": false,
|
3947 |
+
"special": true
|
3948 |
+
},
|
3949 |
+
"495": {
|
3950 |
+
"content": "<|reserved_485|>",
|
3951 |
+
"lstrip": false,
|
3952 |
+
"normalized": false,
|
3953 |
+
"rstrip": false,
|
3954 |
+
"single_word": false,
|
3955 |
+
"special": true
|
3956 |
+
},
|
3957 |
+
"496": {
|
3958 |
+
"content": "<|reserved_486|>",
|
3959 |
+
"lstrip": false,
|
3960 |
+
"normalized": false,
|
3961 |
+
"rstrip": false,
|
3962 |
+
"single_word": false,
|
3963 |
+
"special": true
|
3964 |
+
},
|
3965 |
+
"497": {
|
3966 |
+
"content": "<|reserved_487|>",
|
3967 |
+
"lstrip": false,
|
3968 |
+
"normalized": false,
|
3969 |
+
"rstrip": false,
|
3970 |
+
"single_word": false,
|
3971 |
+
"special": true
|
3972 |
+
},
|
3973 |
+
"498": {
|
3974 |
+
"content": "<|reserved_488|>",
|
3975 |
+
"lstrip": false,
|
3976 |
+
"normalized": false,
|
3977 |
+
"rstrip": false,
|
3978 |
+
"single_word": false,
|
3979 |
+
"special": true
|
3980 |
+
},
|
3981 |
+
"499": {
|
3982 |
+
"content": "<|reserved_489|>",
|
3983 |
+
"lstrip": false,
|
3984 |
+
"normalized": false,
|
3985 |
+
"rstrip": false,
|
3986 |
+
"single_word": false,
|
3987 |
+
"special": true
|
3988 |
+
},
|
3989 |
+
"500": {
|
3990 |
+
"content": "<|reserved_490|>",
|
3991 |
+
"lstrip": false,
|
3992 |
+
"normalized": false,
|
3993 |
+
"rstrip": false,
|
3994 |
+
"single_word": false,
|
3995 |
+
"special": true
|
3996 |
+
},
|
3997 |
+
"64011": {
|
3998 |
+
"content": "Mathias",
|
3999 |
+
"lstrip": false,
|
4000 |
+
"normalized": true,
|
4001 |
+
"rstrip": false,
|
4002 |
+
"single_word": false,
|
4003 |
+
"special": false
|
4004 |
+
},
|
4005 |
+
"64014": {
|
4006 |
+
"content": "python",
|
4007 |
+
"lstrip": false,
|
4008 |
+
"normalized": true,
|
4009 |
+
"rstrip": false,
|
4010 |
+
"single_word": false,
|
4011 |
+
"special": false
|
4012 |
+
},
|
4013 |
+
"64394": {
|
4014 |
+
"content": "<|cot_start|>",
|
4015 |
+
"lstrip": false,
|
4016 |
+
"normalized": false,
|
4017 |
+
"rstrip": false,
|
4018 |
+
"single_word": false,
|
4019 |
+
"special": true
|
4020 |
+
},
|
4021 |
+
"64395": {
|
4022 |
+
"content": "<|cot_end|>",
|
4023 |
+
"lstrip": false,
|
4024 |
+
"normalized": false,
|
4025 |
+
"rstrip": false,
|
4026 |
+
"single_word": false,
|
4027 |
+
"special": true
|
4028 |
+
},
|
4029 |
+
"64396": {
|
4030 |
+
"content": "<|review_start|>",
|
4031 |
+
"lstrip": false,
|
4032 |
+
"normalized": false,
|
4033 |
+
"rstrip": false,
|
4034 |
+
"single_word": false,
|
4035 |
+
"special": true
|
4036 |
+
},
|
4037 |
+
"64397": {
|
4038 |
+
"content": "<|review_end|>",
|
4039 |
+
"lstrip": false,
|
4040 |
+
"normalized": false,
|
4041 |
+
"rstrip": false,
|
4042 |
+
"single_word": false,
|
4043 |
+
"special": true
|
4044 |
+
},
|
4045 |
+
"64398": {
|
4046 |
+
"content": "<|file_start|>",
|
4047 |
+
"lstrip": false,
|
4048 |
+
"normalized": false,
|
4049 |
+
"rstrip": false,
|
4050 |
+
"single_word": false,
|
4051 |
+
"special": true
|
4052 |
+
},
|
4053 |
+
"64399": {
|
4054 |
+
"content": "<|file_end|>",
|
4055 |
+
"lstrip": false,
|
4056 |
+
"normalized": false,
|
4057 |
+
"rstrip": false,
|
4058 |
+
"single_word": false,
|
4059 |
+
"special": true
|
4060 |
+
}
|
4061 |
+
},
|
4062 |
+
"bos_token": "<|startoftext|>",
|
4063 |
+
"clean_up_tokenization_spaces": true,
|
4064 |
+
"eos_token": "<|im_end|>",
|
4065 |
+
"extra_special_tokens": {},
|
4066 |
+
"legacy": false,
|
4067 |
+
"model_max_length": 1000000000000000019884624838656,
|
4068 |
+
"pad_token": "<|pad|>",
|
4069 |
+
"sp_model_kwargs": {},
|
4070 |
+
"spaces_between_special_tokens": false,
|
4071 |
+
"tokenizer_class": "PreTrainedTokenizerFast",
|
4072 |
+
"use_default_system_prompt": false,
|
4073 |
+
"use_fast": true
|
4074 |
+
}
|
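The `added_tokens_decoder` block above registers long runs of `<|reserved_*|>` placeholder tokens (kept so new special tokens can be assigned later without resizing the vocabulary) alongside the delimiters this upload actually uses: `<|cot_start|>`/`<|cot_end|>`, `<|review_start|>`/`<|review_end|>`, and `<|file_start|>`/`<|file_end|>`. A minimal sketch of loading and inspecting this configuration with Hugging Face Transformers follows; `"path/to/this-repo"` is a placeholder for the actual model id or a local checkout, and it assumes a recent `transformers` version that picks up the bundled `chat_template.jinja` automatically.

```python
# Minimal sketch, assuming a recent transformers release and a placeholder
# repo path -- substitute the real model id or a local directory.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("path/to/this-repo")

# Core special tokens, as declared in tokenizer_config.json above.
print(tok.bos_token)  # <|startoftext|>
print(tok.eos_token)  # <|im_end|>
print(tok.pad_token)  # <|pad|>

# The reasoning/review/file delimiters should map to ids 64394-64399
# per the added_tokens_decoder entries above.
for t in ["<|cot_start|>", "<|cot_end|>", "<|review_start|>",
          "<|review_end|>", "<|file_start|>", "<|file_end|>"]:
    print(t, "->", tok.convert_tokens_to_ids(t))

# Render a conversation with the repo's chat template (chat_template.jinja).
messages = [{"role": "user", "content": "Hello!"}]
print(tok.apply_chat_template(messages, tokenize=False,
                              add_generation_prompt=True))
```

Note that `"special": true` entries are skipped by `tok.decode(..., skip_special_tokens=True)`, while the two `"special": false` additions (`Mathias`, `python`) decode as ordinary text.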