Upload tokenizer

- special_tokens_map.json  +1 -1
- tokenizer_config.json  +1 -1
special_tokens_map.json CHANGED
@@ -7,7 +7,7 @@
     "single_word": false
   },
   "eos_token": {
-    "content": "<|
+    "content": "<|eot_id|>",
     "lstrip": false,
     "normalized": false,
     "rstrip": false,
tokenizer_config.json CHANGED
@@ -2061,4 +2061,4 @@
   "pad_token": "<|reserved_special_token_250|>",
   "padding_side": "left",
   "tokenizer_class": "PreTrainedTokenizerFast"
-}
+}
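A minimal usage sketch (not part of the commit): assuming these files belong to a Hugging Face Hub repository that can be loaded with transformers' AutoTokenizer, the snippet below checks that the updated eos_token ("<|eot_id|>") and the existing pad_token / padding_side settings from tokenizer_config.json are picked up after re-downloading. The repo id is a placeholder, not taken from the diff.

from transformers import AutoTokenizer

# Hypothetical repository id; substitute the repo this commit belongs to.
repo_id = "your-org/your-model"

tokenizer = AutoTokenizer.from_pretrained(repo_id)

# special_tokens_map.json now maps eos_token to "<|eot_id|>", so both the
# string and the id resolved from the vocabulary should reflect that.
print(tokenizer.eos_token)     # expected: <|eot_id|>
print(tokenizer.eos_token_id)

# tokenizer_config.json keeps pad_token = "<|reserved_special_token_250|>"
# and padding_side = "left", which matters when batching inputs for generation.
print(tokenizer.pad_token, tokenizer.padding_side)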