Update tokenizer.json
tokenizer.json CHANGED (+13, -1)
@@ -447,7 +447,19 @@
   ],
   "normalizer": null,
   "pre_tokenizer": {
-    "type": "
+    "type": "Sequence",
+    "pretokenizers": [
+      {
+        "type": "Replace",
+        "pattern": {
+          "String": " "
+        },
+        "content": "[SPACE]"
+      },
+      {
+        "type": "Whitespace"
+      }
+    ]
   },
   "post_processor": {
     "type": "TemplateProcessing",
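In effect, the pre-tokenizer becomes a two-step Sequence: a Replace step rewrites every literal space to the string "[SPACE]", and a Whitespace step then splits the result. A minimal sketch of how to load and spot-check the updated file with the Hugging Face tokenizers Python library (assuming the installed version supports Replace inside a pre-tokenizer Sequence, and that the rest of tokenizer.json is unchanged):

from tokenizers import Tokenizer

# Load the tokenizer.json updated in this commit.
tok = Tokenizer.from_file("tokenizer.json")

# Per the new pre_tokenizer, each space should be rewritten to "[SPACE]"
# before whitespace splitting; inspect the resulting tokens.
print(tok.encode("hello world").tokens)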