Update tokenizer.json
tokenizer.json (CHANGED): +2 -4
@@ -448,12 +448,10 @@
   "normalizer": null,
   "pre_tokenizer": {
     "type": "Sequence",
-    "
+    "pre_tokenizers": [
       {
         "type": "Replace",
-        "pattern":
-          "String": " "
-        },
+        "pattern": " ",
         "content": "[SPACE]"
       },
       {
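A quick way to confirm the patched block is well formed is to parse the updated file and check the two lines this commit touches. Below is a minimal Python sketch, assuming the new tokenizer.json has been saved locally; the file path and assertions are illustrative and not part of the commit itself.

import json

# Load the updated tokenizer.json. The pre-fix version of these lines was not
# valid JSON (the Sequence block had a stray quote and an unfinished "pattern"
# entry), so json.load would reject the old file at this point.
with open("tokenizer.json", "r", encoding="utf-8") as f:
    config = json.load(f)

pre_tok = config["pre_tokenizer"]
assert pre_tok["type"] == "Sequence"

# After the fix, the Sequence lists its steps under "pre_tokenizers", and the
# first step is a Replace that maps a literal space to the [SPACE] token.
replace_step = pre_tok["pre_tokenizers"][0]
assert replace_step["type"] == "Replace"
assert replace_step["pattern"] == " "
assert replace_step["content"] == "[SPACE]"
print("pre_tokenizer block matches the patched diff")

If the file is meant to be consumed by the Hugging Face tokenizers library, loading it with Tokenizer.from_file("tokenizer.json") is a stricter check, since the library also validates fields this sketch does not inspect.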