{
  "version": "1.0",
  "truncation": null,
  "padding": null,
  "added_tokens": [
    {
      "id": 0,
      "content": "<pad>",
      "single_word": false,
      "lstrip": false,
      "rstrip": false,
      "normalized": false,
      "special": true
    },
    {
      "id": 1,
      "content": "<s>",
      "single_word": false,
      "lstrip": false,
      "rstrip": false,
      "normalized": false,
      "special": true
    },
    {
      "id": 2,
      "content": "</s>",
      "single_word": false,
      "lstrip": false,
      "rstrip": false,
      "normalized": false,
      "special": true
    },
    {
      "id": 3,
      "content": "<unk>",
      "single_word": false,
      "lstrip": false,
      "rstrip": false,
      "normalized": false,
      "special": true
    }
  ],
  "normalizer": null,
  "pre_tokenizer": {
    "type": "Whitespace"
  },
  "post_processor": null,
  "decoder": null,
  "model": {
    "type": "WordLevel",
    "vocab": {
      "<pad>": 0,
      "<s>": 1,
      "</s>": 2,
      "<unk>": 3,
      "0": 4,
      "1": 5,
      "2": 6,
      "3": 7,
      "4": 8,
      "5": 9,
      "6": 10,
      "7": 11,
      "8": 12,
      "9": 13,
      "MOD_2": 14,
      "EQUALS": 15,
      "END": 16,
      "SUM": 17,
      "MOD_3": 18,
      "MOD_5": 19,
      "MOD_7": 20
    },
    "unk_token": "<unk>"
  }
}