{
  "added_tokens_decoder": {
    "0": {
      "content": "[PAD]",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "1": {
      "content": "[UNK]",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "83": {
      "content": "<s>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "84": {
      "content": "</s>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    }
  },
  "bos_token": "<s>",
  "clean_up_tokenization_spaces": false,
  "do_lower_case": false,
  "do_normalize": false,
  "eos_token": "</s>",
  "extra_special_tokens": {},
  "model_max_length": 1000000000000000019884624838656,
  "pad_token": "[PAD]",
  "return_attention_mask": false,
  "tokenizer_class": "Wav2Vec2Tokenizer",
  "unk_token": "[UNK]",
  "word_delimiter_token": "|"
}
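
This configuration is normally consumed together with a `vocab.json` (not shown here) that holds the character-to-id mapping; ids 0, 1, 83 and 84 above are the declared special tokens. A minimal sketch of how such a config is typically loaded and used with the `transformers` library, assuming a hypothetical local directory `./my-wav2vec2-model` containing both files and a made-up id sequence for illustration:

```python
from transformers import Wav2Vec2Tokenizer

# Hypothetical local directory; from_pretrained reads this
# tokenizer_config.json plus the accompanying vocab.json.
tokenizer = Wav2Vec2Tokenizer.from_pretrained("./my-wav2vec2-model")

# Made-up CTC label ids purely for illustration (the real mapping lives
# in vocab.json). decode() collapses repeated ids, drops the [PAD]
# blank token, and turns the word_delimiter_token "|" back into spaces.
predicted_ids = [7, 7, 0, 12, 12, 4, 0, 0, 9]
print(tokenizer.decode(predicted_ids))
```

Note that `Wav2Vec2Tokenizer` (the class named in `tokenizer_class`) is a legacy class; newer checkpoints usually pair a `Wav2Vec2CTCTokenizer` with a `Wav2Vec2FeatureExtractor` inside a `Wav2Vec2Processor`, which is why feature-extractor style keys such as `do_normalize` and `return_attention_mask` appear in this file.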