{
  "add_prefix_space": false,
  "added_tokens_decoder": {
    "0": {
      "content": "<s>",
      "lstrip": false,
      "normalized": true,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "1": {
      "content": "<pad>",
      "lstrip": false,
      "normalized": true,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "2": {
      "content": "</s>",
      "lstrip": false,
      "normalized": true,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "3": {
      "content": "<unk>",
      "lstrip": false,
      "normalized": true,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "50264": {
      "content": "<mask>",
      "lstrip": true,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    }
  },
  "bos_token": "<s>",
  "clean_up_tokenization_spaces": false,
  "cls_token": "<s>",
  "eos_token": "</s>",
  "errors": "replace",
  "extra_special_tokens": {},
  "mask_token": "<mask>",
  "model_max_length": 512,
  "pad_token": "<pad>",
  "sep_token": "</s>",
  "tokenizer_class": "RobertaTokenizer",
  "trim_offsets": true,
  "unk_token": "<unk>"
}
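
The JSON above is a tokenizer_config.json as saved by the Hugging Face transformers library, declaring a RobertaTokenizer with the standard <s>/<pad>/</s>/<unk> special tokens and a <mask> token at id 50264. As a minimal sketch of how such a file is consumed, assuming it sits in a local directory (the path ./tokenizer/ below is hypothetical) alongside the tokenizer's vocab.json and merges.txt:

# Sketch: loading a directory that contains this tokenizer_config.json.
# The path "./tokenizer/" is an assumption; any directory holding the
# config together with vocab.json and merges.txt works the same way.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("./tokenizer/")

# Special tokens declared in the config surface as tokenizer attributes:
print(tokenizer.bos_token, tokenizer.eos_token)  # <s> </s>
print(tokenizer.mask_token_id)                   # 50264
print(tokenizer.model_max_length)                # 512

# With "add_prefix_space": false, no leading space is inserted, so the
# first word of a text is encoded differently than the same word
# appearing mid-sentence.
ids = tokenizer("Hello world")["input_ids"]
print(tokenizer.decode(ids))                     # <s>Hello world</s>

Note that "lstrip": true on <mask> (and only on <mask>) lets the tokenizer absorb the whitespace to the left of the mask token, which is the behavior RoBERTa-style masked-language-modeling pipelines expect.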