{
  "add_prefix_space": false,
  "added_tokens_decoder": {
    "0": {
      "content": "<s>",
      "lstrip": false,
      "normalized": true,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "1": {
      "content": "<pad>",
      "lstrip": false,
      "normalized": true,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "2": {
      "content": "</s>",
      "lstrip": false,
      "normalized": true,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "3": {
      "content": "<unk>",
      "lstrip": false,
      "normalized": true,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "50264": {
      "content": "<mask>",
      "lstrip": true,
      "normalized": true,
      "rstrip": false,
      "single_word": false,
      "special": true
    }
  },
  "bos_token": "<s>",
  "clean_up_tokenization_spaces": true,
  "cls_token": "<s>",
  "eos_token": "</s>",
  "errors": "replace",
  "mask_token": "<mask>",
  "model_max_length": 1024,
  "pad_token": "<pad>",
  "sep_token": "</s>",
  "tokenizer_class": "BartTokenizer",
  "trim_offsets": true,
  "unk_token": "<unk>"
}
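
The JSON above is a Hugging Face `tokenizer_config.json` for a `BartTokenizer`. As a minimal sketch of how this config is consumed, the snippet below loads a BART checkpoint and tokenizes a sentence; `facebook/bart-base` is an assumed checkpoint name, and any BART model shipping this same config should behave identically.

```python
# Minimal sketch, assuming the Hugging Face `transformers` library.
# "facebook/bart-base" is an assumed checkpoint; any BART checkpoint
# that ships the tokenizer_config.json above behaves the same way.
from transformers import BartTokenizer

tokenizer = BartTokenizer.from_pretrained("facebook/bart-base")

# These values come straight from the config above.
print(tokenizer.bos_token, tokenizer.eos_token)  # <s> </s>
print(tokenizer.mask_token_id)                   # 50264
print(tokenizer.model_max_length)                # 1024

# Encoding wraps the text in <s> ... </s> (ids 0 and 2 per the config).
enc = tokenizer("Hello world")
print(enc["input_ids"])                    # e.g. [0, 31414, 232, 2]
print(tokenizer.decode(enc["input_ids"]))  # "<s>Hello world</s>"
```

Note the `"lstrip": true` entry on `<mask>` (id 50264): unlike the other special tokens, the mask token absorbs the whitespace to its left, so `"a <mask> b"` tokenizes the mask together with the preceding space, which matters for BART's denoising/mask-filling usage.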