update tokenizer
tokenizer_config.json CHANGED (+1 -1)
@@ -43,6 +43,6 @@
   "padding_side": "right",
   "sep_token": "======",
   "split_special_tokens": false,
-  "tokenizer_class": "
+  "tokenizer_class": "GPT2Tokenizer",
   "unk_token": "<|endoftext|>"
 }
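This commit sets "tokenizer_class" to "GPT2Tokenizer", which is the class AutoTokenizer will instantiate when the repo is loaded. A minimal sketch of how that field is consumed (not part of this commit; the local path "./" and the use_fast=False flag are assumptions for illustration):

# Illustrative sketch only: AutoTokenizer reads tokenizer_config.json and
# instantiates the class named in "tokenizer_class".
from transformers import AutoTokenizer, GPT2Tokenizer

# use_fast=False asks for the slow GPT2Tokenizer named in the config
# (the default would substitute the fast Rust-backed equivalent).
tokenizer = AutoTokenizer.from_pretrained("./", use_fast=False)
assert isinstance(tokenizer, GPT2Tokenizer)

# The other settings in this config file are applied on load.
print(tokenizer.padding_side)  # "right"
print(tokenizer.unk_token)     # "<|endoftext|>"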