{"special_tokens_map_file": "pretrained_model_hf_large_165K/special_tokens_map.json", "name_or_path": "pretrained_model_hf_large_165K", "tokenizer_class": "PreTrainedTokenizerFast"}