Can I use a RoBERTa tokenizer with a BERT model?
from transformers import RobertaTokenizerFast, BertConfig, BertForMaskedLM

# Load the RoBERTa-style tokenizer saved in ./bert_tokenizer
tokenizer = RobertaTokenizerFast.from_pretrained("./bert_tokenizer", model_max_length=512)

# Build a BERT masked-LM model from a fresh (default) config
config = BertConfig()
bert = BertForMaskedLM(config)
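For the two to work together, I believe the BERT config would also have to be aligned with the tokenizer, since `BertConfig()` defaults to BERT's own WordPiece vocabulary size (30522) and pad token id, not the RoBERTa tokenizer's. A minimal sketch of what I think that alignment would look like, assuming the tokenizer in ./bert_tokenizer is the one the model should train with:

from transformers import RobertaTokenizerFast, BertConfig, BertForMaskedLM

tokenizer = RobertaTokenizerFast.from_pretrained("./bert_tokenizer", model_max_length=512)

# Size the embedding table to the tokenizer (including any added tokens)
# and point the model at the tokenizer's pad token id.
config = BertConfig(
    vocab_size=len(tokenizer),
    max_position_embeddings=512,
    pad_token_id=tokenizer.pad_token_id,
)
bert = BertForMaskedLM(config)

Is that the right way to do it, or is mixing the two architectures a bad idea in the first place?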