def tokenize(batch):
    # Cast every entry to str in case the column holds non-string values (e.g. None or floats)
    texts = [str(text) for text in batch["text"]]
    return tokenizer(texts, padding=True, truncation=True)

emotions_encoded = emotions.map(tokenize, batched=True, batch_size=None)
IT WORKS!!
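If it helps anyone else, here is a quick sanity check I'd run on the mapped result (a sketch, assuming `emotions` is a 🤗 DatasetDict and `tokenizer` is the transformers tokenizer loaded earlier in the thread; the exact new columns depend on your tokenizer, and the set_format call is only needed if you train with PyTorch):

# Inspect the columns added by map (assumes emotions_encoded from the snippet above)
print(emotions_encoded["train"].column_names)
# e.g. ['text', 'label', 'input_ids', 'attention_mask']

# Optionally return the tokenized columns as PyTorch tensors for training
emotions_encoded.set_format("torch", columns=["input_ids", "attention_mask", "label"])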