GPT-J training from scratch error

ValueError: The model did not return a loss from the inputs, only the following keys: last_hidden_state,past_key_values. For reference, the inputs it received are input_ids,attention_mask.

from datasets import load_dataset
dataset = load_dataset("text", data_files="Filosofia Teoretica - Dataset - GPT-J.txt")

from transformers import AutoTokenizer
Tokenizer = AutoTokenizer.from_pretrained("Filosofia Teoretica - Tokenizer - GPT-J")

def TokenizeDataset( inputs ):
    return Tokenizer(inputs['text'], padding = "max_length", max_length = 1024, truncation = True)

TDataset = dataset.map(TokenizeDataset, batched = True)

from transformers import GPTJModel, GPTJConfig
configuration = GPTJConfig(n_embd = 128, vocab_size = 20000)
model = GPTJModel(configuration)

from transformers import Trainer
from transformers import TrainingArguments

training_args = TrainingArguments(output_dir = "Filosofia Teoretica - Model - GPT-J", evaluation_strategy = "epoch", num_train_epochs = 3)

model_trainer = Trainer(
    model = model,
    args = training_args,
    train_dataset = TDataset['train'],
)

model_trainer.train()

What I’m doing wrong? Can someone help?