Trainer class does not read in labels

# loading base model and tokenizer
import torch
from transformers import (
    AutoModelForSequenceClassification,
    AutoTokenizer,
    BitsAndBytesConfig,
    TrainingArguments,
)

quant_config = BitsAndBytesConfig(
    load_in_4bit=True,
    bnb_4bit_use_double_quant=True,
    bnb_4bit_quant_type="nf4",
    bnb_4bit_compute_dtype=torch.float16,
)
model = AutoModelForSequenceClassification.from_pretrained(
    'garage-bAInd/Platypus2-7B',
    quantization_config=quant_config,
    num_labels=2,
    problem_type="single_label_classification",
)
tokenizer = AutoTokenizer.from_pretrained('garage-bAInd/Platypus2-7B')
# Llama-based models ship without a pad token, which classification batching needs
tokenizer.pad_token = tokenizer.eos_token
model.config.pad_token_id = tokenizer.pad_token_id
from peft import LoraConfig, TaskType, get_peft_model, prepare_model_for_kbit_training

# prepare the 4-bit model for training (casts norms to fp32, enables gradient checkpointing)
model = prepare_model_for_kbit_training(model)

peft_config = LoraConfig(
    r=8, lora_alpha=4, task_type=TaskType.SEQ_CLS, lora_dropout=0.1,
    bias="none", inference_mode=False, target_modules=["gate_proj", "down_proj", "up_proj"]
)
model = get_peft_model(model, peft_config)
model.print_trainable_parameters()
from trl import SFTTrainer
training_args = TrainingArguments(
    output_dir='op',
    overwrite_output_dir=True,
    warmup_ratio=0.1,
    lr_scheduler_type='cosine',
    per_device_train_batch_size=1,
    per_device_eval_batch_size=1,
    num_train_epochs=1,
    learning_rate=2e-4,
    label_names=['label'],
    remove_unused_columns=False,
    optim="adafactor",
    logging_steps=250,
    eval_steps=250,
    evaluation_strategy='steps',
    load_best_model_at_end=True,
    save_total_limit=2,
    fp16=True,
    bf16=False,
    weight_decay=0.01,
    report_to="none",
)
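
For completeness, combined_dataset is a datasets.Dataset; the rows below are placeholders, but the 'text'/'label' column layout matches my real data:

from datasets import Dataset

# placeholder rows only; the real dataset has the same two columns
combined_dataset = Dataset.from_dict({
    "text": ["first example document", "second example document"],
    "label": [0, 1],
})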
trainer = SFTTrainer(
    model=model,
    args=training_args,
    tokenizer=tokenizer,
    train_dataset=combined_dataset,
    dataset_text_field='text',
    peft_config=peft_config,
    max_seq_length=10000,
)

I was trying to fine-tune a sequence classifier with the settings above. However, when I inspect trainer.train_dataset, there is no label column in it. Is it because Platypus does not support sequence classification?
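
Here is the quick check I ran (assuming trl's SFTTrainer exposes its processed dataset as trainer.train_dataset, a datasets.Dataset):

# the raw dataset still has both columns
print(combined_dataset.column_names)        # ['text', 'label']
# after SFTTrainer's internal tokenization, the label column is gone
print(trainer.train_dataset.column_names)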