How to train a Semantic Segmentation model using transformers tensorflow2 API

I want to train a Semantic Segmentation model using transformers tensorflow2 API. But the docs only provides the case for pytorch. I tried to implement, but it didn’t work. Here is my code.

from datasets import load_dataset
from transformers import SegformerFeatureExtractor, TFSegformerForSemanticSegmentation

ds = load_dataset("scene_parse_150", split="train[:50]")
ds = ds.train_test_split(test_size=0.2)
train_ds = ds["train"]
test_ds = ds["test"]

import json
from huggingface_hub import cached_download, hf_hub_url

repo_id = "huggingface/label-files"
filename = "ade20k-hf-doc-builder.json"
id2label = json.load(open(cached_download(hf_hub_url(repo_id, filename, repo_type="dataset")), "r"))
id2label = {int(k): v for k, v in id2label.items()}
label2id = {v: k for k, v in id2label.items()}
num_labels = len(id2label)

def data_transforms(example_batch):
    images = [x for x in example_batch["image"]]
    labels = [x for x in example_batch["annotation"]]
    inputs = feature_extractor(images, labels)
    return inputs

train_ds.set_transform(data_transforms)
test_ds.set_transform(data_transforms)

feature_extractor = AutoFeatureExtractor.from_pretrained("nvidia/mit-b0", reduce_labels=True)
model = TFSegformerForSemanticSegmentation.from_pretrained("nvidia/segformer-b0-finetuned-ade-512-512")

from transformers import create_optimizer
optimizer, schedule = create_optimizer(init_lr=0.0001, num_warmup_steps=100,
                                               num_train_steps=100)

model.compile(optimizer=optimizer)

model.fit(x=trainset, validation_data=valset, epochs=1)