LayoutLMv2 support in colab TPU

Hi,

I’m trying to fine-tune a LayoutLMv2 model on a Colab TPU, but I’m hitting the runtime error below. Any suggestions?

Code snippet:

def training_function():
    """Fine-tune LayoutLMv2 for sequence classification under 🤗 Accelerate.

    Launched once per TPU core by ``notebook_launcher``; relies on the
    surrounding notebook for ``hyperparameters`` and ``create_dataloaders``.
    """
    accelerator = Accelerator()

    train_dl, eval_dl = create_dataloaders(
        train_batch_size=hyperparameters["train_batch_size"],
        eval_batch_size=hyperparameters["eval_batch_size"],
    )
    set_seed(hyperparameters["seed"])

    model = AutoModelForSequenceClassification.from_pretrained(
        "microsoft/layoutlmv2-base-uncased", num_labels=2
    )
    optimizer = AdamW(params=model.parameters(), lr=hyperparameters["learning_rate"])

    # Let Accelerate place everything on the XLA device and wrap the loaders.
    model, optimizer, train_dl, eval_dl = accelerator.prepare(
        model, optimizer, train_dl, eval_dl
    )

    num_epochs = hyperparameters["num_epochs"]
    # len(train_dl) is measured after prepare(), so the schedule spans
    # exactly the per-process number of optimizer steps taken below.
    lr_scheduler = get_linear_schedule_with_warmup(
        optimizer=optimizer,
        num_warmup_steps=100,
        num_training_steps=num_epochs * len(train_dl),
    )

    for _epoch in range(num_epochs):
        model.train()
        for batch in train_dl:
            loss = model(**batch).loss
            accelerator.backward(loss)
            optimizer.step()
            lr_scheduler.step()
            optimizer.zero_grad()

# Spawn the training function on the available accelerator processes
# (on a Colab TPU, notebook_launcher forks one process per TPU core).
notebook_launcher(training_function)

And I’m getting the following error:

Exception in device=TPU:5: torch_xla/csrc/tensor_methods.cpp:880 : Check failed: xla::ShapeUtil::Compatible(shapes.back(), tensor_shape)