Create a Custom Model with pretrained weights of PubMedBERT

In chapter 4, we try to create a custom XLMRobertaForTokenClassification.
I would like to build a custom model with PubMedBERT as the base.

Any suggestions on how to do it?

Below is the code given in the text.

class CustomModel(BertPreTrainedModel):
    """Token-classification model using a plain BERT encoder body.

    Loads a BERT body (e.g. PubMedBERT weights via ``from_pretrained``) without
    the pooling layer and adds a dropout + linear classification head that
    predicts one label per input token.
    """

    config_class = BertConfig

    # NOTE: in the original snippet __init__ and forward were indented at
    # module level, so they were plain functions and CustomModel had no
    # methods at all. They must be indented inside the class.
    def __init__(self, config):
        super().__init__(config)
        self.num_labels = config.num_labels
        # Load model body; the pooler is unused for token classification.
        self.bert = BertModel(config, add_pooling_layer=False)
        # Set up token classification head.
        self.dropout = nn.Dropout(config.hidden_dropout_prob)
        self.classifier = nn.Linear(config.hidden_size, config.num_labels)
        # Load and initialize weights.
        self.init_weights()

    def forward(self, input_ids=None, attention_mask=None, token_type_ids=None,
                labels=None, **kwargs):
        """Encode tokens and classify each one.

        Returns a TokenClassifierOutput; ``loss`` is None unless ``labels``
        is provided, in which case cross-entropy over all token positions
        is computed.
        """
        # Use model body to get encoder representations.
        outputs = self.bert(input_ids, attention_mask=attention_mask,
                            token_type_ids=token_type_ids, **kwargs)
        # Apply classifier to the encoder representation (last hidden state).
        sequence_output = self.dropout(outputs[0])
        logits = self.classifier(sequence_output)
        # Calculate loss only when labels are supplied (training/eval).
        loss = None
        if labels is not None:
            loss_fct = nn.CrossEntropyLoss()
            # Flatten (batch, seq, num_labels) -> (batch*seq, num_labels).
            loss = loss_fct(logits.view(-1, self.num_labels), labels.view(-1))
        # hidden_states/attentions are None unless requested via the config.
        return TokenClassifierOutput(loss=loss, logits=logits,
                                     hidden_states=outputs.hidden_states,
                                     attentions=outputs.attentions)