```python
import torch
import torch.nn as nn
from transformers import AutoModel, AutoConfig
from transformers.modeling_outputs import TokenClassifierOutput
from transformers import PretrainedConfig, PreTrainedModel


class CustomModel(PreTrainedModel):
    def __init__(self, config, num_labels):
        super(CustomModel, self).__init__(config)
        self.num_labels = num_labels
        # Build the backbone from the config only (no pretrained weights are loaded here)
        self.model = AutoModel.from_config(config)
        self.dropout = nn.Dropout(config.hidden_dropout_prob)
        self.lstm = nn.LSTM(input_size=config.hidden_size, hidden_size=128, batch_first=True)
        self.classifier = nn.Linear(128, num_labels)
        self.init_weights()

    def forward(self, input_ids=None, attention_mask=None, labels=None):
        outputs = self.model(input_ids=input_ids, attention_mask=attention_mask)
        sequence_output = self.dropout(outputs.last_hidden_state)
        lstm_output, _ = self.lstm(sequence_output)
        # Keep only the last LSTM time step, so there is one prediction per sequence
        lstm_output_last = lstm_output[:, -1, :]
        logits = self.classifier(lstm_output_last)

        loss = None
        if labels is not None:
            loss_fct = nn.CrossEntropyLoss()
            loss = loss_fct(logits.view(-1, self.num_labels), labels.view(-1))

        return TokenClassifierOutput(
            loss=loss,
            logits=logits,
            hidden_states=outputs.hidden_states,
            attentions=outputs.attentions,
        )


# Instantiate the configuration and model
# (checkpoint is assumed to be defined earlier, e.g. checkpoint = "bert-base-uncased")
config = AutoConfig.from_pretrained(checkpoint, output_hidden_states=True, output_attentions=True)
model = CustomModel(config, num_labels=2)
```
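A quick forward-pass check, assuming a tokenizer loaded from the same `checkpoint` as above (the input sentences and labels below are just examples), shows the shape this architecture produces: one row of logits per sequence, since only the last LSTM step is kept.

```python
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained(checkpoint)
batch = tokenizer(["first example", "second example"], padding=True, return_tensors="pt")

with torch.no_grad():
    out = model(
        input_ids=batch["input_ids"],
        attention_mask=batch["attention_mask"],
        labels=torch.tensor([0, 1]),
    )

print(out.logits.shape)  # torch.Size([2, 2]) -> (batch_size, num_labels)
print(out.loss)          # scalar cross-entropy loss
```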
How can I upload this architecture? When I upload it, I get an error about the state dictionary being corrupted.
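By "upload" I mean something along these lines; the directory name and repo id below are placeholders, and the reload call is only an assumption about where a state-dict mismatch could surface, not the exact code that fails.

```python
# Placeholder names; a simplified sketch of the save / upload / reload round trip
model.save_pretrained("custom-model")            # writes config.json and the weights file
model.push_to_hub("my-username/custom-model")    # standard Hub upload for a PreTrainedModel

# Reloading into the custom class: the config is passed explicitly because
# CustomModel does not define its own config_class, and the extra positional
# argument (2) is forwarded to CustomModel.__init__ as num_labels.
reloaded = CustomModel.from_pretrained(
    "my-username/custom-model", 2, config=config
)
```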