import torch.nn as nn
from transformers import AutoConfig, AutoModel, PreTrainedModel

class CustomClassifier(nn.Module):
    ...
class MultipleClassifiers(PreTrainedModel):
    def __init__(self, encoder: AutoModel, config: AutoConfig) -> None:
        super().__init__(config)
        self.encoder = encoder
        self.output_heads = nn.ModuleDict()
        # register the heads in the ModuleDict so their weights are saved with the model
        self.output_heads["classifier1"] = CustomClassifier(encoder, num_classes=100)
        self.output_heads["classifier2"] = CustomClassifier(encoder, num_classes=1000)
    ...
config = AutoConfig.from_pretrained("distilbert-base-uncased")
encoder = AutoModel.from_pretrained("distilbert-base-uncased")
model = MultipleClassifiers(encoder, config)
# I trained the model using Trainer.
# The problem is when I want to load the trained model:
model = AutoModel.from_pretrained("path")  # loads RobertaModel, not MultipleClassifiers
model = AutoModelForSequenceClassification.from_pretrained("path")  # loads RobertaForSequenceClassification
How can I modify the config (or anything else) to force AutoModelForSequenceClassification.from_pretrained to load MultipleClassifiers? I want to use the model with TextClassificationPipeline.
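
One way this is commonly handled (a minimal sketch, not tested against the exact setup above) is to give the model its own config class with a unique model_type, make the model constructible from the config alone, and register both classes with the Auto factories. MultipleClassifiersConfig, encoder_name, and num_classes_per_head below are illustrative names I am assuming, and plain nn.Linear layers stand in for CustomClassifier:

import torch.nn as nn
from transformers import (
    AutoConfig,
    AutoModel,
    AutoModelForSequenceClassification,
    PretrainedConfig,
    PreTrainedModel,
)

class MultipleClassifiersConfig(PretrainedConfig):
    # a unique model_type ties the saved config.json back to this class
    model_type = "multiple_classifiers"

    def __init__(self, encoder_name="distilbert-base-uncased",
                 num_classes_per_head=(100, 1000), **kwargs):
        self.encoder_name = encoder_name
        self.num_classes_per_head = list(num_classes_per_head)
        super().__init__(**kwargs)

class MultipleClassifiers(PreTrainedModel):
    config_class = MultipleClassifiersConfig

    # from_pretrained instantiates the model from the config alone,
    # so the encoder is built here instead of being passed in
    def __init__(self, config):
        super().__init__(config)
        self.encoder = AutoModel.from_pretrained(config.encoder_name)
        self.output_heads = nn.ModuleDict({
            f"classifier{i}": nn.Linear(self.encoder.config.hidden_size, n)
            for i, n in enumerate(config.num_classes_per_head, start=1)
        })
    ...

# register the custom classes so the Auto factories can resolve them
AutoConfig.register("multiple_classifiers", MultipleClassifiersConfig)
AutoModelForSequenceClassification.register(MultipleClassifiersConfig, MultipleClassifiers)

# after model.save_pretrained("path") writes a config.json containing
# "model_type": "multiple_classifiers", this returns MultipleClassifiers:
model = AutoModelForSequenceClassification.from_pretrained("path")

If the checkpoint lives on the Hub rather than on disk, the documented alternative is calling config.register_for_auto_class() and model.register_for_auto_class("AutoModelForSequenceClassification") before pushing, then loading with trust_remote_code=True. Note also that TextClassificationPipeline reads a single logits tensor from each forward pass, so a model with several heads still needs a forward that returns a SequenceClassifierOutput for whichever head it should serve.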