Customizing pipeline problems

Below is a snippet of custom pipeline code for Hugging Face Transformers.

import numpy as np

from transformers import Pipeline

def softmax(outputs):
    """Compute a softmax over the last axis of *outputs*.

    Subtracts the per-row maximum before exponentiating so large logits
    do not overflow (the shift cancels out in the normalization).
    """
    stabilized = outputs - np.max(outputs, axis=-1, keepdims=True)
    exps = np.exp(stabilized)
    return exps / np.sum(exps, axis=-1, keepdims=True)

class PairClassificationPipeline(Pipeline):
    """Classify a pair of texts (e.g. premise/hypothesis) with a sequence
    classification model, returning the best label, its score, and raw logits.
    """

    def _sanitize_parameters(self, **kwargs):
        # Route the optional "second_text" kwarg to preprocess();
        # _forward() and postprocess() take no extra parameters.
        pre_kwargs = {k: kwargs[k] for k in ("second_text",) if k in kwargs}
        return pre_kwargs, {}, {}

    def preprocess(self, text, second_text=None):
        # Tokenize the (text, second_text) pair as framework-native tensors.
        encoded = self.tokenizer(
            text, text_pair=second_text, return_tensors=self.framework
        )
        return encoded

    def _forward(self, model_inputs):
        # Run the model on the tokenized inputs.
        outputs = self.model(**model_inputs)
        return outputs

    def postprocess(self, model_outputs):
        # Convert logits to probabilities and pick the top class.
        raw = model_outputs.logits[0].numpy()
        probs = softmax(raw)
        top = np.argmax(probs)
        return {
            "label": self.model.config.id2label[top],
            "score": probs[top].item(),
            "logits": raw.tolist(),
        }

My question is: how can I pass the `model_inputs` used in `_forward` through to `postprocess`? Since I need the model inputs alongside the model outputs to compute the final postprocessed result, `postprocess` must receive `model_inputs` as well. Essentially, what I want would look like:

def postprocess(self, model_outputs, model_inputs):
  # Mean-pool token embeddings over the sequence, ignoring padding positions.
  # NOTE(review): `torch` is used below but never imported in this snippet — add `import torch`.
  embeddings = model_outputs.last_hidden_state
  # NOTE(review): assumes model_inputs[1] is the attention mask (1 for real
  # tokens, 0 for padding) — verify how model_inputs is indexed by the caller.
  mask = model_inputs[1].unsqueeze(-1).expand(embeddings.size()).float()
  # Zero out embeddings at padded positions.
  masked_embeddings = embeddings * mask
  # Sum over the sequence dimension (dim=1).
  summed_emb = torch.sum(masked_embeddings, 1)
  # Clamp avoids division by zero for rows that are entirely padding.
  summed_mask = torch.clamp(mask.sum(1), min=1e-9)
  # Average embedding per sequence = sum of real-token embeddings / token count.
  mean_pooled_emb = summed_emb / summed_mask
  return mean_pooled_emb

Can anyone explain how I should modify the custom pipeline code (for example, by having `_forward` return both the model outputs and the model inputs) and how I should run it?