Export to Onnx and run inference Bigbirdpegasus summariser

I am trying to export the model into onnx and run inference but getting error. Can somebody help understand what changes should I make?

from pathlib import Path
from transformers.onnx import export
from transformers import AutoTokenizer, AutoModel

onnx_path = Path(“model.onnx”)
model_ckpt = “google/bigbird-pegasus-large-pubmed”
base_model = AutoModel.from_pretrained(model_ckpt)
tokenizer = AutoTokenizer.from_pretrained(model_ckpt)
onnx_inputs, onnx_outputs = export(tokenizer, base_model, onnx_config, onnx_config.default_onnx_opset, onnx_path)
tokenizer = AutoTokenizer.from_pretrained(“google/bigbird-pegasus-large-pubmed”)
session = InferenceSession(“model.onnx”)
inputs = tokenizer(“Summarize this text”, return_tensors=“np”)
inputs = {k: v.astype(np.int64) for k, v in inputs.items()}
outputs = session.run(output_names=[“last_hidden_state”], input_feed=inputs)

I am getting the following error:
ValueError: Model requires 4 inputs. Input Feed contains 2