How to save a BERT or DistilBERT model?

Hi all!
I'm having trouble loading my trained DistilBERT model and can't figure out a way around it. When I try to load the model, I get a ValueError. My code is below; you can find the notebook, data, and model in my Google Drive:

https://drive.google.com/drive/folders/1HV60_4D2JZ_Yru5tVRDblhUdgKOIQCS7?usp=sharing

ValueError: Unknown layer: TFDistilBertModel. Please ensure this object is passed to the custom_objects argument. See "Save and load Keras models | TensorFlow Core" for details.
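If I read the error correctly, Keras needs the Hugging Face layer class registered at load time via custom_objects. I assume the fix would look something like this sketch, but I'm not certain it's the whole story:

from tensorflow.keras.models import load_model
from transformers import TFDistilBertModel

# Register the custom layer class so Keras can deserialize it
model = load_model("test_distilbert.h5",
                   custom_objects={"TFDistilBertModel": TFDistilBertModel})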

Here's how I build, train, and save the model:

import tensorflow as tf
from transformers import DistilBertConfig, TFDistilBertModel

# MODEL_NAME and MAX_LENGTH are defined earlier in the notebook
config = DistilBertConfig.from_pretrained(MODEL_NAME, output_hidden_states=True, output_attentions=True)
DistilBERT = TFDistilBertModel.from_pretrained(MODEL_NAME, config=config)

# Two inputs: token ids and attention masks
input_ids_in = tf.keras.layers.Input(shape=(MAX_LENGTH,), name='input_token', dtype='int32')
input_masks_in = tf.keras.layers.Input(shape=(MAX_LENGTH,), name='masked_token', dtype='int32')

# Feed DistilBERT's last hidden state into a BiLSTM classification head
embedding_layer = DistilBERT(input_ids=input_ids_in, attention_mask=input_masks_in)[0]
X = tf.keras.layers.Bidirectional(tf.keras.layers.LSTM(128, return_sequences=True))(embedding_layer)
X = tf.keras.layers.GlobalMaxPool1D()(X)
X = tf.keras.layers.Dense(64, activation='relu')(X)
X = tf.keras.layers.Dropout(0.2)(X)
X = tf.keras.layers.Dense(4, activation='softmax')(X)

model = tf.keras.Model(inputs=[input_ids_in, input_masks_in], outputs=X)

# Freeze the DistilBERT layers so only the head is trained
for layer in model.layers[:3]:
    layer.trainable = False

model.summary()

optim = tf.keras.optimizers.Adam()
loss_func = tf.keras.losses.CategoricalCrossentropy()
acc = tf.keras.metrics.CategoricalAccuracy('accuracy')

model.compile(loss=loss_func, optimizer=optim, metrics=[acc])

# One-hot encode the labels for categorical cross-entropy
Y_train_cat = tf.keras.utils.to_categorical(y_train)

# model_checkpoint, early_stopping and reduce_lr are defined earlier in the notebook
history = model.fit(X_train,
                    Y_train_cat,
                    epochs=4,
                    batch_size=256,
                    shuffle=True,
                    validation_split=.2,
                    callbacks=[model_checkpoint, early_stopping, reduce_lr])

model.save("test_distilbert.h5")

Loading the saved file is what fails:

from tensorflow.keras.models import load_model
load_model("test_distilbert.h5")
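As a fallback, I'm considering saving only the weights and rebuilding the architecture in code before loading them, which I believe would sidestep deserializing the custom layer entirely. A sketch, where build_model() is a hypothetical helper wrapping the model definition above:

# Save only the weights, not the serialized architecture
model.save_weights("test_distilbert_weights.h5")

# Later: recreate the exact same architecture, then load the weights
new_model = build_model()  # hypothetical helper re-running the code above
new_model.load_weights("test_distilbert_weights.h5")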