Here is the code I am using to reload the model:
import torch

# Pick the GPU if one is available, otherwise fall back to the CPU
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")

# Reload the fully pickled model onto the chosen device
model = torch.load(model_path, map_location=device)
model.to(device)
model.eval()

input_ids = dataset[0].to(device)
input_masks = dataset[1].to(device)

with torch.no_grad():
    logits = model(
        input_ids,
        token_type_ids=None,
        attention_mask=input_masks,
        return_dict=False,
        labels=None,
    )[0]
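For reference, the model was originally saved by pickling the whole module (torch.save(model, model_path)). If that is the problem, I could switch to the state_dict pattern the PyTorch docs recommend. A minimal sketch of what I have in mind, where "bert-base-uncased" and num_labels=2 are placeholders rather than my real checkpoint and label count:

import torch
from transformers import BertForSequenceClassification

# Save only the weights instead of pickling the whole module
torch.save(model.state_dict(), "model_state.pt")

# Later: rebuild the architecture under the currently installed
# transformers version, then load the saved weights into it
# ("bert-base-uncased" and num_labels=2 are placeholders)
model = BertForSequenceClassification.from_pretrained(
    "bert-base-uncased", num_labels=2
)
model.load_state_dict(torch.load("model_state.pt", map_location=device))
model.to(device)
model.eval()

As I understand it, the point of this pattern is that the architecture is rebuilt by the installed transformers version, so any attributes added in newer releases exist before the weights are loaded.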
The error is raised inside the model() call, during the forward pass. Stack trace:
/usr/local/lib/python3.7/site-packages/torch/nn/modules/module.py:722: in _call_impl
    result = self.forward(*input, **kwargs)
/usr/local/lib/python3.7/site-packages/transformers/models/bert/modeling_bert.py:1375: in forward
    return_dict=return_dict,
/usr/local/lib/python3.7/site-packages/torch/nn/modules/module.py:722: in _call_impl
    result = self.forward(*input, **kwargs)
/usr/local/lib/python3.7/site-packages/transformers/models/bert/modeling_bert.py:862: in forward
    input_ids=input_ids, position_ids=position_ids, token_type_ids=token_type_ids, inputs_embeds=inputs_embeds
/usr/local/lib/python3.7/site-packages/torch/nn/modules/module.py:722: in _call_impl
    result = self.forward(*input, **kwargs)
/usr/local/lib/python3.7/site-packages/transformers/models/bert/modeling_bert.py:202: in forward
    if self.position_embedding_type == "absolute":
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = BertEmbeddings(
  (word_embeddings): Embedding(30000, 768, padding_idx=3)
  (position_embeddings): Embedding(512, 768)... 768)
  (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
  (dropout): Dropout(p=0.1, inplace=False)
)
name = 'position_embedding_type'
    def __getattr__(self, name: str) -> Union[Tensor, 'Module']:
        if '_parameters' in self.__dict__:
            _parameters = self.__dict__['_parameters']
            if name in _parameters:
                return _parameters[name]
        if '_buffers' in self.__dict__:
            _buffers = self.__dict__['_buffers']
            if name in _buffers:
                return _buffers[name]
        if '_modules' in self.__dict__:
            modules = self.__dict__['_modules']
            if name in modules:
                return modules[name]
>       raise ModuleAttributeError("'{}' object has no attribute '{}'".format(
            type(self).__name__, name))
E       torch.nn.modules.module.ModuleAttributeError: 'BertEmbeddings' object has no attribute 'position_embedding_type'
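My suspicion is that the pickled model was created under an older transformers release, and that position_embedding_type, which I believe was added to BertEmbeddings in a later version, is therefore missing from the unpickled object. To compare the two environments, I am checking the versions like this:

import torch
import transformers

# Compare these between the environment that saved the model
# and the environment that loads it
print(torch.__version__)
print(transformers.__version__)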
What is causing this error, and how can I fix it? Thank you for your help.