Hi @wsunadawong and @philschmid,

I am facing an error when deploying a fine-tuned BERT model on SageMaker. I have already trained the model, and the model artifact (model.tar.gz) is in S3. When I run the snippet of code below, the call to deploy(...) raises:

TypeError: expected str, bytes or os.PathLike object, not NoneType
from sagemaker.huggingface.model import HuggingFaceModel
import sagemaker
# Resolve the IAM role of the current SageMaker session; it is passed to the
# model so SageMaker can create the endpoint resources on our behalf.
role = sagemaker.get_execution_role()
# create Hugging Face Model Class
# NOTE(review): no entry_point/source_dir is supplied here. The traceback below
# shows deploy() calling _upload_code(..., repack=True), which passes
# self.entry_point (None) into shutil.copy2 — that is the direct source of the
# "expected str, bytes or os.PathLike object, not NoneType" TypeError.
# Presumably upgrading the sagemaker SDK or passing an explicit entry_point
# avoids the repack path — TODO confirm.
huggingface_model = HuggingFaceModel(
model_data="s3://sagemaker/huggingface-pytorch-training/output/model.tar.gz", # path to your trained sagemaker model
role=role, # iam role with permissions to create an Endpoint
transformers_version="4.6", # transformers version used
pytorch_version="1.7", # pytorch version used
py_version="py36", # python version of the DLC
)
Then I deploy the model:
# deploy model to SageMaker Inference
# Provisions a real-time endpoint backed by one ml.m5.xlarge instance.
# This is the call that fails: per the traceback, deploy() internally repacks
# the model archive and crashes on the missing (None) inference script.
predictor = huggingface_model.deploy(
initial_instance_count=1,
instance_type="ml.m5.xlarge"
)
And this is the full error traceback:
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-36-da06cee412a2> in <module>
2 predictor = huggingface_model.deploy(
3 initial_instance_count=1,
----> 4 instance_type="ml.m5.xlarge"
5 )
/opt/conda/lib/python3.6/site-packages/sagemaker/model.py in deploy(self, initial_instance_count, instance_type, serializer, deserializer, accelerator_type, endpoint_name, tags, kms_key, wait, data_capture_config, **kwargs)
761 if self._base_name is not None:
762 self._base_name = "-".join((self._base_name, compiled_model_suffix))
--> 763
764 self._create_sagemaker_model(instance_type, accelerator_type, tags)
765 production_variant = sagemaker.production_variant(
/opt/conda/lib/python3.6/site-packages/sagemaker/model.py in _create_sagemaker_model(self, instance_type, accelerator_type, tags)
315 Args:
316 output_path (str): where in S3 to store the output of the job
--> 317 role (str): what role to use when executing the job
318 packaging_job_name (str): what to name the packaging job
319 compilation_job_name (str): what compilation job to source the model from
/opt/conda/lib/python3.6/site-packages/sagemaker/huggingface/model.py in prepare_container_def(self, instance_type, accelerator_type)
269
270 deploy_key_prefix = model_code_key_prefix(self.key_prefix, self.name, deploy_image)
--> 271 self._upload_code(deploy_key_prefix, repack=True)
272 deploy_env = dict(self.env)
273 deploy_env.update(self._framework_env_vars())
/opt/conda/lib/python3.6/site-packages/sagemaker/model.py in _upload_code(self, key_prefix, repack)
1136 utils.repack_model(
1137 inference_script=self.entry_point,
-> 1138 source_directory=self.source_dir,
1139 dependencies=self.dependencies,
1140 model_uri=self.model_data,
/opt/conda/lib/python3.6/site-packages/sagemaker/utils.py in repack_model(inference_script, source_directory, dependencies, model_uri, repacked_model_uri, sagemaker_session, kms_key)
413
414 _create_or_update_code_dir(
--> 415 model_dir, inference_script, source_directory, dependencies, sagemaker_session, tmp
416 )
417
/opt/conda/lib/python3.6/site-packages/sagemaker/utils.py in _create_or_update_code_dir(model_dir, inference_script, source_directory, dependencies, sagemaker_session, tmp)
461 os.mkdir(code_dir)
462 try:
--> 463 shutil.copy2(inference_script, code_dir)
464 except FileNotFoundError:
465 if os.path.exists(os.path.join(code_dir, inference_script)):
/opt/conda/lib/python3.6/shutil.py in copy2(src, dst, follow_symlinks)
260 """
261 if os.path.isdir(dst):
--> 262 dst = os.path.join(dst, os.path.basename(src))
263 copyfile(src, dst, follow_symlinks=follow_symlinks)
264 copystat(src, dst, follow_symlinks=follow_symlinks)
/opt/conda/lib/python3.6/posixpath.py in basename(p)
144 def basename(p):
145 """Returns the final component of a pathname"""
--> 146 p = os.fspath(p)
147 sep = _get_sep(p)
148 i = p.rfind(sep) + 1
TypeError: expected str, bytes or os.PathLike object, not NoneType
I opened the model.tar.gz archive locally, and this is the content of the folder:
Any help is much appreciated!