Issue with loading MPT-30b on Baidu AI Studio

I am loading MPT-30b from huggingface and converting it to a paddle model on baidu aistudio using x2paddle.

This code works on my own computer, but I have insufficient RAM to load the model and thus I tried using Baidu AI Studio to run the code.
This is my code:

import os
os.environ['CURL_CA_BUNDLE'] = ''
from transformers import AutoModelForCausalLM
from transformers import AutoModel
torch_model = AutoModelForCausalLM.from_pretrained("mosaicml/mpt-30b", trust_remote_code=True)
import x2paddle
save_dir = "./MPT-30b"
jit_type = "trace"
x2paddle.convert.pytorch2paddle(module=torch_model,
save_dir=save_dir,
jit_type=jit_type)

and I have installed the packages:
!pip install -U transformers
!pip install paddlepaddle==2.4.2
!pip install x2paddle
!pip install -U torch
!pip install "paddlenlp>=2.5.2"

!pip install -U huggingface_hub

Running the code, I get this bad handshake error:
SysCallError Traceback (most recent call last)
/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/urllib3/contrib/pyopenssl.py in wrap_socket(self, sock, server_side, do_handshake_on_connect, suppress_ragged_eofs, server_hostname)
484 try:
→ 485 cnx.do_handshake()
486 except OpenSSL.SSL.WantReadError:

/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/OpenSSL/SSL.py in do_handshake(self)
1990 result = _lib.SSL_do_handshake(self._ssl)
→ 1991 self._raise_ssl_error(self._ssl, result)
1992

/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/OpenSSL/SSL.py in _raise_ssl_error(self, ssl, result)
1691 if errno != 0:
→ 1692 raise SysCallError(errno, errorcode.get(errno))
1693 raise SysCallError(-1, “Unexpected EOF”)

SysCallError: (104, ‘ECONNRESET’)

During handling of the above exception, another exception occurred:

SSLError Traceback (most recent call last)
/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/urllib3/connectionpool.py in urlopen(self, method, url, body, headers, retries, redirect, assert_same_host, timeout, pool_timeout, release_conn, chunked, body_pos, **response_kw)
671 headers=headers,
→ 672 chunked=chunked,
673 )

/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/urllib3/connectionpool.py in _make_request(self, conn, method, url, timeout, chunked, **httplib_request_kw)
375 try:
→ 376 self._validate_conn(conn)
377 except (SocketTimeout, BaseSSLError) as e:

/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/urllib3/connectionpool.py in _validate_conn(self, conn)
993 if not getattr(conn, “sock”, None): # AppEngine might not have .sock
→ 994 conn.connect()
995

/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/urllib3/connection.py in connect(self)
393 server_hostname=server_hostname,
→ 394 ssl_context=context,
395 )

/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/urllib3/util/ssl_.py in ssl_wrap_socket(sock, keyfile, certfile, cert_reqs, ca_certs, server_hostname, ssl_version, ciphers, ssl_context, ca_cert_dir, key_password)
369 if HAS_SNI and server_hostname is not None:
→ 370 return context.wrap_socket(sock, server_hostname=server_hostname)
371

/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/urllib3/contrib/pyopenssl.py in wrap_socket(self, sock, server_side, do_handshake_on_connect, suppress_ragged_eofs, server_hostname)
490 except OpenSSL.SSL.Error as e:
→ 491 raise ssl.SSLError(“bad handshake: %r” % e)
492 break

SSLError: (“bad handshake: SysCallError(104, ‘ECONNRESET’)”,)

During handling of the above exception, another exception occurred:

MaxRetryError Traceback (most recent call last)
/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/requests/adapters.py in send(self, request, stream, timeout, verify, cert, proxies)
448 retries=self.max_retries,
→ 449 timeout=timeout
450 )

/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/urllib3/connectionpool.py in urlopen(self, method, url, body, headers, retries, redirect, assert_same_host, timeout, pool_timeout, release_conn, chunked, body_pos, **response_kw)
719 retries = retries.increment(
→ 720 method, url, error=e, _pool=self, _stacktrace=sys.exc_info()[2]
721 )

/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/urllib3/util/retry.py in increment(self, method, url, response, error, _pool, _stacktrace)
435 if new_retry.is_exhausted():
→ 436 raise MaxRetryError(_pool, url, error or ResponseError(cause))
437

MaxRetryError: HTTPSConnectionPool(host=‘huggingface.co’, port=443): Max retries exceeded with url: /mosaicml/mpt-30b/resolve/main/config.json (Caused by SSLError(SSLError(“bad handshake: SysCallError(104, ‘ECONNRESET’)”)))

During handling of the above exception, another exception occurred:

SSLError Traceback (most recent call last)
/tmp/ipykernel_247/2815760928.py in
4 from transformers import AutoModelForCausalLM
5 from transformers import AutoModel
----> 6 torch_model = AutoModelForCausalLM.from_pretrained(“mosaicml/mpt-30b”, trust_remote_code=True)
7 import x2paddle
8 save_dir = “./MPT-30b”

/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/transformers/models/auto/auto_factory.py in from_pretrained(cls, pretrained_model_name_or_path, *model_args, **kwargs)
459 trust_remote_code=trust_remote_code,
460 **hub_kwargs,
→ 461 **kwargs,
462 )
463

/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/transformers/models/auto/configuration_auto.py in from_pretrained(cls, pretrained_model_name_or_path, **kwargs)
942 kwargs[“name_or_path”] = pretrained_model_name_or_path
943 trust_remote_code = kwargs.pop(“trust_remote_code”, None)
→ 944 config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs)
945 has_remote_code = “auto_map” in config_dict and “AutoConfig” in config_dict[“auto_map”]
946 has_local_code = “model_type” in config_dict and config_dict[“model_type”] in CONFIG_MAPPING

/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/transformers/configuration_utils.py in get_config_dict(cls, pretrained_model_name_or_path, **kwargs)
572 original_kwargs = copy.deepcopy(kwargs)
573 # Get config dict associated with the base config file
→ 574 config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs)
575 if “_commit_hash” in config_dict:
576 original_kwargs[“_commit_hash”] = config_dict[“_commit_hash”]

/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/transformers/configuration_utils.py in _get_config_dict(cls, pretrained_model_name_or_path, **kwargs)
639 revision=revision,
640 subfolder=subfolder,
→ 641 _commit_hash=commit_hash,
642 )
643 commit_hash = extract_commit_hash(resolved_config_file, commit_hash)

/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/transformers/utils/hub.py in cached_file(path_or_repo_id, filename, cache_dir, force_download, resume_download, proxies, use_auth_token, revision, local_files_only, subfolder, repo_type, user_agent, _raise_exceptions_for_missing_entries, _raise_exceptions_for_connection_errors, _commit_hash)
427 resume_download=resume_download,
428 use_auth_token=use_auth_token,
→ 429 local_files_only=local_files_only,
430 )
431

/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/huggingface_hub/utils/_validators.py in _inner_fn(*args, **kwargs)
116 kwargs = smoothly_deprecate_use_auth_token(fn_name=fn.name, has_token=has_token, kwargs=kwargs)
117
→ 118 return fn(*args, **kwargs)
119
120 return _inner_fn # type: ignore

/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/huggingface_hub/file_download.py in hf_hub_download(repo_id, filename, subfolder, repo_type, revision, library_name, library_version, cache_dir, local_dir, local_dir_use_symlinks, user_agent, force_download, force_filename, proxies, etag_timeout, resume_download, token, local_files_only, legacy_cache_layout)
1197 token=token,
1198 proxies=proxies,
→ 1199 timeout=etag_timeout,
1200 )
1201 except EntryNotFoundError as http_error:

/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/huggingface_hub/utils/_validators.py in _inner_fn(*args, **kwargs)
116 kwargs = smoothly_deprecate_use_auth_token(fn_name=fn.name, has_token=has_token, kwargs=kwargs)
117
→ 118 return fn(*args, **kwargs)
119
120 return _inner_fn # type: ignore

/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/huggingface_hub/file_download.py in get_hf_file_metadata(url, token, proxies, timeout)
1537 follow_relative_redirects=True,
1538 proxies=proxies,
→ 1539 timeout=timeout,
1540 )
1541 hf_raise_for_status(r)

/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/huggingface_hub/file_download.py in _request_wrapper(method, url, max_retries, base_wait_time, max_wait_time, timeout, follow_relative_redirects, **params)
413 timeout=timeout,
414 follow_relative_redirects=False,
→ 415 **params,
416 )
417

/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/huggingface_hub/file_download.py in _request_wrapper(method, url, max_retries, base_wait_time, max_wait_time, timeout, follow_relative_redirects, **params)
449 retry_on_status_codes=(),
450 timeout=timeout,
→ 451 **params,
452 )
453

/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/huggingface_hub/utils/_http.py in http_backoff(method, url, max_retries, base_wait_time, max_wait_time, retry_on_exceptions, retry_on_status_codes, **kwargs)
256
257 # Perform request and return if status_code is not in the retry list.
→ 258 response = session.request(method=method, url=url, **kwargs)
259 if response.status_code not in retry_on_status_codes:
260 return response

/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/requests/sessions.py in request(self, method, url, params, data, headers, cookies, files, auth, timeout, allow_redirects, proxies, hooks, stream, verify, cert, json)
531 }
532 send_kwargs.update(settings)
→ 533 resp = self.send(prep, **send_kwargs)
534
535 return resp

/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/requests/sessions.py in send(self, request, **kwargs)
644
645 # Send the request
→ 646 r = adapter.send(request, **kwargs)
647
648 # Total elapsed time of the request (approximately)

/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/huggingface_hub/utils/_http.py in send(self, request, *args, **kwargs)
61 “”“Catch any RequestException to append request id to the error message for debugging.”“”
62 try:
—> 63 return super().send(request, *args, **kwargs)
64 except requests.RequestException as e:
65 request_id = request.headers.get(X_AMZN_TRACE_ID)

/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/requests/adapters.py in send(self, request, stream, timeout, verify, cert, proxies)
512 if isinstance(e.reason, _SSLError):
513 # This branch is for urllib3 v1.22 and later.
→ 514 raise SSLError(e, request=request)
515
516 raise ConnectionError(e, request=request)

SSLError: (MaxRetryError(‘HTTPSConnectionPool(host='huggingface.co', port=443): Max retries exceeded with url: /mosaicml/mpt-30b/resolve/main/config.json (Caused by SSLError(SSLError(“bad handshake: SysCallError(104, 'ECONNRESET')”)))’), ‘(Request ID: 6d6e0478-343b-4872-abfa-72742998fd48)’)

Help would be greatly appreciated.