HTTP error when using BertTokenizer.from_pretrained

from transformers import AutoModel, AutoTokenizer, BertForSequenceClassification, BertTokenizer
model_name = "ProsusAI/finbert"

model = AutoModel.from_pretrained(model_name)
tokenizer = AutoTokenizer.from_pretrained(model_name)

produces the following error:


HTTPError Traceback (most recent call last)
~\Anaconda3\lib\site-packages\transformers\file_utils.py in get_from_cache(url, cache_dir, force_download, proxies, etag_timeout, resume_download, user_agent, use_auth_token, local_files_only)
1947 r = requests.head(url, headers=headers, allow_redirects=False, proxies=proxies, timeout=etag_timeout)
-> 1948 r.raise_for_status()
1949 etag = r.headers.get("X-Linked-Etag") or r.headers.get("ETag")

~\Anaconda3\lib\site-packages\requests\models.py in raise_for_status(self)
721 if http_error_msg:
-> 722 raise HTTPError(http_error_msg, response=self)
723

HTTPError: 404 Client Error: Not Found

During handling of the above exception, another exception occurred:

AttributeError Traceback (most recent call last)
<ipython-input-...> in <module>
6
7 model=BertForSequenceClassification.from_pretrained("mrm8488/distilroberta-finetuned-financial-news-sentiment-analysis",num_labels=3)
----> 8 tokenizer = BertTokenizer.from_pretrained("mrm8488/distilroberta-finetuned-financial-news-sentiment-analysis")
9
10 sentence = "Operating profit rose to EUR 13.1 mn from EUR 8.7 mn in the corresponding period in 2007 representing 7.7 % of net sales."

~\Anaconda3\lib\site-packages\transformers\tokenization_utils_base.py in from_pretrained(cls, pretrained_model_name_or_path, *init_inputs, **kwargs)
1693 else:
1694 try:
-> 1695 resolved_vocab_files[file_id] = cached_path(
1696 file_path,
1697 cache_dir=cache_dir,

~\Anaconda3\lib\site-packages\transformers\file_utils.py in cached_path(url_or_filename, cache_dir, force_download, proxies, resume_download, user_agent, extract_compressed_file, force_extract, use_auth_token, local_files_only)
1774 if is_remote_url(url_or_filename):
1775 # URL, so get it from the cache (downloading if necessary)
-> 1776 output_path = get_from_cache(
1777 url_or_filename,
1778 cache_dir=cache_dir,

~\Anaconda3\lib\site-packages\transformers\file_utils.py in get_from_cache(url, cache_dir, force_download, proxies, etag_timeout, resume_download, user_agent, use_auth_token, local_files_only)
1961 if 300 <= r.status_code <= 399:
1962 url_to_download = r.headers["Location"]
-> 1963 except (requests.exceptions.SSLError, requests.exceptions.ProxyError):
1964 # Actually raise for those subclasses of ConnectionError
1965 raise

AttributeError: module 'requests.exceptions' has no attribute 'ProxyError'
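
In case the library versions matter for the requests error above, here is a minimal sketch that would print the versions of the two libraries appearing in this trace (the actual values from my environment are not included here):

import requests
import transformers

# Print the installed versions of the libraries referenced in the traceback above.
print("requests:", requests.__version__)
print("transformers:", transformers.__version__)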

I tried different models (other FinBERT models) and AutoTokenizer as well, and I get the same error. One of those attempts is sketched below.
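
A minimal sketch of that attempt, assuming the same repo id that appears in the traceback above (the other FinBERT repo ids I tried are not listed here):

from transformers import AutoTokenizer

# Same repo id as in the traceback above; other FinBERT models gave the same error.
tokenizer = AutoTokenizer.from_pretrained("mrm8488/distilroberta-finetuned-financial-news-sentiment-analysis")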

What am I not doing right?