Strange "safe" key missing error

I'm getting an error from this simple code:

import openai
from datasets import load_dataset
from tqdm import tqdm

# from huggingface_hub import login
# login()


mmlu = load_dataset("cais/mmlu", "all", split="train")

This is the error message:

Traceback (most recent call last):
File "/Users/rkala/PycharmProjects/thalamus/dataset/createDataset.py", line 11, in <module>
mmlu = load_dataset("cais/mmlu", "all", split="train")
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/rkala/anaconda3/envs/pythonProject10/lib/python3.11/site-packages/datasets/load.py", line 2609, in load_dataset
builder_instance.download_and_prepare(
File "/Users/rkala/anaconda3/envs/pythonProject10/lib/python3.11/site-packages/datasets/builder.py", line 1027, in download_and_prepare
self._download_and_prepare(
File "/Users/rkala/anaconda3/envs/pythonProject10/lib/python3.11/site-packages/datasets/builder.py", line 1100, in _download_and_prepare
split_generators = self._split_generators(dl_manager, **split_generators_kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/rkala/anaconda3/envs/pythonProject10/lib/python3.11/site-packages/datasets/packaged_modules/parquet/parquet.py", line 44, in _split_generators
data_files = dl_manager.download_and_extract(self.config.data_files)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/rkala/anaconda3/envs/pythonProject10/lib/python3.11/site-packages/datasets/download/download_manager.py", line 434, in download_and_extract
return self.extract(self.download(url_or_urls))
^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/rkala/anaconda3/envs/pythonProject10/lib/python3.11/site-packages/datasets/download/download_manager.py", line 257, in download
downloaded_path_or_paths = map_nested(
^^^^^^^^^^^
File "/Users/rkala/anaconda3/envs/pythonProject10/lib/python3.11/site-packages/datasets/utils/py_utils.py", line 511, in map_nested
mapped = [
^
File "/Users/rkala/anaconda3/envs/pythonProject10/lib/python3.11/site-packages/datasets/utils/py_utils.py", line 512, in <listcomp>
_single_map_nested((function, obj, batched, batch_size, types, None, True, None))
File "/Users/rkala/anaconda3/envs/pythonProject10/lib/python3.11/site-packages/datasets/utils/py_utils.py", line 399, in _single_map_nested
mapped = [_single_map_nested((function, v, batched, batch_size, types, None, True, None)) for v in pbar]
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/rkala/anaconda3/envs/pythonProject10/lib/python3.11/site-packages/datasets/utils/py_utils.py", line 399, in <listcomp>
mapped = [_single_map_nested((function, v, batched, batch_size, types, None, True, None)) for v in pbar]
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/rkala/anaconda3/envs/pythonProject10/lib/python3.11/site-packages/datasets/utils/py_utils.py", line 380, in _single_map_nested
return [mapped_item for batch in iter_batched(data_struct, batch_size) for mapped_item in function(batch)]
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/rkala/anaconda3/envs/pythonProject10/lib/python3.11/site-packages/datasets/utils/py_utils.py", line 380, in <listcomp>
return [mapped_item for batch in iter_batched(data_struct, batch_size) for mapped_item in function(batch)]
^^^^^^^^^^^^^^^
File "/Users/rkala/anaconda3/envs/pythonProject10/lib/python3.11/site-packages/datasets/download/download_manager.py", line 313, in _download_batched
return [
^
File "/Users/rkala/anaconda3/envs/pythonProject10/lib/python3.11/site-packages/datasets/download/download_manager.py", line 314, in <listcomp>
self._download_single(url_or_filename, download_config=download_config)
File "/Users/rkala/anaconda3/envs/pythonProject10/lib/python3.11/site-packages/datasets/download/download_manager.py", line 323, in _download_single
out = cached_path(url_or_filename, download_config=download_config)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/rkala/anaconda3/envs/pythonProject10/lib/python3.11/site-packages/datasets/utils/file_utils.py", line 201, in cached_path
output_path = get_from_cache(
^^^^^^^^^^^^^^^
File "/Users/rkala/anaconda3/envs/pythonProject10/lib/python3.11/site-packages/datasets/utils/file_utils.py", line 571, in get_from_cache
response = fsspec_head(url, storage_options=storage_options)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/rkala/anaconda3/envs/pythonProject10/lib/python3.11/site-packages/datasets/utils/file_utils.py", line 351, in fsspec_head
return fs.info(path)
^^^^^^^^^^^^^
File "/Users/rkala/anaconda3/envs/pythonProject10/lib/python3.11/site-packages/huggingface_hub/hf_file_system.py", line 540, in info
paths_info = self._api.get_paths_info(
^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/rkala/anaconda3/envs/pythonProject10/lib/python3.11/site-packages/huggingface_hub/utils/_validators.py", line 114, in _inner_fn
return fn(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^
File "/Users/rkala/anaconda3/envs/pythonProject10/lib/python3.11/site-packages/huggingface_hub/hf_api.py", line 3145, in get_paths_info
return [
^
File "/Users/rkala/anaconda3/envs/pythonProject10/lib/python3.11/site-packages/huggingface_hub/hf_api.py", line 3146, in <listcomp>
RepoFile(**path_info) if path_info["type"] == "file" else RepoFolder(**path_info)
^^^^^^^^^^^^^^^^^^^^^
File "/Users/rkala/anaconda3/envs/pythonProject10/lib/python3.11/site-packages/huggingface_hub/hf_api.py", line 638, in __init__
safe=security["safe"], av_scan=security["avScan"], pickle_import_scan=security["pickleImportScan"]
~~~~~~~~^^^^^^^^
KeyError: 'safe'
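
The last frame shows where it breaks: huggingface_hub builds a RepoFile from the Hub API response and indexes the security metadata directly, so a response that omits the "safe" key fails with a bare KeyError. Here is a minimal sketch of that pattern with a simulated payload (not the library's actual code path), just to show why the error surfaces as a KeyError rather than anything more descriptive:

# Hypothetical "security" metadata as returned by the Hub API,
# simulated here without the "safe" key to mirror the failure.
security = {"avScan": "clean", "pickleImportScan": "clean"}

try:
    safe = security["safe"]  # direct indexing, as in the hf_api.py frame above
except KeyError as err:
    print(f"KeyError: {err}")  # -> KeyError: 'safe'

# A defensive lookup would tolerate the missing field instead of crashing:
safe = security.get("safe")  # None when the server omits the key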

I seem to be getting the same issue when loading datasets with load_dataset. Searching for similar reports, I found that all of the posts about this KeyError: 'safe' are very recent, every one of them made within the past hour. Is something probably going wrong on the Hugging Face server side?
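
One way to check whether it is server-side is to isolate the Hub metadata call that the traceback ends in, bypassing the datasets library entirely (the file path below is just an example; any path in the repo should work):

from huggingface_hub import HfApi

api = HfApi()
# get_paths_info is the call that fails inside load_dataset; if it
# raises the same KeyError here, the Hub API response itself is the
# problem, not the datasets library or the local cache.
print(api.get_paths_info("cais/mmlu", ["README.md"], repo_type="dataset"))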

I am getting this error with my dataset as well.

Hi all, thanks for reporting this! We're investigating now and we'll get back to you real soon.

Hi @rkalahasty1, @Goldengrove, @legolasyiu thanks again for reporting this! This issue is now resolved. Please let us know if you continue running into any issues!
