Model-loading error when using plot_logit_lens

Hey,

I tried to test the example code for plot_logit_lens from transformer-utils: transformer-utils · PyPI

And I get an error message when loading the model:

---> 12 model = transformers.AutoModelForCausalLM.from_pretrained('gpt2-xl')
15 def text_to_input_ids(text):
16 toks = tokenizer.encode(text)

File ~/opt/anaconda3/lib/python3.9/site-packages/transformer_utils/low_memory/enable.py:10, in low_memory_from_pretrained(pretrained_model_name_or_path, *args, **kwargs)
9 def low_memory_from_pretrained(pretrained_model_name_or_path, *args, **kwargs):
—> 10 config_path, model_path = huggingface_model_local_paths(pretrained_model_name_or_path)
12 model = low_memory_load(config_path=config_path, model_path=model_path, verbose=False)
14 return model

File ~/opt/anaconda3/lib/python3.9/site-packages/transformer_utils/util/tfm_utils.py:39, in huggingface_model_local_paths(model_name)
38 def huggingface_model_local_paths(model_name):
---> 39 config_path = get_local_path_from_huggingface_cdn(model_name, "config.json")
41 fix_config_with_missing_model_type(model_name, config_path)
43 model_path = get_local_path_from_huggingface_cdn(model_name, "pytorch_model.bin")

File ~/opt/anaconda3/lib/python3.9/site-packages/transformer_utils/util/tfm_utils.py:27, in get_local_path_from_huggingface_cdn(key, filename)
26 def get_local_path_from_huggingface_cdn(key, filename):
—> 27 archive_file = transformers.file_utils.hf_bucket_url(
28 key,
29 filename=filename,
30 )
32 resolved_archive_file = transformers.file_utils.cached_path(
33 archive_file,
34 )
35 return resolved_archive_file

AttributeError: module 'transformers.file_utils' has no attribute 'hf_bucket_url'

I also tried model = GPT2LMHeadModel.from_pretrained('gpt2'), and it returned the same error. I would appreciate any help!

Thanks!