Hey,
I tried to test the example code for plot_logit_lens from transformer-utils: transformer-utils · PyPI
And I get error message when loading the model :
---> 12 model = transformers.AutoModelForCausalLM.from_pretrained('gpt2-xl')
15 def text_to_input_ids(text):
16 toks = tokenizer.encode(text)
File ~/opt/anaconda3/lib/python3.9/site-packages/transformer_utils/low_memory/enable.py:10, in low_memory_from_pretrained(pretrained_model_name_or_path, *args, **kwargs)
9 def low_memory_from_pretrained(pretrained_model_name_or_path, *args, **kwargs):
---> 10 config_path, model_path = huggingface_model_local_paths(pretrained_model_name_or_path)
12 model = low_memory_load(config_path=config_path, model_path=model_path, verbose=False)
14 return model
File ~/opt/anaconda3/lib/python3.9/site-packages/transformer_utils/util/tfm_utils.py:39, in huggingface_model_local_paths(model_name)
38 def huggingface_model_local_paths(model_name):
---> 39 config_path = get_local_path_from_huggingface_cdn(model_name, 'config.json')
41 fix_config_with_missing_model_type(model_name, config_path)
43 model_path = get_local_path_from_huggingface_cdn(model_name, 'pytorch_model.bin')
File ~/opt/anaconda3/lib/python3.9/site-packages/transformer_utils/util/tfm_utils.py:27, in get_local_path_from_huggingface_cdn(key, filename)
26 def get_local_path_from_huggingface_cdn(key, filename):
---> 27 archive_file = transformers.file_utils.hf_bucket_url(
28 key,
29 filename=filename,
30 )
32 resolved_archive_file = transformers.file_utils.cached_path(
33 archive_file,
34 )
35 return resolved_archive_file
AttributeError: module 'transformers.file_utils' has no attribute 'hf_bucket_url'
I also tried model = GPT2LMHeadModel.from_pretrained('gpt2')
It returned the same error. I appreciate any help!
Thanks!