Hello,
I am trying to setup a very simple example:
from transformers import pipeline
generator = pipeline('text-generation', model='gpt2')
But it seems transformers is automatically trying to use keras instead of pytorch. This seems to be happening with any model I use, not just gpt2. It's also happening only with pipeline; the rest of the examples I can run just fine.
Is this the expected behaviour?
if so, is it possible to switch it to pytorch?
Error I get:
ModuleNotFoundError Traceback (most recent call last)
File c:\Users\LKP\anaconda3\envs\hugg\lib\site-packages\transformers\utils\import_utils.py:1082, in _LazyModule._get_module(self, module_name)
1081 try:
--> 1082 return importlib.import_module("." + module_name, self.__name__)
1083 except Exception as e:
File c:\Users\LKP\anaconda3\envs\hugg\lib\importlib\__init__.py:127, in import_module(name, package)
126 level += 1
--> 127 return _bootstrap._gcd_import(name[level:], package, level)
File <frozen importlib._bootstrap>:1014, in _gcd_import(name, package, level)
File <frozen importlib._bootstrap>:991, in _find_and_load(name, import_)
File <frozen importlib._bootstrap>:975, in _find_and_load_unlocked(name, import_)
File <frozen importlib._bootstrap>:671, in _load_unlocked(spec)
File <frozen importlib._bootstrap_external>:843, in exec_module(self, module)
File <frozen importlib._bootstrap>:219, in _call_with_frames_removed(f, *args, **kwds)
File c:\Users\LKP\anaconda3\envs\hugg\lib\site-packages\transformers\models\gpt2\modeling_tf_gpt2.py:26
24 import tensorflow as tf
---> 26 from ...activations_tf import get_tf_activation
27 from ...modeling_tf_outputs import (
28 TFBaseModelOutputWithPastAndCrossAttentions,
29 TFCausalLMOutputWithCrossAttentions,
30 TFSequenceClassifierOutputWithPast,
31 )
File c:\Users\LKP\anaconda3\envs\hugg\lib\site-packages\transformers\activations_tf.py:107
105 return tf.keras.activations.gelu(x, approximate=True)
--> 107 gelu = tf.keras.activations.gelu
108 gelu_new = approximate_gelu_wrap
File ~\AppData\Roaming\Python\Python38\site-packages\tensorflow\python\util\lazy_loader.py:58, in LazyLoader.__getattr__(self, item)
57 def __getattr__(self, item):
--> 58 module = self._load()
59 return getattr(module, item)
File ~\AppData\Roaming\Python\Python38\site-packages\tensorflow\python\util\lazy_loader.py:41, in LazyLoader._load(self)
40 # Import the target module and insert it into the parent's namespace
--> 41 module = importlib.import_module(self.__name__)
42 self._parent_module_globals[self._local_name] = module
File c:\Users\LKP\anaconda3\envs\hugg\lib\importlib\__init__.py:127, in import_module(name, package)
126 level += 1
--> 127 return _bootstrap._gcd_import(name[level:], package, level)
ModuleNotFoundError: No module named 'keras'
The above exception was the direct cause of the following exception:
RuntimeError Traceback (most recent call last)
Cell In[2], line 8
5 from transformers import pipeline
6 #from transformers import pipeline, set_seed
----> 8 generator = pipeline('text-generation', model='gpt2')
File c:\Users\LKP\anaconda3\envs\hugg\lib\site-packages\transformers\pipelines\__init__.py:788, in pipeline(task, model, config, tokenizer, feature_extractor, image_processor, framework, revision, use_fast, use_auth_token, device, device_map, torch_dtype, trust_remote_code, model_kwargs, pipeline_class, **kwargs)
786 if isinstance(model, str) or framework is None:
787 model_classes = {'tf': targeted_task['tf'], 'pt': targeted_task['pt']}
--> 788 framework, model = infer_framework_load_model(
789 model,
790 model_classes=model_classes,
791 config=config,
792 framework=framework,
793 task=task,
794 **hub_kwargs,
795 **model_kwargs,
796 )
798 model_config = model.config
799 hub_kwargs['_commit_hash'] = model.config._commit_hash
File c:\Users\LKP\anaconda3\envs\hugg\lib\site-packages\transformers\pipelines\base.py:245, in infer_framework_load_model(model, config, model_classes, task, framework, **model_kwargs)
243 classes.append(_class)
244 if look_tf:
--> 245 _class = getattr(transformers_module, f"TF{architecture}", None)
246 if _class is not None:
247 classes.append(_class)
File c:\Users\LKP\anaconda3\envs\hugg\lib\site-packages\transformers\utils\import_utils.py:1073, in _LazyModule.__getattr__(self, name)
1071 elif name in self._class_to_module.keys():
1072 module = self._get_module(self._class_to_module[name])
--> 1073 value = getattr(module, name)
1074 else:
1075 raise AttributeError(f"module {self.__name__} has no attribute {name}")
File c:\Users\LKP\anaconda3\envs\hugg\lib\site-packages\transformers\utils\import_utils.py:1072, in _LazyModule.__getattr__(self, name)
1070 value = self._get_module(name)
1071 elif name in self._class_to_module.keys():
--> 1072 module = self._get_module(self._class_to_module[name])
1073 value = getattr(module, name)
1074 else:
File c:\Users\LKP\anaconda3\envs\hugg\lib\site-packages\transformers\utils\import_utils.py:1084, in _LazyModule._get_module(self, module_name)
1082 return importlib.import_module("." + module_name, self.__name__)
1083 except Exception as e:
--> 1084 raise RuntimeError(
1085 f"Failed to import {self.__name__}.{module_name} because of the following error (look up to see its"
1086 f" traceback):\n{e}"
1087 ) from e
RuntimeError: Failed to import transformers.models.gpt2.modeling_tf_gpt2 because of the following error (look up to see its traceback):
No module named 'keras'