NameError: name 'LRScheduler' is not defined

I have the following code, and it raises the error below:

from transformers.utils import (
    is_sagemaker_mp_enabled,
    is_torch_available,
    is_torch_xla_available,
    is_training_run_on_sagemaker,
    logging,
)

if is_torch_available():
    # Note: this is a relative import, so it only resolves inside the
    # transformers package itself, not when pasted into a notebook.
    from .pytorch_utils import is_torch_greater_or_equal_than_2_0

    if is_torch_greater_or_equal_than_2_0:
        from torch.optim.lr_scheduler import LRScheduler
    else:
        from torch.optim.lr_scheduler import _LRScheduler as LRScheduler

from transformers.trainer_pt_utils import (
    DistributedLengthGroupedSampler,
    DistributedSamplerWithLoop,
    LengthGroupedSampler,
)

---------------------------------------------------------------------------
NameError                                 Traceback (most recent call last)
Cell In[16], line 15
     13     else:
     14         from torch.optim.lr_scheduler import _LRScheduler as LRScheduler
---> 15 from transformers.trainer_pt_utils import (
     16     DistributedLengthGroupedSampler,
     17     DistributedSamplerWithLoop,
     18     LengthGroupedSampler,
     19 )

File /opt/miniconda3/envs/geneformer/lib/python3.10/site-packages/transformers/trainer_pt_utils.py:1368
   1364     def step(self, closure=None) -> Optional[float]:
   1365         pass
-> 1368 class LayerWiseDummyScheduler(LRScheduler):
   1369     """
   1370     For Layer-wise optimizers such as GaLoRE optimizer, the optimization and scheduling step
   1371     are already done through the post gradient hooks. Therefore
   1372     the trick is to create a dummy scheduler that can take arbitrary
   1373     args and kwargs and return a no-op during training.
   1374     """
   1376     def __init__(self, *args, **kwargs):

NameError: name 'LRScheduler' is not defined
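
As I read the traceback, the class definition at trainer_pt_utils.py:1368 fails because the name LRScheduler was never bound, i.e. the guarded import above it never ran. A minimal sketch of that failure mode (HAVE_TORCH is a hypothetical stand-in for is_torch_available() returning False):

HAVE_TORCH = False  # stand-in: pretend torch was not detected

if HAVE_TORCH:
    from torch.optim.lr_scheduler import LRScheduler

# A class statement evaluates its base classes immediately, so this raises
# NameError: name 'LRScheduler' is not defined
class LayerWiseDummyScheduler(LRScheduler):
    pass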

from .pytorch_utils import is_torch_greater_or_equal_than_2_0

I tried commenting out that block and running the rest, since pytorch_utils.py is missing in my environment, and the error went away.
It also runs normally if I instead add a plain from torch.optim.lr_scheduler import LRScheduler. So something is probably going wrong inside pytorch_utils.py.
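
For what it's worth, here is a version-agnostic way to bind LRScheduler without going through pytorch_utils at all (a minimal sketch; the try/except stands in for the is_torch_greater_or_equal_than_2_0 check):

# torch >= 2.0 exposes LRScheduler publicly; older releases only have the
# private _LRScheduler name, so fall back to aliasing that.
try:
    from torch.optim.lr_scheduler import LRScheduler
except ImportError:
    from torch.optim.lr_scheduler import _LRScheduler as LRScheduler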