ImportError issue in Colab

accelerator = Accelerator()
# Load tokenizer and model
print("Loading tokenizer and model")
tokenizer = AutoTokenizer.from_pretrained(
    "facebook/nllb-200-3.3B", use_auth_token=False, src_lang="eng_Latn"
)
# NOTE: `device=` is not an accepted `from_pretrained` kwarg. Loading in
# 8-bit via bitsandbytes requires `device_map` so that Accelerate can place
# the quantized weights itself — passing `load_in_8bit=True` without a
# device_map is exactly what raises the "requires Accelerate" ImportError
# even when accelerate/bitsandbytes are installed.
model = AutoModelForSeq2SeqLM.from_pretrained(
    "facebook/nllb-200-3.3B",
    use_auth_token=False,
    device_map="auto",  # let Accelerate dispatch layers (required with 8-bit)
    load_in_8bit=True,
)
print("Tokenizer and model loaded")

# Translate the dataframe (translate_dataframe / df are defined elsewhere).
translate_dataframe(df)


def combine_chunks():
    """Merge every translated chunk file in ``output_dir`` into one list
    and write it to ./translated_tasks_lug_nllb.json.

    Chunks are expected to be named chunk0.json, chunk1.json, ...
    """
    translated_tasks_list = []
    # Count only chunk*.json files: counting every *.json in the directory
    # (as before) breaks the index range as soon as any unrelated JSON file
    # is present, causing a FileNotFoundError or silently dropped chunks.
    num_chunks = len(glob.glob(f"{output_dir}chunk*.json"))
    for index in range(num_chunks):
        with open(f"{output_dir}chunk{index}.json", "rb") as f:
            # json.load parses directly from the file object.
            translated_tasks_list += json.load(f)
    # Plain string literal: the path has no placeholders, so no f-string.
    write_json_file(translated_tasks_list, "./translated_tasks_lug_nllb.json")


# Assemble the per-chunk translation outputs into the final JSON file.
combine_chunks()

I get this error even though I have fully installed the packages:
"ImportError: Using `load_in_8bit=True` requires Accelerate: `pip install accelerate` and the latest version of bitsandbytes: `pip install -i https://test.pypi.org/simple/ bitsandbytes` or `pip install bitsandbytes`"

1 Like

I have the same error :sweat: