Fewer trainable parameters after quantization

Sure! To be honest, it's nothing fancy:

# Base checkpoint to inspect; loaded here WITHOUT quantization so the
# parameter table below reflects the full-precision model.
model_id = "mistralai/Mistral-7B-Instruct-v0.2"

# Keep the load kwargs in one dict so switching to the quantized variant
# is a one-line change (add the quantization_config entry back).
load_kwargs = {
    "device_map": 'auto',
    # "quantization_config": bnb_config,
}
model = AutoModelForCausalLM.from_pretrained(model_id, **load_kwargs)

# Tabulate every registered parameter tensor: its dotted name, shape,
# element count, and whether it is trainable (requires_grad). After
# quantization, quantized modules typically expose fewer trainable
# tensors — this table is how you spot that.
_columns = ('Name', 'Shape', 'Parameters', 'RequiresGrad')

# One pass over the model; each row mirrors the column order above.
_rows = [
    (name, param.shape, param.numel(), param.requires_grad)
    for name, param in model.named_parameters()
]

# Column-oriented dict for pandas; an empty model still produces all
# four keys (with empty lists), matching the original layout.
params_dict = {col: [row[i] for row in _rows] for i, col in enumerate(_columns)}

pd.DataFrame(params_dict)