"""Prune a merged checkpoint in place.

Loads the checkpoint, zeroes out the 25% smallest-magnitude weights
(L1 unstructured pruning) in every Linear/Conv2d layer, and writes the
result back to the same path.

Fixes over the original snippet:
  * `transformers` has no `prune_model`; pruning lives in
    `torch.nn.utils.prune`.
  * The comment claimed "20%" while `amount=0.25` removes 25%.
  * The whole four-line script was duplicated verbatim; deduplicated.

NOTE(review): `torch.load` cannot read a `.safetensors` file. The path
'merged_model.safetensors' must actually be a torch-serialized pickle
checkpoint, or the load/save calls should switch to
`safetensors.torch.load_file` / `save_file` -- confirm against whatever
produced the file.
"""
import torch
from torch.nn.utils import prune


def prune_model(model: torch.nn.Module, amount: float = 0.25) -> torch.nn.Module:
    """Permanently zero the `amount` fraction of smallest-magnitude weights.

    Applies L1 unstructured pruning to the `weight` tensor of every
    Linear and Conv2d module, then removes the pruning re-parametrization
    so the zeros are baked into `weight` (no mask buffers are saved).

    Args:
        model: the model to prune; modified in place.
        amount: fraction of weights to zero per layer (0.25 == 25%).

    Returns:
        The same model object, for call-chaining convenience.
    """
    for module in model.modules():
        if isinstance(module, (torch.nn.Linear, torch.nn.Conv2d)):
            prune.l1_unstructured(module, name="weight", amount=amount)
            # Make the pruning permanent: fold mask into `weight` and
            # drop the `weight_orig`/`weight_mask` re-parametrization.
            prune.remove(module, "weight")
    return model


if __name__ == "__main__":
    model = torch.load("merged_model.safetensors")
    # amount=0.25 removes 25% of the weights per layer.
    pruned_model = prune_model(model, amount=0.25)
    torch.save(pruned_model, "merged_model.safetensors")