from transformers import AutoConfig, AutoModelForSequenceClassification

import torch


def check_model(model_name):
    """Diagnostic helper: inspect and try to load a Hugging Face model.

    Loads the config for ``model_name``, prints it along with its
    ``model_type`` (if any), then attempts to instantiate the model with
    ``AutoModelForSequenceClassification``. Any failure is caught and
    printed rather than raised, so the script always reports something.

    Args:
        model_name: Hub repo id or local path of the model to check.
    """
    try:
        # Try to load the model configuration
        config = AutoConfig.from_pretrained(model_name)
        print("\nModel Configuration:")
        print(config)

        # Check if model_type is present
        print("\nModel Type:", config.model_type if hasattr(config, 'model_type') else 'Not specified')

        # Try to load the model.
        # BUG FIX: AutoModelForSequenceClassification was used here without
        # being imported, so this path always raised NameError (masked by
        # the broad except below). The import is now at the top of the file.
        print("\nAttempting to load model...")
        model = AutoModelForSequenceClassification.from_pretrained(
            model_name,
            torch_dtype=torch.float16,
            # SECURITY NOTE: trust_remote_code=True executes Python code
            # shipped with the model repo — only use with trusted repos.
            trust_remote_code=True
        )
        print("\nSuccessfully loaded model!")
    except Exception as e:
        # Broad catch is deliberate: this is a best-effort diagnostic that
        # should report the error text instead of crashing.
        print(f"\nError: {str(e)}")


if __name__ == "__main__":
    check_model("HabibBelguith44/Llama3-Tunisian-Dialect")