#!/usr/bin/env python3
"""Manual model testing script for the Hugging Face Inference API."""
import os

import requests


def test_model(model_id, prompt="Hello, how are you?"):
    """Test a model on the Hugging Face Inference API."""
    token = os.getenv("HF_TOKEN")
    if not token:
        print("❌ No HF_TOKEN found")
        return False

    headers = {"Authorization": f"Bearer {token}"}
    # Hosted Inference API endpoint for the given model
    url = f"https://api-inference.huggingface.co/models/{model_id}"
    # Keep generation short and near-deterministic for a quick smoke test
    payload = {
        "inputs": prompt,
        "parameters": {
            "max_new_tokens": 50,
            "temperature": 0.1,
        },
    }

    try:
        print(f"🧪 Testing {model_id}...")
        response = requests.post(url, headers=headers, json=payload, timeout=30)
        print(f"   Status: {response.status_code}")
        if response.status_code == 200:
            result = response.json()
            print(f"   ✅ Success: {str(result)[:100]}...")
            return True
        else:
            print(f"   ❌ Error: {response.text[:200]}...")
            return False
    except Exception as e:
        print(f"   ❌ Exception: {e}")
        return False


def main():
    """Test various models to find working ones."""
    print("🚀 Testing Hugging Face Models")
    print("=" * 50)

    # Models that are commonly available on the Inference API
    models_to_test = [
        "microsoft/DialoGPT-medium",
        "gpt2",
        "distilgpt2",
        "microsoft/DialoGPT-small",
        "facebook/blenderbot-400M-distill",
        "Salesforce/codet5-small",
        "microsoft/codebert-base",
        "bigcode/starcoder",
        "codellama/CodeLlama-7b-Instruct-hf",
        "defog/sqlcoder-7b-2",
    ]

    working_models = []
    for model_id in models_to_test:
        if test_model(model_id):
            working_models.append(model_id)
        print()

    print("=" * 50)
    print(f"✅ Working models: {len(working_models)}")
    for model in working_models:
        print(f"   - {model}")

    if working_models:
        print("\n📝 Suggested config/models.yaml:")
        print("models:")
        for model_id in working_models[:4]:
            name = model_id.split("/")[-1].replace("-", "_").replace(".", "_")
            print(f"""  - name: "{name}"
    provider: "huggingface"
    model_id: "{model_id}"
    params:
      max_new_tokens: 512
      temperature: 0.1
      top_p: 0.9
    description: "Working model from Hugging Face"
""")


if __name__ == "__main__":
    main()
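
# Usage sketch (assumptions: the filename test_models.py is hypothetical; the
# only thing the script above actually requires is the HF_TOKEN env var):
#
#   export HF_TOKEN=hf_xxxxxxxx   # Hugging Face access token with read scope
#   python test_models.py
#
# A 503 response from the Inference API usually means the model is still
# loading; the request can simply be retried after a short wait.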