from transformers import pipeline
import torch
# Load your model
classifier = pipeline(
    "text-classification",
    model="your-username/your-model-name",  # Replace with your model path
    tokenizer="your-username/your-model-name"
)

def predict(text):
    """Simple prediction function"""
    result = classifier(text)
    return result
# Example usage
if __name__ == "__main__":
    sample_text = "This is an amazing model!"
    prediction = predict(sample_text)
    print(f"Input: {sample_text}")
    print(f"Prediction: {prediction}")