```python
import spaces
from transformers import pipeline as tpipeline
from optimum.pipelines import pipeline as opipeline


# @spaces.GPU(duration=60)
def classify(tweet, event_model, hftoken, threshold):
    results = {"text": None, "event": None, "score": None}
    # event type prediction with the transformers pipeline
    event_predictor = tpipeline(task="text-classification", model=event_model,
                                batch_size=512, token=hftoken, device="cpu")
    tokenizer_kwargs = {"padding": True, "truncation": True, "max_length": 512}
    prediction = event_predictor(tweet, **tokenizer_kwargs)[0]
    # alternative: the same prediction with the ONNX Runtime pipeline from optimum
    # onnx_classifier = opipeline("text-classification", model=event_model, accelerator="ort",
    #                             batch_size=512, token=hftoken, device="cpu")
    # prediction = onnx_classifier(tweet)[0]
    results["text"] = tweet
    # fall back to "none" when the predicted event does not reach the confidence threshold
    if prediction["label"] != "none" and round(prediction["score"], 2) <= threshold:
        results["event"] = "none"
    else:
        results["event"] = prediction["label"]
    results["score"] = prediction["score"]
    return results
```
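A minimal usage sketch is shown below; the model id, token, threshold, and example tweet are placeholders for illustration, not values taken from this Space:

```python
# Hypothetical usage sketch: model id, token, and threshold below are placeholders.
if __name__ == "__main__":
    result = classify(
        tweet="Flood waters are rising near the river; several roads are already closed.",
        event_model="username/event-type-classifier",  # placeholder model id
        hftoken="hf_xxx",                              # placeholder access token
        threshold=0.5,                                 # placeholder confidence cutoff
    )
    print(result)  # -> {"text": ..., "event": ..., "score": ...}
```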