| from typing import Dict, List, Any |
| |
| from transformers import DistilBertTokenizer, DistilBertForSequenceClassification |
|
|
|
|
| |
| from transformers import AutoModel |
| |
|
|
| from transformers import pipeline, AutoTokenizer |
|
|
| |
# Hugging Face Hub model id loaded by EndpointHandler below
# (uncased DistilBERT base; fine-tuned head weights come from this checkpoint).
checkpoint = "distilbert-base-uncased"
|
|
class EndpointHandler():
    """Custom Inference Endpoints handler wrapping a text-classification pipeline."""

    def __init__(self, path=""):
        """Load model + tokenizer and build the inference pipeline.

        Args:
            path (str): path to the deployed model artifacts. Currently unused;
                the module-level ``checkpoint`` id is loaded instead — NOTE(review):
                loading from ``path`` is probably intended for real deployments.
        """
        model = DistilBertForSequenceClassification.from_pretrained(checkpoint)
        tokenizer = DistilBertTokenizer.from_pretrained(checkpoint)
        self.pipeline = pipeline("text-classification", model=model, tokenizer=tokenizer)

    def __call__(self, data: Any) -> List[List[Dict[str, float]]]:
        """Run inference on one request payload.

        Args:
            data: either the raw pipeline input (e.g. a string or list of
                strings), or a dict with:
                  - "inputs": the input text(s) for the pipeline.
                  - "parameters" (optional): a dict of extra keyword arguments
                    forwarded to the pipeline call (e.g. ``{"top_k": 2}``).

        Return:
            The pipeline prediction, a list of dicts such as
            ``[{"label": "POSITIVE", "score": 0.9939950108528137}]`` where
            - "label": a string naming the predicted class;
            - "score": the model's confidence in [0, 1].
        """
        if isinstance(data, dict):
            # Work on a shallow copy: the original implementation popped keys
            # straight off ``data``, mutating the caller's request dict so a
            # retry with the same payload lost its "inputs"/"parameters".
            payload = dict(data)
            inputs = payload.pop("inputs", payload)
            parameters = payload.pop("parameters", None) or {}
        else:
            # Non-dict payloads are passed straight through to the pipeline.
            inputs, parameters = data, {}

        # ``**{}`` is equivalent to passing no extra kwargs, so one call
        # replaces the original if/else duplication.
        return self.pipeline(inputs, **parameters)
|
|