Spaces: Build error
import gradio as gr
import numpy as np
import torch
from transformers import AutoModelForSequenceClassification, AutoTokenizer

# Class labels for the binary grammatical-acceptability model
labels = ["Not Acceptable", "Acceptable"]

model_name = "abdulmatinomotoso/English_Grammar_Checker"
model = AutoModelForSequenceClassification.from_pretrained(model_name)
tokenizer = AutoTokenizer.from_pretrained(model_name)

def check_grammar(sentence):
    # Tokenize the sentence and run it through the classifier
    input_tensor = tokenizer.encode(sentence, return_tensors="pt")
    logits = model(input_tensor).logits
    # Convert logits to probabilities and pick the most likely label
    probs = torch.nn.Softmax(dim=1)(logits)[0]
    probs = probs.cpu().detach().numpy()
    max_index = np.argmax(probs)
    return labels[max_index]

demo = gr.Interface(
    check_grammar,
    inputs=["text"],
    outputs="text",
    title="English Grammar Checker",
)

if __name__ == "__main__":
    demo.launch(debug=True)
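For a quick sanity check of the model outside the Gradio UI, a minimal sketch (not part of the original Space) is to drive the same checkpoint through the transformers pipeline helper instead of manual tokenization. Note that the pipeline reports label names from the model's own config, so they may appear as generic IDs such as "LABEL_0"/"LABEL_1" rather than the custom strings used above.

from transformers import pipeline

# Assumption: this is only an illustrative local test, not code from the Space
grammar_checker = pipeline(
    "text-classification",
    model="abdulmatinomotoso/English_Grammar_Checker",
)

# Returns a list of dicts with the predicted label and its score
print(grammar_checker("She go to school every day."))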