import gradio as gr
from transformers import AutoTokenizer
import torch, json
from fastai.text.all import *
from blurr.text.modeling.all import *
# from blurr.text.data.all import *
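
# Alternative loading path via Blearner (left commented out for reference):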
# from blurr.modeling.core import Blearner
# learner = Blearner.load_learner('path/to/your/export.pkl')
# result = learner.blurr_predict('Your text here')
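
# Load the question-class labels; the keys of the JSON dict are the class names.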
with open('question_labels.json', 'r') as f:
    question_dictionary = json.load(f)
que_classes = list(question_dictionary.keys())
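
# Load the exported blurr/fastai learner from disk.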
blurr_model = load_learner('healifyLLM-stage4.pkl')
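
# Classify the input text and return a {class: probability} mapping for the Label output.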
def detect_question(text):
    probs = blurr_model.blurr_predict(text)[0]['probs']
    return dict(zip(que_classes, map(float, probs)))
label = gr.Label(num_top_classes=5)  # show the top-5 classes (gr.outputs.Label is the legacy spelling, removed in newer Gradio)
# Interface wiring the text input to the label output
iface = gr.Interface(fn=detect_question, inputs="text", outputs=label)
iface.launch(inline=False)
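
# Unused template for an interface that returns both class probabilities and free text: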
# def your_function(input1):
# # my processing
# class_probs = ...
# some_text = ...
# return class_probs, some_text
#
# iface = gr.Interface(
# fn=your_function,
# inputs="textbox",
# outputs=["json", "text"]
# )