# tanvir-ishraq's picture
# Update app.py
# a01fe33
# raw
# history blame
# 1.07 kB
import gradio as gr
from transformers import AutoTokenizer
import torch, json
from fastai.text.all import *
from blurr.text.modeling.all import *
#from blurr.text.data.all import *
# from blurr.modeling.core import Blearner
# learner = Blearner.load_learner('path/to/your/export.pkl')
# result = learner.blurr_predict('Your text here')
# Load the question-label mapping exported alongside the model; the JSON
# keys are the class names, in the same order as the model's output probs.
with open('question_labels.json', 'r') as labels_file:
    question_dictionary = json.load(labels_file)

# Ordered list of class names used to label each probability.
que_classes = [*question_dictionary]

# Exported fastai/Blurr learner (pickled with learn.export()).
blurr_model = load_learner('healifyLLM-stage4.pkl')
def detect_question(text):
    """Classify *text* with the Blurr learner.

    Returns a dict mapping each class name in ``que_classes`` to its
    predicted probability as a plain ``float`` — the shape gradio's
    Label component expects.
    """
    prediction = blurr_model.blurr_predict(text)[0]
    return {name: float(p) for name, p in zip(que_classes, prediction['probs'])}
# Output component: show the five highest-probability classes.
# Fix: the legacy `gr.outputs` namespace was deprecated in Gradio 3.x and
# removed in 4.x — `gr.Label` is the supported component in both.
label = gr.Label(num_top_classes=5)

# Simple text-in / label-out web UI around the classifier.
iface = gr.Interface(fn=detect_question, inputs="text", outputs=label)
# inline=False: open in a full browser tab rather than embedding inline.
iface.launch(inline=False)
# def your_function(input1):
# # my processing
# class_probs = ...
# some_text = ...
# return class_probs, some_text
#
# iface = gr.Interface(
# fn=your_function,
# inputs="textbox",
# outputs=["json", "text"]
# )