|
import re

import gradio as gr
import requests

# Address of the backend service that executes the queued queries.
BACKEND_ADDR = "http://46.8.230.12:8585/post"
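# The request/response shapes below are inferred from how submit() and
# generate_answers_str() use this endpoint; they are this client's
# assumptions, not a documented contract:
#   request:  JSON list of {"prompt", "temperature", "model", "use_rag"}
#             dicts (all values serialized as strings)
#   response: JSON list of {"answer": "<generated text>"} dicts, one per query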
|
|
|
|
|
# Queries queued via the "Add" button and flushed on "Submit". Module-level
# state, so it is shared across all sessions of this demo.
queries_list = []

# Maps the display names shown in the UI to the model identifiers the
# backend expects.
llm_name2id = {
    "Llama-3.1-70B-Versatile": "llama-3.1-70b-versatile",
    "Llama-3-70B-8192": "llama3-70b-8192",
    "Llama-3-8B-8192": "llama3-8b-8192",
}

# Default values for the UI controls.
DEFAULT_TEMP = 0.2
DEFAULT_MODEL = "Llama-3-70B-8192"
DEFAULT_USE_RAG = True
|
|
|
|
|
|
|
def clear_queries():
    """Reset the query queue and return every control to its default value."""
    global queries_list

    queries_list = []
    return "", DEFAULT_MODEL, DEFAULT_TEMP, DEFAULT_USE_RAG, "", ""
|
|
|
|
|
|
|
def add_to_list(query_txt, model, temperature, use_rag):
    """Queue a non-empty query; return the cleared input and the queue text."""
    global queries_list

    if len(query_txt) > 0:
        queries_list.append(
            {
                "prompt": query_txt,
                # All values are serialized as strings for the backend.
                "temperature": str(temperature),
                "model": llm_name2id[model],
                "use_rag": str(use_rag),
            }
        )

    return "", generate_queries_str(queries_list)
|
|
|
|
|
|
|
def submit(query_txt, model, temperature, use_rag):
    """Queue any pending input, send the whole queue to the backend, and
    return the cleared input, the queries text, and the answers text.
    """
    global queries_list

    # Queue whatever is still sitting in the input box before submitting.
    if len(query_txt) > 0:
        _, queries = add_to_list(query_txt, model, temperature, use_rag)
    else:
        queries = generate_queries_str(queries_list)

    if len(queries_list) > 0:
        response = requests.post(BACKEND_ADDR, json=queries_list)
        # Fail loudly on HTTP errors instead of crashing on .json() below.
        response.raise_for_status()
        answers = generate_answers_str(response.json())

        # The queue is consumed by a submission.
        queries_list = []
    else:
        answers = ""

    return "", queries, answers
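# The UI aside, a submission can also be exercised directly; this payload
# mirrors exactly what submit() sends (the values are illustrative):
#
#   requests.post(BACKEND_ADDR, json=[{
#       "prompt": "What is RAG?",
#       "temperature": "0.2",
#       "model": "llama3-70b-8192",
#       "use_rag": "True",
#   }])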
|
|
|
|
|
|
|
def generate_queries_str(queries_list):
    """Join the queued prompts into one display string, separated by rules."""
    delimiter = f"\n{'-' * 120}\n"
    queries_str = delimiter.join([q["prompt"] for q in queries_list])
    return queries_str
|
|
|
|
|
|
|
def generate_answers_str(answers_list):
    """Join the cleaned answers into one display string, separated by rules."""
    delimiter = f"\n{'-' * 120}\n"
    answers_str = delimiter.join([clean(a["answer"]) for a in answers_list])
    return answers_str
|
|
|
|
|
|
|
def clean(answer_str):
    """Strip boilerplate that the models tend to wrap around their answers."""
    # Drop a leading colon (and any whitespace before it).
    answer_str = re.sub(r'^\s*:', '', answer_str)

    # Remove stock filler phrases the models like to emit.
    garbages = [
        "Here is the generated paragraph:",
        "Let me know if this meets your requirements!",
    ]
    for g in garbages:
        answer_str = answer_str.replace(g, "")
    answer_str = answer_str.strip()
    return answer_str
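# For example:
#   clean(": Here is the generated paragraph: Answer text. "
#         "Let me know if this meets your requirements!")
# returns "Answer text."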
|
|
|
|
|
if __name__ == "__main__":

    with gr.Blocks(theme=gr.themes.Default()) as demo:
        with gr.Row():
            with gr.Column(scale=2):
                query_txt = gr.Textbox(
                    # Persian: "Enter your query here..."
                    placeholder="پرسش خود را اینجا وارد کنید...",
                    label="Query", rtl=True)
            with gr.Column(scale=1):
                model = gr.Radio(
                    choices=[
                        "Llama-3-8B-8192",
                        "Llama-3-70B-8192",
                        "Llama-3.1-70B-Versatile",
                    ],
                    value=DEFAULT_MODEL,
                    label="LLM"
                )
                use_rag = gr.Checkbox(value=DEFAULT_USE_RAG, label="Use RAG?")
                temperature = gr.Slider(minimum=0, maximum=1,
                                        value=DEFAULT_TEMP,
                                        step=0.1, label="Temperature")
|
|
|
        with gr.Row():
            clear_btn = gr.Button("Clear", variant="stop")
            add_btn = gr.Button("Add", variant="secondary")
            submit_btn = gr.Button("Submit", variant="primary")

        with gr.Row():
            with gr.Column():
                queries_box = gr.Textbox(
                    # Persian: "Your queries will be shown here..."
                    placeholder="پرسش‌های شما اینجا نمایش داده خواهد شد...",
                    label="Queries", interactive=False, rtl=True)
            with gr.Column():
                answers_box = gr.Textbox(
                    # Persian: "The model's answers will be shown here..."
                    placeholder="پاسخ‌های مدل اینجا نمایش داده خواهد شد...",
                    label="Answers", interactive=False, rtl=True)
|
|
|
        # Wire the buttons to their callbacks.
        clear_btn.click(
            fn=clear_queries,
            inputs=[],
            outputs=[query_txt, model, temperature, use_rag,
                     queries_box, answers_box]
        )
        add_btn.click(
            fn=add_to_list,
            inputs=[query_txt, model, temperature, use_rag],
            outputs=[query_txt, queries_box]
        )
        submit_btn.click(
            fn=submit,
            inputs=[query_txt, model, temperature, use_rag],
            outputs=[query_txt, queries_box, answers_box]
        )

    demo.launch()
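    # launch() serves on http://127.0.0.1:7860 by default; to expose the demo
    # on the network, Gradio also accepts e.g.
    # demo.launch(server_name="0.0.0.0", server_port=7860).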