# AuditLLM / app.py
import os

import gradio as gr
from langchain import PromptTemplate, LLMChain
from langchain.llms import CTransformers
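
# Two-model pipeline: Mistral-7B-Instruct (GGUF) generates candidate question
# prompts, and Llama-2-7B-Chat (GGML) answers whichever prompts the user selects.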
def generate_prompts(user_input):
    """Generate 10 question prompts related to the user's question with Mistral-7B-Instruct."""
    prompt_template = PromptTemplate(
        input_variables=["Question"],
        template="Just list 10 question prompts for {Question} and don't put a number before each of the prompts."
    )
    config = {'max_new_tokens': 2048, 'temperature': 0.7, 'context_length': 4096}
    llm = CTransformers(model="TheBloke/Mistral-7B-Instruct-v0.1-GGUF",
                        config=config,
                        threads=os.cpu_count())
    hub_chain = LLMChain(prompt=prompt_template, llm=llm)
    input_data = {"Question": user_input}
    generated_prompts = hub_chain.run(input_data)
    # Split the raw completion into one prompt per line, dropping blank lines.
    questions_list = generated_prompts.split('\n')
    formatted_questions = "\n".join(f"Question: {question}" for question in questions_list if question.strip())
    questions_list = formatted_questions.split("Question:")[1:]
    return questions_list
def answer_question(prompt):
    """Answer a single generated prompt with Llama-2-7B-Chat."""
    prompt_template = PromptTemplate(
        input_variables=["Question"],
        template="Give one answer for {Question} and do not consider the number behind it."
    )
    config = {'max_new_tokens': 2048, 'temperature': 0.7, 'context_length': 4096}
    llm = CTransformers(model="TheBloke/Llama-2-7B-Chat-GGML",
                        config=config,
                        threads=os.cpu_count())
    hub_chain = LLMChain(prompt=prompt_template, llm=llm)
    input_data = {"Question": prompt}
    generated_answer = hub_chain.run(input_data)
    return generated_answer
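
# Pre-allocated output slots: the UI below creates 10 hidden Textboxes that
# setTextVisibility() reveals and fills once prompts are selected and executed.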
text_list = []
def updateChoices(prompt):
    newChoices = generate_prompts(prompt)
    return gr.CheckboxGroup(choices=newChoices)
def setTextVisibility(cbg):
    # Show one answered Textbox per selected prompt; keep the remaining slots hidden.
    update_show = [gr.Textbox(visible=True, label=text, value=answer_question(text)) for text in cbg]
    update_hide = [gr.Textbox(visible=False, label="") for _ in range(10 - len(cbg))]
    return update_show + update_hide
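
# Gradio UI: question input -> generated prompt checkboxes -> answered Textboxes.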
with gr.Blocks() as demo:
    gr.HTML("""
    <div style="text-align: center; max-width: 1240px; margin: 0 auto;">
      <h1 style="font-weight: 200; font-size: 20px; margin-bottom:8px; margin-top:0px;">
        Auditing LLMs
      </h1>
      <hr style="margin-bottom:5px; margin-top:5px;">
    </div>
    """)

    with gr.Row():
        prompt_input = gr.Textbox(label="Enter your question", placeholder="Enter Your Question")
    with gr.Row():
        generate_button = gr.Button("Generate")
    with gr.Column():
        cbg = gr.CheckboxGroup(choices=[], label="List of the prompts", interactive=True)

    generate_button.click(updateChoices, inputs=[prompt_input], outputs=[cbg])

    with gr.Row(variant="compact") as exec_row:
        btnExec = gr.Button("Execute")
    with gr.Column() as texts:
        for i in range(10):
            text = gr.Textbox(label="_", visible=False)
            text_list.append(text)

    btnExec.click(setTextVisibility, inputs=cbg, outputs=text_list)
    clear = gr.ClearButton(link="http://127.0.0.1:8160")

# Launch the Gradio app
demo.launch()