# PatentToolkit / app.py
import gradio as gr
from tridentmodel import classification
def add_text(history, text):
    # Append the user's message to the chat history and clear the textbox.
    history = history + [(text, None)]
    return history, ""


def add_file(history, file):
    # Append an uploaded file to the chat history as a (filename,) entry.
    history = history + [((file.name,), None)]
    return history


def bot(history):
    # Placeholder bot reply. History entries are (user, bot) tuples, so replace
    # the last entry rather than mutating the tuple in place.
    response = "**That's cool!**"
    history[-1] = (history[-1][0], response)
    return history
"""
Place holder alpaca model trained example:
Required:
!pip install -q datasets loralib sentencepiece
!pip install -q git+https://github.com/zphang/transformers@c3dc391
!pip install bitsandbytes
"""
'''
from transformers import LLaMATokenizer, LLaMAForCausalLM

tokenizer = LLaMATokenizer.from_pretrained("chavinlo/alpaca-native")
model = LLaMAForCausalLM.from_pretrained(
    "chavinlo/alpaca-native",
    load_in_8bit=True,
    device_map="auto",
)
'''
def generateresponse(history):
    """
    Generate the chatbot's reply to the latest user message.

    The commented block below shows how the Alpaca model defined above would
    be called; until it is enabled, the function simply echoes the question.
    """
    '''
    from transformers import GenerationConfig

    global model
    global tokenizer

    user = history[-1][0]

    PROMPT = f"""Below is an instruction that describes a task. Write a response that appropriately completes the request.

### Instruction:
{user}

### Response:"""

    inputs = tokenizer(
        PROMPT,
        return_tensors="pt",
    )
    input_ids = inputs["input_ids"].cuda()

    generation_config = GenerationConfig(
        temperature=0.6,
        top_p=0.95,
        repetition_penalty=1.15,
    )
    print("Generating...")
    generation_output = model.generate(
        input_ids=input_ids,
        generation_config=generation_config,
        return_dict_in_generate=True,
        output_scores=True,
        max_new_tokens=256,
    )
    outputs = []
    for s in generation_output.sequences:
        outputs.append(tokenizer.decode(s))
    # The model's reply is everything after the "### Response:" marker.
    response = outputs[0].split('### Response:')[1]
    '''
    # Placeholder behaviour until the model above is enabled: echo the question.
    user = history[-1][0]
    response = f"You asked: {user}"
    # History entries are (user, bot) tuples, so replace the last entry.
    history[-1] = (user, response)
    print(history)
    return history
theme = gr.themes.Base(
primary_hue="indigo",
).set(
prose_text_size='*text_sm'
)
with gr.Blocks(title='Claimed', theme=theme) as demo:
gr.Markdown("""
# CLAIMED - A GENERATIVE TOOLKIT FOR PATENT ATTORNEYS
The patenting process can be incredibly time-consuming and expensive. We're on a mission to change that.
Welcome to our demo! We've trained Meta's Llama on over 200k entries, with a focus on tasks related to the intellectual property domain.
Please note that this is for research purposes and shouldn't be used commercially.
None of this model's outputs, in whole or in part, constitutes legal advice. If you are seeking protection for your intellectual property, consult a registered patent/trademark attorney.
""")
with gr.Tab("Claim Drafter"):
gr.Markdown("""
Use this tool to turn your idea into a patent claim.
""")
with gr.Row(scale=1, min_width=600):
text1 = gr.Textbox(label="Input",
placeholder='Type in your idea here!')
text2 = gr.Textbox(label="Output")
with gr.Tab("Description Generator"):
gr.Markdown("""
Use this tool to expand your patent claim into a description.
You can also use it to generate abstracts, or to get ideas about the benefits of an invention, by changing the settings in the dropdown menu.
""")
with gr.Row(scale=1, min_width=600):
text1 = gr.Textbox(label="Input",
placeholder='Type in your idea here!')
text2 = gr.Textbox(label="Output")
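        # The Markdown above mentions a settings dropdown for switching between
        # description, abstract and benefit generation. A minimal sketch of that
        # control is shown here; it is an assumption (not present in the original
        # file) and is not yet wired to a model.
        mode_dropdown = gr.Dropdown(
            choices=["Description", "Abstract", "Benefits"],
            value="Description",
            label="Output type",
        )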
with gr.Tab("Knowledge Graph"):
gr.Markdown("""
Use this tool to generate a knowledge graph of the key concepts in your invention.
""")
with gr.Row(scale=1, min_width=600):
text1 = gr.Textbox(label="Input",
placeholder='Type in your idea here!')
text2 = gr.Textbox(label="Output")
with gr.Tab("Prosecution Ideator"):
gr.Markdown("""
Below is our prosecution ideator.
Example input: A device to help the visually impaired using proprioception.
Output:
""")
with gr.Row(scale=1, min_width=600):
text1 = gr.Textbox(label="Input",
placeholder='Type in your idea here!')
text2 = gr.Textbox(label="Output")
with gr.Tab("Claimed Infill"):
gr.Markdown("""
Below is our claim infill tool.
Example input: A device to help the visually impaired using proprioception.
Output:
""")
with gr.Row(scale=1, min_width=600):
text1 = gr.Textbox(label="Input",
placeholder='Type in your idea here!')
text2 = gr.Textbox(label="Output")
with gr.Tab("CPC Classifier"):
gr.Markdown("""
Use this tool to classify your invention according to the Cooperative Patent Classification (CPC) system.
""")
with gr.Row(scale=1, min_width=600):
text1 = gr.Textbox(label="Input",
placeholder='Type in your Claim/Description/Abstract Here')
text2 = gr.Textbox(label="Output")
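        # A minimal sketch of how this tab could be wired up. The real classifier
        # presumably lives in the tridentmodel.classification module imported above,
        # but its API is not shown in this file, so classify_cpc_stub is a
        # hypothetical placeholder that simply echoes its input.
        def classify_cpc_stub(text):
            # Hypothetical stub; replace with the actual CPC classification call.
            return f"CPC classification pending for: {text}"

        classify_btn = gr.Button("Classify")
        classify_btn.click(classify_cpc_stub, inputs=text1, outputs=text2)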
gr.Markdown("""
# THE CHATBOT
Do you want a bit more freedom over the outputs you generate? No worries, you can use a chatbot version of our model below. You can ask it anything.
If you're concerned about a particular output, hit the flag button and we will use that information to improve the model.
""")
chatbot = gr.Chatbot([], elem_id="Claimed Assistant").style(height=200)
with gr.Row():
with gr.Column(scale=0.85):
txt = gr.Textbox(
show_label=False,
placeholder="Enter text and submit",
).style(container=False)
with gr.Column(scale=0.15, min_width=0):
btn = gr.Button("Submit")
txt.submit(add_text, [chatbot, txt], [chatbot, txt]).then(
generateresponse, chatbot, chatbot
)
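    # Wire the Submit button to the same handlers as pressing Enter in the textbox.
    # This wiring is an assumption; the original file defines the button but does
    # not attach a click handler to it.
    btn.click(add_text, [chatbot, txt], [chatbot, txt]).then(
        generateresponse, chatbot, chatbot
    )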
gr.Markdown("""
# HAVE AN IDEA? GET IT CLAIMED
In the future, we are looking to expand our model's capabilities further to assist in a range of IP-related tasks.
If you are interested in using a more powerful model that we have trained, or if you have any suggestions of features you would like to see us add, please get in touch!
As far as data is concerned, you have nothing to worry about! We don't store any of your inputs to use for further training; we're not OpenAI.
""")
demo.launch()