deploy at 2024-08-16 15:07:47.537297
Browse files- main copy.py +64 -0
main copy.py
ADDED
@@ -0,0 +1,64 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from fasthtml.common import *
|
2 |
+
from fasthtml_hf import setup_hf_backup
|
3 |
+
import uvicorn
|
4 |
+
from transformers import pipeline
|
# Set up the app, including daisyui and tailwind for the chat component.
# tlink/dlink are injected into the page <head> via FastHTML's hdrs tuple.
tlink = Script(src="https://cdn.tailwindcss.com")
dlink = Link(rel="stylesheet", href="https://cdn.jsdelivr.net/npm/[email protected]/dist/full.min.css")
app = FastHTML(hdrs=(tlink, dlink, picolink))

# Set up the text generation pipeline.
# NOTE(review): a 13B model is loaded eagerly at import time — assumes the
# host has enough RAM/GPU for it; confirm before deploying.
pipe = pipeline("text-generation", model="ReliableAI/UCCIX-Llama2-13B-Instruct")

# In-memory chat history shared by ALL requests: a single global
# conversation (not per-user), lost on restart.
messages = []
+
# Chat message component (renders a chat bubble)
|
17 |
+
def ChatMessage(msg):
|
18 |
+
bubble_class = f"chat-bubble-{'primary' if msg['role'] == 'user' else 'secondary'}"
|
19 |
+
chat_class = f"chat-{'end' if msg['role'] == 'user' else 'start'}"
|
20 |
+
return Div(Div(msg['role'], cls="chat-header"),
|
21 |
+
Div(msg['content'], cls=f"chat-bubble {bubble_class}"),
|
22 |
+
cls=f"chat {chat_class}")
|
# The input field for the user message. Also used to clear the
# input field after sending a message via an OOB swap
def ChatInput():
    """Return the (empty) message text input.

    hx_swap_oob='true' lets the POST handler return a fresh copy of this
    element to clear the field out-of-band after each send.
    """
    attrs = dict(type="text", name='msg', id='msg-input',
                 placeholder="Type a message",
                 cls="input input-bordered w-full", hx_swap_oob='true')
    return Input(**attrs)
# The main screen
@app.route("/")
def get():
    """Render the chat page.

    Shows the scrollable history list plus a send form that POSTs back
    to "/" and appends the returned bubbles to #chatlist via htmx.
    """
    history = Div(*[ChatMessage(msg) for msg in messages],
                  id="chatlist", cls="chat-box h-[73vh] overflow-y-auto")
    send_form = Form(Group(ChatInput(), Button("Send", cls="btn btn-primary")),
                     hx_post="/", hx_target="#chatlist", hx_swap="beforeend",
                     cls="flex space-x-2 mt-2",
                     )
    page = Body(H1('Chatbot Demo'), history, send_form,
                cls="p-4 max-w-lg mx-auto")
    return Title('Chatbot Demo'), page
# Base instruction prepended to every generation request.
SYSTEM_PROMPT = "You are a helpful and concise assistant.\n\n"

def _build_prompt(history):
    """Flatten the chat history into a single completion prompt.

    Each turn becomes "Role: content" on its own line, followed by ONE
    trailing "Assistant: " cue so the model continues as the assistant.
    (The original loop appended "Assistant: " after *every* turn —
    including the assistant's own — corrupting the prompt.)
    """
    turns = "".join(f"{m['role'].capitalize()}: {m['content']}\n" for m in history)
    return SYSTEM_PROMPT + turns + "Assistant: "

# Handle the form submission
@app.post("/")
def post(msg: str):
    """Append the user's message, generate a reply, and return the two
    new chat bubbles plus a fresh input field (cleared via OOB swap)."""
    messages.append({"role": "user", "content": msg})

    full_prompt = _build_prompt(messages)

    # NOTE(review): max_length counts prompt + generated tokens, so long
    # conversations will eventually leave no room to generate; consider
    # max_new_tokens instead — confirm against the model's context size.
    response = pipe(full_prompt, max_length=2048, num_return_sequences=1)

    # The pipeline echoes the prompt in generated_text; keep only the text
    # after the final "Assistant: " cue.
    assistant_msg = response[0]['generated_text'].split("Assistant: ")[-1].strip()
    messages.append({"role": "assistant", "content": assistant_msg})

    return (ChatMessage(messages[-2]),  # The user's message
            ChatMessage(messages[-1]),  # The chatbot's response
            ChatInput())                # And clear the input field via an OOB swap
# Entry point: register the Hugging Face backup hook for the app, then
# serve on all interfaces at port 7860 (the conventional HF Spaces port).
if __name__ == "__main__":
    setup_hf_backup(app)
    uvicorn.run(app, host="0.0.0.0", port=7860)