c123ian committed on
Commit
3c3692a
1 Parent(s): f841eaf

deploy at 2024-08-16 15:13:36.558496

Browse files
Files changed (1) hide show
  1. main.py +16 -20
main.py CHANGED
@@ -1,17 +1,17 @@
1
  from fasthtml.common import *
2
- from fasthtml_hf import setup_hf_backup # Importing setup_hf_backup
3
- import openai
4
  import uvicorn
 
5
 
6
  # Set up the app, including daisyui and tailwind for the chat component
7
  tlink = Script(src="https://cdn.tailwindcss.com")
8
  dlink = Link(rel="stylesheet", href="https://cdn.jsdelivr.net/npm/[email protected]/dist/full.min.css")
9
  app = FastHTML(hdrs=(tlink, dlink, picolink))
10
 
11
- # Set up OpenAI API (https://openai.com/)
12
- # openai.api_key = os.getenv("OPENAI_API_KEY")
13
- messages = []
14
 
 
15
 
16
  # Chat message component (renders a chat bubble)
17
  def ChatMessage(msg):
@@ -21,12 +21,12 @@ def ChatMessage(msg):
21
  Div(msg['content'], cls=f"chat-bubble {bubble_class}"),
22
  cls=f"chat {chat_class}")
23
 
24
- # The input field for the user message. Also used to clear the
25
  # input field after sending a message via an OOB swap
26
  def ChatInput():
27
- return Input(type="text", name='msg', id='msg-input',
28
- placeholder="Type a message",
29
- cls="input input-bordered w-full", hx_swap_oob='true')
30
 
31
  # The main screen
32
  @app.route("/")
@@ -45,24 +45,20 @@ def get():
45
  def post(msg:str):
46
  messages.append({"role":"user", "content":msg})
47
 
48
- # Get response from OpenAI API
49
- response = openai.chat.completions.create(
50
- model="gpt-3.5-turbo",
51
- messages=[
52
- {"role": "system", "content": "You are a helpful and concise assistant."},
53
- *messages
54
- ]
55
- )
56
 
57
- assistant_msg = response.choices[0].message.content.strip()
 
 
58
  messages.append({"role":"assistant", "content":assistant_msg})
59
 
60
  return (ChatMessage(messages[-2]), # The user's message
61
  ChatMessage(messages[-1]), # The chatbot's response
62
  ChatInput()) # And clear the input field via an OOB swap
63
 
64
-
65
- #serve()
66
  if __name__ == "__main__":
67
  setup_hf_backup(app)
68
  uvicorn.run(app, host="0.0.0.0", port=7860)
 
1
  from fasthtml.common import *
2
+ from fasthtml_hf import setup_hf_backup
 
3
  import uvicorn
4
+ from transformers import pipeline
5
 
6
  # Set up the app, including daisyui and tailwind for the chat component
7
  tlink = Script(src="https://cdn.tailwindcss.com")
8
  dlink = Link(rel="stylesheet", href="https://cdn.jsdelivr.net/npm/[email protected]/dist/full.min.css")
9
  app = FastHTML(hdrs=(tlink, dlink, picolink))
10
 
11
+ # Set up the text generation pipeline
12
+ pipe = pipeline("text-generation", model="ReliableAI/UCCIX-Llama2-13B-Instruct")
 
13
 
14
+ messages = []
15
 
16
  # Chat message component (renders a chat bubble)
17
  def ChatMessage(msg):
 
21
  Div(msg['content'], cls=f"chat-bubble {bubble_class}"),
22
  cls=f"chat {chat_class}")
23
 
24
+ # The input field for the user message. Also used to clear the
25
  # input field after sending a message via an OOB swap
26
  def ChatInput():
27
+ return Input(type="text", name='msg', id='msg-input',
28
+ placeholder="Type a message",
29
+ cls="input input-bordered w-full", hx_swap_oob='true')
30
 
31
  # The main screen
32
  @app.route("/")
 
45
  def post(msg:str):
46
  messages.append({"role":"user", "content":msg})
47
 
48
+ # Get response from the text generation pipeline
49
+ full_prompt = "You are a helpful and concise assistant.\n\n"
50
+ for m in messages:
51
+ full_prompt += f"{m['role'].capitalize()}: {m['content']}\nAssistant: "
 
 
 
 
52
 
53
+ response = pipe(full_prompt, max_length=2048, num_return_sequences=1)
54
+
55
+ assistant_msg = response[0]['generated_text'].split("Assistant: ")[-1].strip()
56
  messages.append({"role":"assistant", "content":assistant_msg})
57
 
58
  return (ChatMessage(messages[-2]), # The user's message
59
  ChatMessage(messages[-1]), # The chatbot's response
60
  ChatInput()) # And clear the input field via an OOB swap
61
 
 
 
62
  if __name__ == "__main__":
63
  setup_hf_backup(app)
64
  uvicorn.run(app, host="0.0.0.0", port=7860)