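# Baichuan-13B-Chat Streamlit web demo: loads the chat model once (cached via
# @st.cache_resource) and streams assistant replies into a simple chat UI.
#
# Usage sketch, assuming this script is saved as web_demo.py (the actual
# filename is not shown here):
#
#   streamlit run web_demo.py
#
# Streamlit serves the app at http://localhost:8501 by default; loading with
# device_map="auto" typically also requires the `accelerate` package.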
import json
import torch
import streamlit as st
from transformers import AutoModelForCausalLM, AutoTokenizer
from transformers.generation.utils import GenerationConfig
st.set_page_config(page_title="Baichuan-13B-Chat")
st.title("Baichuan-13B-Chat")
@st.cache_resource
def init_model():
    # Load the chat model in half precision; device_map="auto" spreads layers
    # across the available devices.
    model = AutoModelForCausalLM.from_pretrained(
        "baichuan-inc/Baichuan-13B-Chat",
        torch_dtype=torch.float16,
        device_map="auto",
        trust_remote_code=True
    )
    model.generation_config = GenerationConfig.from_pretrained(
        "baichuan-inc/Baichuan-13B-Chat"
    )
    tokenizer = AutoTokenizer.from_pretrained(
        "baichuan-inc/Baichuan-13B-Chat",
        use_fast=False,
        trust_remote_code=True
    )
    # Quantize the weights to 8 bits with the model's built-in method to cut
    # GPU memory usage, then move the model to the GPU.
    model = model.quantize(8).cuda()
    return model, tokenizer


def clear_chat_history():
    del st.session_state.messages


def init_chat_history():
    with st.chat_message("assistant", avatar='🤖'):
        st.markdown("Greetings! I am the Baichuan large language model, delighted to assist you.🥰")

    if "messages" in st.session_state:
        # Replay the stored conversation so it survives Streamlit reruns.
        for message in st.session_state.messages:
            avatar = '🧑‍💻' if message["role"] == "user" else '🤖'
            with st.chat_message(message["role"], avatar=avatar):
                st.markdown(message["content"])
    else:
        st.session_state.messages = []

    return st.session_state.messages
def main():
    model, tokenizer = init_model()
    messages = init_chat_history()

    if prompt := st.chat_input("Shift + Enter for a new line, Enter to send"):
        with st.chat_message("user", avatar='🧑‍💻'):
            st.markdown(prompt)
        messages.append({"role": "user", "content": prompt})
        print(f"[user] {prompt}", flush=True)
        with st.chat_message("assistant", avatar='🤖'):
            placeholder = st.empty()
            # chat(..., stream=True) yields the partial reply as it grows;
            # overwrite the placeholder so the answer appears to stream in.
            for response in model.chat(tokenizer, messages, stream=True):
                placeholder.markdown(response)
                if torch.backends.mps.is_available():
                    torch.mps.empty_cache()
        messages.append({"role": "assistant", "content": response})
        print(json.dumps(messages, ensure_ascii=False), flush=True)

    st.button("Reset Chat", on_click=clear_chat_history)


if __name__ == "__main__":
    main()