import os

import streamlit as st
from langchain.schema.messages import HumanMessage

from utils import create_memory_add_initial_message, clear_memory, get_chain

# Read the OpenAI API key from the environment (raises KeyError if it is not set).
openai_api_key = os.environ['OPENAI_API_KEY']

# Session-state keys holding conversation memory; cleared whenever a sidebar option changes.
memories = ['memory']

with st.sidebar:
    temperature = st.slider("Temperature", 0.0, 1.0, value=0.8, step=0.1)
    issue = st.selectbox(
        "Select an Issue",
        ['Anxiety', 'Suicide'],
        index=0,
        on_change=clear_memory,
        args=(memories,),
    )
    # Spanish is only offered for the Anxiety issue.
    supported_languages = ['English', 'Spanish'] if issue == "Anxiety" else ['English']
    language = st.selectbox(
        "Select a Language",
        supported_languages,
        index=0,
        on_change=clear_memory,
        args=(memories,),
    )
    source = st.selectbox(
        "Select a source Model A",
        ['OpenAI GPT3.5', 'Finetuned OpenAI'],
        index=1,
        on_change=clear_memory,
        args=(memories,),
    )

# Initialize memory with the opening message and build the LLM chain for the current settings.
create_memory_add_initial_message(memories, language)
llm_chain = get_chain(issue, language, source, st.session_state[memories[0]], temperature)

st.title("💬 Simulator")

# Replay the conversation history stored in the buffer memory.
for msg in st.session_state[memories[0]].buffer_as_messages:
    role = "user" if isinstance(msg, HumanMessage) else "assistant"
    st.chat_message(role).write(msg.content)

# On a new user message: echo it, run the chain, and display the assistant's reply.
if prompt := st.chat_input():
    st.chat_message("user").write(prompt)
    response = llm_chain.predict(input=prompt, stop="helper:")
    # response = update_memory_completion(prompt, st.session_state["memory"], OA_engine, temperature)
    st.chat_message("assistant").write(response)