# Hugging Face Spaces page residue (kept as comments so the file parses):
# Spaces:
# Sleeping
# Sleeping
# Simple chatbot with LangChain using Amazon Bedrock.

# Standard library.
import random
import uuid

# Third-party.
import gradio as gr
import langchain
import langchain_community
from langchain.chains import ConversationChain
from langchain_aws import ChatBedrock
from langchain_community.chat_message_histories import ChatMessageHistory
from langchain_core.chat_history import BaseChatMessageHistory
from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
from langchain_core.runnables.history import RunnableWithMessageHistory

# In-memory session store (LangChain 0.2 style): maps session_id -> ChatMessageHistory.
# Histories persist only for the lifetime of the process.
store = {}
# Helper that returns a session id built from a random UUID.
def get_chat_session_id() -> str:
    """Return a fresh, unique chat-session id (a uuid4 rendered as a string)."""
    return str(uuid.uuid4())
def get_session_history(session_id: str) -> BaseChatMessageHistory:
    """Return the message history for *session_id*, creating it on first use.

    Histories live in the module-level ``store`` dict, so they are
    in-memory only and shared across all callers in this process.
    """
    if session_id not in store:
        store[session_id] = ChatMessageHistory()
    return store[session_id]
def init():
    """Build and return the Bedrock chat runnable with per-session memory.

    Returns a ``RunnableWithMessageHistory`` wrapping ``prompt | chat``;
    invoke it with ``{"ability": ..., "input": ...}`` plus a
    ``configurable.session_id`` entry in the config.
    """
    # Claude 3 Sonnet on Bedrock; low temperature for focused answers.
    chat = ChatBedrock(
        model_id="anthropic.claude-3-sonnet-20240229-v1:0",
        model_kwargs={"temperature": 0.1},
    )
    prompt = ChatPromptTemplate.from_messages(
        [
            (
                "system",
                "You're an assistant who's good at {ability}. Respond in 20 words or fewer",
            ),
            # Placeholder filled in by RunnableWithMessageHistory below.
            MessagesPlaceholder(variable_name="history"),
            ("human", "{input}"),
        ]
    )
    runnable = prompt | chat
    # Wire the runnable to the per-session history lookup (LangChain 0.2 API).
    with_message_history = RunnableWithMessageHistory(
        runnable,
        get_session_history,
        input_messages_key="input",
        history_messages_key="history",
    )
    return with_message_history
def bedrock_response(message, history):
    """Gradio ChatInterface callback: send *message* to Bedrock, return the text.

    *history* (Gradio's own chat log) is unused here; conversation memory is
    kept by LangChain via the session store keyed by the module-level
    ``id_session``.
    """
    the_ability = "Filosofia"
    response = the_chat.invoke(
        {"ability": the_ability, "input": message},
        config={"configurable": {"session_id": id_session}},
    )
    # response is an AIMessage-like object; Gradio expects its text content.
    return response.content
# One process-wide session: a single session id and one configured runnable.
id_session = get_chat_session_id()
the_chat = init()
demo = gr.ChatInterface(bedrock_response)

if __name__ == "__main__":
    demo.launch()