# Simple chatbot built with LangChain and Amazon Bedrock.
import gradio as gr

from langchain_aws import ChatBedrock
from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder

# For managing chat memory
import uuid
from langchain_community.chat_message_histories import ChatMessageHistory
from langchain_core.chat_history import BaseChatMessageHistory
from langchain_core.runnables.history import RunnableWithMessageHistory

# In-memory session store (LangChain 0.2-style history management)
store = {}

# Returns a new session id using uuid
def get_chat_session_id():
    return str(uuid.uuid4())

def get_session_history(session_id: str) -> BaseChatMessageHistory:
    # Create a fresh history the first time a session id is seen
    if session_id not in store:
        store[session_id] = ChatMessageHistory()
    return store[session_id]
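
# NOTE: `store` keeps histories in process memory only, so all sessions are
# lost when the process restarts. A persistent backend (e.g. Redis via
# langchain_community's RedisChatMessageHistory) could replace it.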

def init():
    # Define the Bedrock chat model (low temperature for focused answers)
    chat = ChatBedrock(
        model_id="anthropic.claude-3-sonnet-20240229-v1:0",
        model_kwargs={"temperature": 0.1},
    )
    prompt = ChatPromptTemplate.from_messages(
        [
            (
                "system",
                "You're an assistant who's good at {ability}. Respond in 20 words or fewer",
            ),
            # Placeholder where the per-session chat history is injected
            MessagesPlaceholder(variable_name="history"),
            ("human", "{input}"),
        ]
    )
    runnable = prompt | chat
    # Wrap the chain so history is loaded and saved per session id
    with_message_history = RunnableWithMessageHistory(
        runnable,
        get_session_history,
        input_messages_key="input",
        history_messages_key="history",
    )
    return with_message_history
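
# A minimal sketch of invoking the wrapped chain directly, outside the UI
# (assumes AWS credentials with Bedrock model access are configured):
#
#   chain = init()
#   reply = chain.invoke(
#       {"ability": "Philosophy", "input": "Who was Socrates?"},
#       config={"configurable": {"session_id": get_chat_session_id()}},
#   )
#   print(reply.content)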

def bedrock_response(message, history):
    # Ability is hard-coded; it fills the {ability} slot in the system prompt
    the_ability = "Philosophy"
    response = the_chat.invoke(
        {"ability": the_ability, "input": message},
        config={"configurable": {"session_id": id_session}},
    )
    print(type(response))  # debug: response is a chat message object
    print(response)
    return response.content
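
# gr.ChatInterface calls bedrock_response with (message, history); the Gradio
# history argument is ignored because RunnableWithMessageHistory keeps its own
# per-session history. Note that a single id_session is generated at startup,
# so every user of the UI shares one conversation history.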
id_session = get_chat_session_id()
the_chat = init()
demo = gr.ChatInterface(bedrock_response)
if __name__ == "__main__":
    demo.launch()
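
# To run outside Hugging Face Spaces, launch() accepts standard server options,
# for example: demo.launch(server_name="0.0.0.0", server_port=7860)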