Spaces:
Sleeping
Sleeping
File size: 1,298 Bytes
6d30494 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 |
from langchain.chains import ConversationChain
from langchain.prompts import PromptTemplate
def generate_prompt(input_variables: list, template_file: str):
    """Build a PromptTemplate from a template file.

    Reads the raw template text from ``template_file`` (UTF-8) and pairs
    it with ``input_variables`` to produce a ready-to-use prompt object.
    """
    with open(template_file, 'r', encoding='utf-8') as handle:
        template_text = handle.read()
    return PromptTemplate(template=template_text,
                          input_variables=input_variables)
def generate_conversation(memory: object,
                          llm: object,
                          prompt: object,
                          verbose: bool = False):
    """Assemble a ConversationChain from its parts.

    Wires the supplied memory, language model, and prompt into a single
    ConversationChain; ``verbose`` toggles the chain's debug logging.
    """
    return ConversationChain(
        memory=memory,
        llm=llm,
        prompt=prompt,
        verbose=verbose,
    )
def predict(input_text: str, conversation: object):
    """Run the conversation on ``input_text`` and package the result.

    Invokes ``conversation`` with the input text, then returns a dict
    with 'history' (the chain's history string split into a list of
    lines) and 'prediction' (the chain's response text).
    """
    result = conversation(input_text)
    return {
        'history': result['history'].split('\n'),
        'prediction': result['response'],
    }
|