import os

import openai
from langchain.chat_models import ChatOpenAI
from langchain.memory import ConversationBufferMemory

from llm import generate_prompt, generate_conversation, predict

# Read the OpenAI API key from the environment and configure the client.
openai_api_key = os.environ['OPENAI_API_KEY']
openai.api_key = openai_api_key
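# The three helpers imported from the local `llm` module are not shown in this
# file. Below is a minimal sketch of what they are assumed to do (an assumption
# for illustration, not the repo's actual implementation), kept as comments so
# it does not shadow the real imports above:
#
#     from langchain.chains import ConversationChain
#     from langchain.prompts import PromptTemplate
#
#     def generate_prompt(input_variables, template_file):
#         # Load the template text and wrap it in a LangChain PromptTemplate.
#         with open(template_file) as f:
#             return PromptTemplate(input_variables=input_variables,
#                                    template=f.read())
#
#     def generate_conversation(memory, llm, prompt):
#         # Wire prompt, model, and memory into a ConversationChain.
#         return ConversationChain(prompt=prompt, llm=llm, memory=memory)
#
#     def predict(input_text, conversation):
#         # Run one turn of the chain and return the model's reply.
#         return conversation.predict(input=input_text)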

def get_job_description_conversation():
    '''
    Build the conversation object used for job description generation.
    '''
    # The prompt template expects the running chat history and the latest user input.
    prompt = generate_prompt(
        input_variables=['history', 'input'],
        template_file='templates/job_description_generation.txt')
    # Deterministic chat model so generated descriptions are reproducible.
    llm = ChatOpenAI(temperature=0, openai_api_key=openai_api_key)
    # Buffer memory keeps the full dialogue; model replies are labelled "JobGPT".
    memory = ConversationBufferMemory(ai_prefix="JobGPT")
    conversation = generate_conversation(memory=memory, llm=llm, prompt=prompt)
    return conversation

def predict_job_description(input_text: str, conversation: object):
    '''
    Predict the next response from the conversation object
    '''
    response = predict(input_text=input_text, conversation=conversation)
    return response
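
if __name__ == '__main__':
    # Minimal usage sketch (not part of the original Space app): assumes
    # OPENAI_API_KEY is set and templates/job_description_generation.txt
    # exists alongside this file.
    conversation = get_job_description_conversation()
    reply = predict_job_description(
        input_text='Draft a job description for a senior data engineer.',
        conversation=conversation)
    print(reply)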