# NOTE: removed non-code residue from a Hugging Face Spaces page capture
# ("Spaces:" / "Sleeping" status lines) that was not part of the module.
# Standard library
import os

# Third-party
import openai
from langchain.chat_models import ChatOpenAI
from langchain.memory import ConversationBufferMemory

# Local modules
from constants import MODEL_NAME, TEMPERATURE
from llm import generate_conversation, generate_prompt, predict

# Authenticate the OpenAI client at import time.
# Intentionally raises KeyError if OPENAI_API_KEY is unset, failing fast.
openai_api_key = os.environ['OPENAI_API_KEY']
openai.api_key = openai_api_key
def get_interview_questions_conversation():
    '''
    Build a conversation object configured to generate interview questions.

    Wires together the interview-question prompt template, a ChatOpenAI
    LLM (module-level TEMPERATURE / MODEL_NAME / API key), and a buffer
    memory whose AI messages are prefixed "JobGPT".

    Returns:
        The conversation chain produced by ``generate_conversation``;
        pass it to ``predict_interview_question`` to get responses.
    '''
    # Template expects the two variables the memory/chain supplies:
    # the running 'history' and the latest user 'input'.
    prompt = generate_prompt(
        input_variables=['history', 'input'],
        template_file='templates/interview_question_generator.txt')
    llm = ChatOpenAI(
        temperature=TEMPERATURE,
        openai_api_key=openai_api_key,
        model=MODEL_NAME)
    memory = ConversationBufferMemory(ai_prefix="JobGPT")
    conversation = generate_conversation(memory=memory, llm=llm, prompt=prompt)
    return conversation
def predict_interview_question(input_text: str, conversation: object):
    '''
    Forward *input_text* to the given conversation object and return
    the model's next response.
    '''
    # Thin wrapper around llm.predict so callers never touch it directly.
    return predict(input_text=input_text, conversation=conversation)