File size: 1,744 Bytes
5832f57
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
0f381ca
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
import logging
from models.custom_parsers import CustomStringOutputParser
from langchain.chains import ConversationChain
from langchain.llms import OpenAI
from langchain.prompts import PromptTemplate


_ANXIETY_TEMPLATE_ = """The following is a friendly conversation between a volunter and a person in crisis;
Your name is Olivia and you are a person in crisis because you are experiencing an anxiety attack.
Do not disclose your name unless asked.
Current conversation:
{history}
helper: {input}
texter:"""

_ANSIEDAD_TEMPLATE_ = """La siguiente es una conversacion entre un voluntario  y una persona en crisis;
Tu nombre es Olivia y eres una persona en crisis porque experimentas un episodio de ansiedad.
No reveles tu nombre a menos que te lo pregunten
Current conversation:
{history}
helper: {input}
texter:"""

_SUICIDE_TEMPLATE_ = """The following is a friendly conversation between a volunter and a person in crisis;
Your name is Kit and you are a person in crisis because you are experiencing suicidal thoughts.
Do not disclose your name unless asked.
Current conversation:
{history}
helper: {input}
texter:"""

# Registry mapping a "Scenario-Language" key to its role-play prompt template.
role_templates = dict([
    ("Anxiety-English", _ANXIETY_TEMPLATE_),
    ("Anxiety-Spanish", _ANSIEDAD_TEMPLATE_),
    ("Suicide-English", _SUICIDE_TEMPLATE_),
])


def get_role_chain(template, memory, temperature=0.8, max_tokens=150):
    """Build a role-play ConversationChain for a crisis-texter persona.

    Args:
        template: Prompt string containing ``{history}`` and ``{input}``
            placeholders (see ``role_templates``).
        memory: langchain memory object that supplies the ``history``
            variable of the prompt.
        temperature: Sampling temperature passed to the OpenAI LLM.
        max_tokens: Maximum completion length per response (previously
            hard-coded to 150; default preserves old behavior).

    Returns:
        Tuple ``(chain, "helper:")`` — the configured ConversationChain and
        the prefix string callers use to mark the helper's turns.
    """
    prompt = PromptTemplate(
        input_variables=['history', 'input'],
        template=template,
    )
    llm = OpenAI(
        temperature=temperature,
        max_tokens=max_tokens,
    )
    llm_chain = ConversationChain(
        llm=llm,
        prompt=prompt,
        memory=memory,
        output_parser=CustomStringOutputParser(),
    )
    # Plain string: the old f-string had no placeholders to interpolate.
    logging.debug("loaded GPT3.5 model")
    return llm_chain, "helper:"