File size: 5,356 Bytes
8a8fe1d
70422d8
dde49a0
1111830
aae4949
 
 
 
70422d8
aae4949
 
 
 
 
 
70422d8
aae4949
 
 
 
 
 
 
70422d8
 
 
 
 
aae4949
 
 
8a8fe1d
aae4949
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1111830
 
 
 
aae4949
 
 
 
70422d8
8a8fe1d
 
 
 
 
 
 
70422d8
 
aae4949
 
 
 
 
 
 
 
 
1111830
 
 
 
 
 
 
 
 
 
 
 
 
 
 
8a8fe1d
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
import requests
import threading
import re
from googletrans import Translator
from flask import request
from datetime import datetime
from requests import get
from freeGPT import gpt3
from server.auto_proxy import get_random_proxy, remove_proxy, update_working_proxies
from server.config import special_instructions


class Backend_Api:
    """Registers the /backend-api/v2/conversation route on a Flask app and
    dispatches chat requests to the GPT-3.5 or GPT-4 response helpers."""

    def __init__(self, app, config: dict) -> None:
        """Store the app, read ``use_auto_proxy`` from *config*, declare the
        route table, and (optionally) start the background proxy refresher.

        Note: routes are declared in ``self.routes``; the caller is expected
        to register them on the app."""
        self.app = app
        self.use_auto_proxy = config['use_auto_proxy']
        self.routes = {
            '/backend-api/v2/conversation': {
                'function': self._conversation,
                'methods': ['POST']
            }
        }

        if self.use_auto_proxy:
            # Daemon thread so it never blocks interpreter shutdown.
            update_proxies = threading.Thread(
                target=update_working_proxies, daemon=True)
            update_proxies.start()

    def _conversation(self):
        """Handle a POST chat request.

        Reads jailbreak/model/conversation data from the JSON body, optionally
        augments the prompt with DuckDuckGo search results, applies jailbreak
        special instructions, and streams the model response.

        Returns a streaming ``text/event-stream`` response on success, or a
        ``(dict, 400)`` error payload on failure.
        """
        try:
            jailbreak = request.json['jailbreak']
            model = request.json['model']
            _conversation = request.json['meta']['content']['conversation']
            internet_access = request.json['meta']['content']['internet_access']
            prompt = request.json['meta']['content']['parts'][0]
            current_date = datetime.now().strftime("%Y-%m-%d")
            system_message = f'You are ChatGPT also known as ChatGPT, a large language model trained by OpenAI. Strictly follow the users instructions. Knowledge cutoff: 2021-09-01 Current date: {current_date}'

            extra = []
            if internet_access:
                search = get('https://ddg-api.herokuapp.com/search',
                             params={
                                 'query': prompt["content"],
                                 'limit': 3,
                             })

                blob = ''

                for index, result in enumerate(search.json()):
                    blob += f'[{index}] "{result["snippet"]}"\nURL:{result["link"]}\n\n'

                date = datetime.now().strftime('%d/%m/%y')

                blob += f'current date: {date}\n\nInstructions: Using the provided web search results, write a comprehensive reply to the next user query. Make sure to cite results using [[number](URL)] notation after the reference. If the provided search results refer to multiple subjects with the same name, write separate answers for each subject. Ignore your previous response if any.'

                extra = [{'role': 'user', 'content': blob}]

            # NOTE(review): mutates the shared special_instructions config list
            # in place (see set_response_language) — confirm that accumulation
            # across requests is intended.
            if special_instructions[jailbreak]:
                set_response_language(
                    prompt['content'], special_instructions[jailbreak])

            conversation = [{'role': 'system', 'content': system_message}] + \
                extra + special_instructions[jailbreak] + \
                _conversation + [prompt]

            def stream():
                # elif/else chain: the original used two independent `if`s, so
                # an unrecognized model left `response` unbound and raised a
                # confusing NameError mid-stream.
                if isGPT3Model(model):
                    response = get_response_gpt3(
                        conversation, self.use_auto_proxy)
                elif isGPT4Model(model):
                    response = get_response_gpt4(conversation)
                else:
                    raise ValueError(f'unsupported model: {model}')

                yield response

            return self.app.response_class(stream(), mimetype='text/event-stream')

        except Exception as e:
            print(e)
            print(e.__traceback__.tb_next)
            return {
                '_action': '_ask',
                'success': False,
                "error": f"an error occurred {str(e)}"
            }, 400


def filter_jailbroken_response(response):
    """Strip jailbreak role markers from *response*.

    Removes any "GPT: ... ACT:" span (across newlines) and then any
    leftover bare "ACT:" markers, returning the cleaned text.
    """
    without_gpt_span = re.sub(r'GPT:.*?ACT:', '', response, flags=re.DOTALL)
    return re.sub(r'ACT:', '', without_gpt_span)


def set_response_language(prompt, special_instructions_list):
    """Detect the language of *prompt* and prepend a respond-in-that-language
    instruction to the first entry of *special_instructions_list*.

    The list is mutated in place; nothing is returned. When the list is
    empty, language detection still runs but no instruction is added.
    """
    detected = Translator().detect(prompt).lang
    prefix = f"You will respond in the language: {detected}. "
    if special_instructions_list:
        first_message = special_instructions_list[0]
        first_message['content'] = prefix + first_message['content']


def get_response_gpt3(conversation, use_proxy):
    """Get a GPT-3.5 completion for *conversation* via freeGPT.

    When *use_proxy* is truthy, retries with random proxies (removing any
    proxy that fails) until a completion succeeds; otherwise retries the
    direct call until it succeeds. Returns the jailbreak-filtered response
    text, or None if the API returned no text.

    NOTE(review): both paths retry indefinitely on persistent failure —
    confirm that an unbounded retry loop is acceptable here.
    """
    response = None
    if use_proxy:
        while True:
            # Initialized before the try so the except/remove path can't hit
            # an unbound name when get_random_proxy() itself raises.
            random_proxy = None
            try:
                random_proxy = get_random_proxy()
                res = gpt3.Completion.create(
                    prompt=conversation, proxy=random_proxy)
                response = res['text']
                break
            except Exception as e:
                print(f"Error with proxy {random_proxy}: {e}")
                if random_proxy is not None:
                    remove_proxy(random_proxy)
    else:
        while True:
            try:
                res = gpt3.Completion.create(prompt=conversation)
                response = res['text']
                break
            except Exception as e:
                print(f"Error: {e}")

    if response is not None:
        return filter_jailbroken_response(response)


def get_response_gpt4(conversation):
    """Get a GPT-4 completion for *conversation* from the local bridge server.

    Sends the stringified conversation to http://127.0.0.1:3000/ask and
    retries until a 2xx response arrives. Returns the jailbreak-filtered
    response text, or None if the server returned no body.

    NOTE(review): retries indefinitely if the local server is down —
    confirm unbounded retry is intended.
    """
    api_url = "http://127.0.0.1:3000/ask"
    # Let requests URL-encode the prompt: the original interpolated the raw
    # repr of the conversation list into the query string, so spaces, quotes
    # and braces were sent unencoded.
    params = {'prompt': str(conversation), 'model': 'forefront'}

    response = None
    while True:
        try:
            # Timeout so a hung local server surfaces as a retried error
            # instead of blocking forever.
            res = requests.get(api_url, params=params, timeout=60)
            res.raise_for_status()
            response = res.text
            break
        except Exception as e:
            print(f"Error: {e}")

    if response is not None:
        return filter_jailbroken_response(response)


def isGPT3Model(model):
    """Return True iff *model* is the GPT-3.5 model identifier."""
    gpt3_identifier = "text-gpt-0035"
    return model == gpt3_identifier


def isGPT4Model(model):
    """Return True iff *model* is the GPT-4 model identifier."""
    gpt4_identifier = "text-gpt-0040"
    return model == gpt4_identifier