Sourcery AI committed
Commit: 5684fd5
1 Parent(s): 14dbe6a
'Refactored by Sourcery'
- g4f/Provider/Providers/Aws.py +1 -1
- g4f/Provider/Providers/Bard.py +16 -12
- g4f/Provider/Providers/Forefront.py +1 -2
- g4f/Provider/Providers/Phind.py +1 -3
- g4f/Provider/Providers/Pierangelo.py +6 -5
- g4f/Provider/Providers/Vercel.py +4 -7
- g4f/Provider/Providers/Yqcloud.py +2 -2
- g4f/Provider/Providers/helpers/bing.py +24 -38
- g4f/__init__.py +3 -3
- g4f/typing.py +5 -5
- g4f/utils.py +18 -27
- server/auto_proxy.py +7 -9
- server/backend.py +4 -17
- server/website.py +2 -2
g4f/Provider/Providers/Aws.py
CHANGED
@@ -12,7 +12,7 @@ class Auth(requests.auth.AuthBase):
         self.token = 'sk-1234567890abcdefghijklmnopqrstuvwxyzABCDEFGHIJKL'
 
     def __call__(self, r):
-        r.headers["authorization"] = "Bearer
+        r.headers["authorization"] = f"Bearer {self.token}"
         return r
 
 def _create_completion(model: str, messages: list, stream: bool, **kwargs):
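Note: the Aws.py change is Sourcery's usual string-interpolation cleanup, building the header with an f-string instead of concatenation. A minimal sketch of the pattern (standalone; the class name BearerAuth and the token value are placeholders, not the repo's code):

import requests

class BearerAuth(requests.auth.AuthBase):
    """Attach a Bearer token to every outgoing request."""
    def __init__(self, token: str):
        self.token = token

    def __call__(self, r):
        # f-string interpolation instead of string concatenation
        r.headers["authorization"] = f"Bearer {self.token}"
        return r

# usage: requests.get(url, auth=BearerAuth("sk-..."))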
g4f/Provider/Providers/Bard.py
CHANGED
@@ -10,17 +10,17 @@ supports_stream = False
 def _create_completion(model: str, messages: list, stream: bool, **kwargs):
     psid = {cookie.name: cookie.value for cookie in browser_cookie3.chrome(
         domain_name='.google.com')}['__Secure-1PSID']
-
-    formatted = '\n'.join(
-
-
+
+    formatted = '\n'.join(
+        [f"{message['role']}: {message['content']}" for message in messages]
+    )
     prompt = f'{formatted}\nAssistant:'
 
     proxy = None
-
-    if proxy
+
+    if proxy is None:
         raise Exception('Proxy is required for Bard (set in g4f/Provider/Providers/Bard.py line 18)')
-
+
     snlm0e = False
     conversation_id = None
     response_id = None
@@ -41,8 +41,13 @@ def _create_completion(model: str, messages: list, stream: bool, **kwargs):
         'cookie': f'__Secure-1PSID={psid}'
     }
 
-    snlm0e =
-
+    snlm0e = (
+        re.search(
+            r'SNlM0e\":\"(.*?)\"', client.get('https://bard.google.com/').text
+        )[1]
+        if not snlm0e
+        else snlm0e
+    )
 
     params = {
         'bl': 'boq_assistant-bard-web-server_20230326.21_p0',
@@ -63,12 +68,11 @@ def _create_completion(model: str, messages: list, stream: bool, **kwargs):
     response = client.post(f'https://bard.google.com/_/BardChatUi/data/{intents}/StreamGenerate',
                            data=data, params=params)
 
-    chat_data
-    if chat_data:
+    if chat_data := json.loads(response.content.splitlines()[3])[0][2]:
         json_chat_data = json.loads(chat_data)
 
         yield json_chat_data[0][0]
-
+
     else:
         yield 'error'
 
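Two patterns recur in the Bard.py hunks: the message list is formatted with an f-string comprehension, and the assign-then-test on chat_data is collapsed into an assignment expression (the := "walrus" operator, Python 3.8+). A minimal sketch of the walrus pattern, with a made-up parse_chunk helper standing in for the json.loads chain in the diff:

def parse_chunk(raw: bytes):
    # placeholder for json.loads(response.content.splitlines()[3])[0][2]
    return raw.decode() or None

def stream(raw: bytes):
    # assignment expression: bind and test in one step
    if chat_data := parse_chunk(raw):
        yield chat_data
    else:
        yield 'error'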
g4f/Provider/Providers/Forefront.py
CHANGED
@@ -26,8 +26,7 @@ def _create_completion(model: str, messages: list, stream: bool, **kwargs):
 
     for token in response.iter_lines():
         if b'delta' in token:
-
-            yield (token)
+            yield json.loads(token.decode().split('data: ')[1])['delta']
 
 
 params = f'g4f.Providers.{os.path.basename(__file__)[:-3]} supports: ' + \
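The Forefront.py change inlines the variable that was only ever yielded once: each line is decoded, split on the 'data: ' prefix, and the 'delta' field is yielded directly. A small sketch of that server-sent-events style parsing, assuming lines shaped like b'data: {"delta": "..."}':

import json

def extract_deltas(lines):
    # yield the 'delta' text from each SSE-style "data: {...}" line
    for token in lines:
        if b'delta' in token:
            yield json.loads(token.decode().split('data: ')[1])['delta']

# example: list(extract_deltas([b'data: {"delta": "Hel"}', b'data: {"delta": "lo"}']))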
g4f/Provider/Providers/Phind.py
CHANGED
@@ -33,9 +33,7 @@ def _create_completion(model: str, messages: list, stream: bool, **kwargs):
             yield 'Clouflare error, please try again...'
             os._exit(0)
 
-
-        if b'ping - 2023-' in line:
-            continue
+        elif b'ping - 2023-' not in line:
             yield line.decode('utf-8', errors='ignore') # [:-1]
 
 
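In Phind.py, a nested `if ...: continue` is folded into the surrounding conditional chain as an `elif` with the condition negated, so the ping keep-alive lines are simply skipped. A rough equivalent of the two shapes, using a hypothetical error marker rather than the provider's real one:

def filter_lines(lines):
    for line in lines:
        if b'PHIND_ERROR' in line:          # hypothetical error marker
            yield 'error, please try again...'
        elif b'ping - 2023-' not in line:   # was: if b'ping - 2023-' in line: continue
            yield line.decode('utf-8', errors='ignore')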
g4f/Provider/Providers/Pierangelo.py
CHANGED
@@ -45,11 +45,12 @@ def _create_completion(model: str, messages: list, stream: bool, **kwargs):
         'temperature': 0.7
     }
 
-
-
-
-
-
+    yield from requests.post(
+        'https://chat.pierangelo.info/api/chat',
+        headers=headers,
+        json=json_data,
+        stream=True,
+    )
 
 params = f'g4f.Providers.{os.path.basename(__file__)[:-3]} supports: ' + \
     '(%s)' % ', '.join([f'{name}: {get_type_hints(_create_completion)[name].__name__}' for name in _create_completion.__code__.co_varnames[:_create_completion.__code__.co_argcount]])
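Pierangelo.py now delegates straight to the streaming response with `yield from`. Iterating a requests.Response yields raw byte chunks, so the generator forwards whatever the server streams back. A sketch of the shape (the URL and payload here are placeholders):

import requests

def stream_chat(url: str, headers: dict, json_data: dict):
    # yield from replaces: for chunk in response: yield chunk
    yield from requests.post(url, headers=headers, json=json_data, stream=True)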
g4f/Provider/Providers/Vercel.py
CHANGED
@@ -72,7 +72,7 @@ class Client:
         return {key: param['value'] for key, param in vercel_models[model_id]['parameters'].items()}
 
     def generate(self, model_id: str, prompt: str, params: dict = {}):
-        if
+        if ':' not in model_id:
            model_id = models[model_id]
 
         defaults = self.get_default_params(model_id)
@@ -145,16 +145,13 @@ class Client:
 
 def _create_completion(model: str, messages: list, stream: bool, **kwargs):
     conversation = 'This is a conversation between a human and a language model, respond to the last message accordingly, referring to the past history of messages if needed.\n'
-
+
     for message in messages:
         conversation += '%s: %s\n' % (message['role'], message['content'])
-
+
     conversation += 'assistant: '
-
-    completion = Client().generate(model, conversation)
 
-
-    yield token
+    yield from Client().generate(model, conversation)
 
 params = f'g4f.Providers.{os.path.basename(__file__)[:-3]} supports: ' + \
     '(%s)' % ', '.join([f"{name}: {get_type_hints(_create_completion)[name].__name__}" for name in _create_completion.__code__.co_varnames[:_create_completion.__code__.co_argcount]])
g4f/Provider/Providers/Yqcloud.py
CHANGED
@@ -15,7 +15,7 @@ def _create_completion(model: str, messages: list, stream: bool, **kwargs):
     }
 
     json_data = {
-        'prompt':
+        'prompt': f"{messages[-1]['content']}",
         'userId': f'#/chat/{int(time.time() * 1000)}',
         'network': True,
         'apikey': '',
@@ -25,7 +25,7 @@ def _create_completion(model: str, messages: list, stream: bool, **kwargs):
 
     response = requests.post('https://api.aichatos.cloud/api/generateStream', headers=headers, json=json_data, stream=True)
     for token in response.iter_content(chunk_size=2046):
-        if
+        if b'always respond in english' not in token:
            yield (token.decode('utf-8'))
 
 params = f'g4f.Providers.{os.path.basename(__file__)[:-3]} supports: ' + \
g4f/Provider/Providers/helpers/bing.py
CHANGED
@@ -76,19 +76,9 @@ def format(msg: dict) -> str:
 
 def get_token():
     return
-
-    try:
-        cookies = {c.name: c.value for c in browser_cookie3.edge(domain_name='bing.com')}
-        return cookies['_U']
-    except:
-        print('Error: could not find bing _U cookie in edge browser.')
-        exit(1)
 
 class AsyncCompletion:
-    async def create(
-        prompt : str = None,
-        optionSets : list = None,
-        token : str = None): # No auth required anymore
+    async def create(self, optionSets : list = None, token : str = None): # No auth required anymore
 
         create = None
         for _ in range(5):
@@ -128,9 +118,7 @@ class AsyncCompletion:
 
             except Exception as e:
                 time.sleep(0.5)
-
-
-        if create == None: raise Exception('Failed to create conversation.')
+        if create is None: raise Exception('Failed to create conversation.')
 
         wss: websockets.WebSocketClientProtocol or None = None
 
@@ -165,48 +153,46 @@ class AsyncCompletion:
         struct = {
            'arguments': [
                {
-                    'source': 'cib',
-                    'optionsSets': optionSets,
-                    'isStartOfSession': True,
+                    'source': 'cib',
+                    'optionsSets': optionSets,
+                    'isStartOfSession': True,
                    'message': {
-                        'author': 'user',
-                        'inputMethod': 'Keyboard',
-                        'text':
-                        'messageType': 'Chat'
-                    },
-                    'conversationSignature': conversationSignature,
-                    'participant': {
-
-                    },
-                    'conversationId': conversationId
+                        'author': 'user',
+                        'inputMethod': 'Keyboard',
+                        'text': self,
+                        'messageType': 'Chat',
+                    },
+                    'conversationSignature': conversationSignature,
+                    'participant': {'id': clientId},
+                    'conversationId': conversationId,
                }
-            ],
-            'invocationId': '0',
-            'target': 'chat',
-            'type': 4
+            ],
+            'invocationId': '0',
+            'target': 'chat',
+            'type': 4,
        }
-
+
        await wss.send(format(struct))
-
+
        base_string = ''
-
+
        final = False
        while not final:
            objects = str(await wss.recv()).split('\x1e')
            for obj in objects:
                if obj is None or obj == '':
                    continue
-
+
                response = json.loads(obj)
                if response.get('type') == 1 and response['arguments'][0].get('messages',):
                    response_text = response['arguments'][0]['messages'][0]['adaptiveCards'][0]['body'][0].get('text')
-
+
                    yield (response_text.replace(base_string, ''))
                    base_string = response_text
-
+
                elif response.get('type') == 2:
                    final = True
-
+
        await wss.close()
 
 async def run(optionSets, messages):
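The first bing.py hunk is dead-code elimination: get_token() starts with a bare return (cookie auth was already disabled, per the "No auth required anymore" comment), so the cookie lookup below it could never run and the refactor drops it. The later hunks mostly reflow the request struct and collapse the participant dict onto one line; note that the rewritten create() signature now carries the prompt in its first positional slot (spelled self), which is apparently why the struct sends 'text': self. A toy illustration of the unreachable-code case only:

def get_token():
    # a bare return makes everything below unreachable, so it can be deleted
    return
    print("never runs")  # dead code: removed in the refactored version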
g4f/__init__.py
CHANGED
@@ -37,13 +37,13 @@ class ChatCompletion:
         try:
             if isinstance(model, str):
                 model = Utils.convert[model]
-
+
             engine = model.best_site if not provider else provider
-            if not engine.supports_stream and stream
+            if not engine.supports_stream and stream:
                 print(
                     f"ValueError: {engine.__name__} does not support 'stream' argument", file=sys.stderr)
                 sys.exit(1)
-
+
             return (engine._create_completion(model.name, messages, stream, **kwargs)
                     if stream else ''.join(engine._create_completion(model.name, messages, stream, **kwargs)))
 
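The __init__.py return dispatches on stream: when streaming is requested the provider's generator is returned as-is, otherwise its chunks are joined into one string. A compact sketch of that dispatch, with a stand-in generator in place of engine._create_completion:

def fake_completion():
    # stand-in for engine._create_completion(...)
    yield 'Hel'
    yield 'lo'

def create(stream: bool = False):
    # generator when streaming, joined str otherwise
    return fake_completion() if stream else ''.join(fake_completion())

print(create(stream=False))       # 'Hello'
print(list(create(stream=True)))  # ['Hel', 'lo']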
g4f/typing.py
CHANGED
@@ -3,13 +3,13 @@ from typing import Dict, NewType, Union, Optional, List, get_type_hints
 sha256 = NewType('sha_256_hash', str)
 
 class MetaModels(type):
-    def __str__(
+    def __str__(self):
         output: List = [
             f'class Engines:\n',
-            f' class {
-            ' ...',
-            f' class {cls.gpt_4.__name__}:',
-            ' ...'
+            f' class {self.gpt_35_turbo.__name__}:',
+            ' ...',
+            f' class {self.gpt_4.__name__}:',
+            ' ...',
         ]
-
+
         return '\n'.join(output)
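typing.py defines __str__ on a metaclass, so str() of the models class itself (not of an instance) produces the pretty listing; the refactor only renames the method's first argument and builds the lines from that argument's attributes. A toy version of the idea, with hypothetical names:

class Meta(type):
    def __str__(cls):
        # called for str(SomeClass) when SomeClass is built from this metaclass
        return f'class {cls.__name__}: ...'

class Engines(metaclass=Meta):
    pass

print(str(Engines))  # -> 'class Engines: ...'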
g4f/utils.py
CHANGED
@@ -12,37 +12,28 @@ class Utils:
         browser_cookie3.vivaldi, # 0.32% market share
     ]
 
-    def get_cookies(
+    def get_cookies(self, setName: str = None, setBrowser: str = False) -> dict:
         cookies = {}
-
-
-
-
-
-
-
-                        cookies = cookies | {c.name: c.value}
-
-            except Exception as e:
-                pass
-
-        else:
-            for browser in Utils.browsers:
+
+        for browser in Utils.browsers:
+            if (
+                setBrowser != False
+                and browser.__name__ == setBrowser
+                or setBrowser == False
+            ):
                 try:
-                    for c in browser(domain_name=
+                    for c in browser(domain_name=self):
                         if c.name not in cookies:
                             cookies = cookies | {c.name: c.value}
-
+
                 except Exception as e:
                     pass
-
-        if setName:
-            try:
-                return {setName: cookies[setName]}
-
-            except ValueError:
-                print(f'Error: could not find {setName} cookie in any browser.')
-                exit(1)
-
-        else:
+
+        if not setName:
             return cookies
+        try:
+            return {setName: cookies[setName]}
+
+        except ValueError:
+            print(f'Error: could not find {setName} cookie in any browser.')
+            exit(1)
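The utils.py rewrite folds the two copy-pasted browser loops (one for a specific setBrowser, one for "any browser") into a single loop with a combined condition, and turns the trailing if/else into an early return. A simplified sketch of just the guard-clause shape, operating on an already-collected cookie dict rather than the real browser lookup:

def pick_cookie(cookies: dict, name: str = None):
    # early return replaces: if name: ... else: return cookies
    if not name:
        return cookies
    try:
        return {name: cookies[name]}
    except KeyError:
        print(f'Error: could not find {name} cookie in any browser.')
        return None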
server/auto_proxy.py
CHANGED
@@ -4,20 +4,18 @@ import time
 import threading
 
 
-def fetch_proxies():
+def fetch_proxies():
     """Fetch a list of proxy servers from proxyscrape.com.
 
     Returns:
         list: A list of proxy servers in the format "IP:Port".
     """
-    url = "https://api.proxyscrape.com/v2/?request=displayproxies&protocol=http&timeout=10000&country=all&ssl=all&anonymity=all"
-    response = requests.get(url)
-    if response.status_code == 200:
-
-
-
-        print(f"Error fetching proxies: {response.status_code}")
-        return []
+    url = "https://api.proxyscrape.com/v2/?request=displayproxies&protocol=http&timeout=10000&country=all&ssl=all&anonymity=all"
+    response = requests.get(url)
+    if response.status_code == 200:
+        return response.text.split("\r\n")[:-1]
+    print(f"Error fetching proxies: {response.status_code}")
+    return []
 
 
 def test_proxy(proxy, prompt, timeout):
server/backend.py
CHANGED
@@ -79,20 +79,14 @@ def build_messages(jailbreak):
         f'{set_response_language(prompt)}'
     )
 
-
-    extra = []
-    if internet_access:
-        extra = fetch_search_results(prompt["content"])
-
+    extra = fetch_search_results(prompt["content"]) if internet_access else []
     # Initialize the conversation with the system message
     conversation = [{'role': 'system', 'content': system_message}]
 
     # Add extra results
     conversation += extra
 
-
-    jailbreak_instructions = isJailbreak(jailbreak)
-    if jailbreak_instructions:
+    if jailbreak_instructions := isJailbreak(jailbreak):
         conversation += jailbreak_instructions
 
     # Add the existing conversation and the prompt
@@ -143,8 +137,7 @@ def generate_stream(response, jailbreak):
             yield "Error: jailbreak failed. Try again."
             break
     else:
-
-        yield message
+        yield from response
 
 
 def response_jailbroken_success(response: str) -> bool:
@@ -164,13 +157,7 @@ def response_jailbroken_failed(response):
     :param response: Response string
     :return: Boolean indicating if the response has not been jailbroken
     """
-    if len(response) < 4:
-        return False
-
-    if not response.startswith("GPT:"):
-        return True
-    else:
-        return False
+    return False if len(response) < 4 else not response.startswith("GPT:")
 
 
 def set_response_language(prompt):
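backend.py compresses response_jailbroken_failed into one conditional expression. The returned value is equivalent to a single boolean test, which arguably reads even more directly; a sketch showing both spellings:

def jailbreak_failed(response: str) -> bool:
    # as refactored:
    #   return False if len(response) < 4 else not response.startswith("GPT:")
    # equivalent single expression:
    return len(response) >= 4 and not response.startswith("GPT:")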
server/website.py
CHANGED
@@ -26,8 +26,8 @@ class Website:
     }
 
     def _chat(self, conversation_id):
-        if
-            return redirect(
+        if '-' not in conversation_id:
+            return redirect('/chat')
 
         return render_template('index.html', chat_id=conversation_id)
 