tree3po committed on
Commit
f1ba107
1 Parent(s): ec9e02c

Update app.py

Files changed (1)
  1. app.py +16 -16
app.py CHANGED
@@ -100,11 +100,11 @@ def chat_inf_tree(system_prompt,prompt,history,client_choice,seed,temp,tokens,to
 
 
 def chat_inf_a(system_prompt,prompt,history,client_choice,seed,temp,tokens,top_p,rep_p,hid_val):
-    if len(client_choice)>=hid_val:
+    #if len(client_choice)>=hid_val:
         if system_prompt:
             system_prompt=f'{system_prompt}, '
         #client1=client_z[int(hid_val)-1]
-        client1=gr.load(models[0])
+        client1=gr.load("models/" + models[0])
         if not history:
             history = []
             hist_len=0
@@ -125,16 +125,16 @@ def chat_inf_a(system_prompt,prompt,history,client_choice,seed,temp,tokens,top_p
             yield [(prompt,output)]
         history.append((prompt,output))
         yield history
-    else:
-        yield None
+    #else:
+    #    yield None
 
 
 def chat_inf_b(system_prompt,prompt,history,client_choice,seed,temp,tokens,top_p,rep_p,hid_val):
-    if len(client_choice)>=hid_val:
+    #if len(client_choice)>=hid_val:
         if system_prompt:
             system_prompt=f'{system_prompt}, '
         #client2=client_z[int(hid_val)-1]
-        client2=gr.load(models[1])
+        client2=gr.load("models/" + models[1])
         if not history:
             history = []
             hist_len=0
@@ -155,15 +155,15 @@ def chat_inf_b(system_prompt,prompt,history,client_choice,seed,temp,tokens,top_p
             yield [(prompt,output)]
         history.append((prompt,output))
         yield history
-    else:
-        yield None
+    #else:
+    #    yield None
 
 def chat_inf_c(system_prompt,prompt,history,client_choice,seed,temp,tokens,top_p,rep_p,hid_val):
-    if len(client_choice)>=hid_val:
+    #if len(client_choice)>=hid_val:
         if system_prompt:
             system_prompt=f'{system_prompt}, '
         #client3=client_z[int(hid_val)-1]
-        client3=gr.load(models[2])
+        client3=gr.load("models/" + models[2])
         if not history:
             history = []
             hist_len=0
@@ -184,15 +184,15 @@ def chat_inf_c(system_prompt,prompt,history,client_choice,seed,temp,tokens,top_p
             yield [(prompt,output)]
         history.append((prompt,output))
         yield history
-    else:
-        yield None
+    #else:
+    #    yield None
 
 def chat_inf_d(system_prompt,prompt,history,client_choice,seed,temp,tokens,top_p,rep_p,hid_val):
-    if len(client_choice)>=hid_val:
+    #if len(client_choice)>=hid_val:
         if system_prompt:
             system_prompt=f'{system_prompt}, '
         #client4=client_z[int(hid_val)-1]
-        client4=gr.load(models[3])
+        client4=gr.load("models/" + models[3])
         if not history:
             history = []
             hist_len=0
@@ -213,8 +213,8 @@ def chat_inf_d(system_prompt,prompt,history,client_choice,seed,temp,tokens,top_p
             yield [(prompt,output)]
         history.append((prompt,output))
         yield history
-    else:
-        yield None
+    #else:
+    #    yield None
 def add_new_model(inp, cur):
     cur.append(inp)
     return cur,gr.update(choices=[z for z in cur])
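
A note on the gr.load change: in Gradio, gr.load() resolves a "models/<repo_id>" name to a demo backed by the Hugging Face Inference API, while a bare repo id generally needs the source passed explicitly via src=. Below is a minimal sketch of the pattern this commit switches to, assuming a recent Gradio release; the model id and the one-entry models list are placeholders for illustration, not the Space's actual list (which app.py defines elsewhere).

    # Sketch of the gr.load pattern used in this commit (placeholder model id).
    import gradio as gr

    models = ["gpt2"]  # hypothetical entry for illustration

    # Prefix form, as used above: "models/<repo_id>" tells gr.load to build
    # the demo from the Hugging Face Inference API.
    client1 = gr.load("models/" + models[0])

    # Equivalent explicit-source form.
    client1 = gr.load(models[0], src="models")

Either form returns a loaded interface whose prediction function the chat_inf_* generators can call; the prefix form simply folds the source into the name string, which is why only the string passed to gr.load needed to change here.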