tree3po committed on
Commit
3208b6e
1 Parent(s): 6bcbff1

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +20 -20
app.py CHANGED
@@ -70,8 +70,8 @@ def format_prompt_choose(message, history, model_name, new_models=None):
70
  mega_hist=[[],[],[],[]]
71
  def chat_inf_tree(system_prompt,prompt,history,client_choice,seed,temp,tokens,top_p,rep_p,hid_val):
72
  if len(client_choice)>=hid_val:
73
- #client=client_z[int(hid_val)-1]
74
- client = gr.load()
75
  if history:
76
  mega_hist[hid_val-1]=history
77
  #history = []
@@ -100,7 +100,7 @@ def chat_inf_tree(system_prompt,prompt,history,client_choice,seed,temp,tokens,to
100
 
101
 
102
  def chat_inf_a(system_prompt,prompt,history,client_choice,seed,temp,tokens,top_p,rep_p,hid_val):
103
- #if len(client_choice)>=hid_val:
104
  if system_prompt:
105
  system_prompt=f'{system_prompt}, '
106
  #client1=client_z[int(hid_val)-1]
@@ -125,16 +125,16 @@ def chat_inf_a(system_prompt,prompt,history,client_choice,seed,temp,tokens,top_p
125
  yield [(prompt,output)]
126
  history.append((prompt,output))
127
  yield history
128
- #else:
129
- # yield None
130
 
131
 
132
  def chat_inf_b(system_prompt,prompt,history,client_choice,seed,temp,tokens,top_p,rep_p,hid_val):
133
- #if len(client_choice)>=hid_val:
134
  if system_prompt:
135
  system_prompt=f'{system_prompt}, '
136
- #client2=client_z[int(hid_val)-1]
137
- client2=gr.load("models/" + models[1])
138
  if not history:
139
  history = []
140
  hist_len=0
@@ -155,15 +155,15 @@ def chat_inf_b(system_prompt,prompt,history,client_choice,seed,temp,tokens,top_p
155
  yield [(prompt,output)]
156
  history.append((prompt,output))
157
  yield history
158
- #else:
159
- # yield None
160
 
161
  def chat_inf_c(system_prompt,prompt,history,client_choice,seed,temp,tokens,top_p,rep_p,hid_val):
162
- #if len(client_choice)>=hid_val:
163
  if system_prompt:
164
  system_prompt=f'{system_prompt}, '
165
- #client3=client_z[int(hid_val)-1]
166
- client3=gr.load("models/" + models[2])
167
  if not history:
168
  history = []
169
  hist_len=0
@@ -184,15 +184,15 @@ def chat_inf_c(system_prompt,prompt,history,client_choice,seed,temp,tokens,top_p
184
  yield [(prompt,output)]
185
  history.append((prompt,output))
186
  yield history
187
- #else:
188
- # yield None
189
 
190
  def chat_inf_d(system_prompt,prompt,history,client_choice,seed,temp,tokens,top_p,rep_p,hid_val):
191
- #if len(client_choice)>=hid_val:
192
  if system_prompt:
193
  system_prompt=f'{system_prompt}, '
194
- #client4=client_z[int(hid_val)-1]
195
- client4=gr.load("models/" + models[3])
196
  if not history:
197
  history = []
198
  hist_len=0
@@ -213,8 +213,8 @@ def chat_inf_d(system_prompt,prompt,history,client_choice,seed,temp,tokens,top_p
213
  yield [(prompt,output)]
214
  history.append((prompt,output))
215
  yield history
216
- #else:
217
- # yield None
218
def add_new_model(inp, cur):
    """Add the model name *inp* to the running model list *cur*.

    Returns the updated list together with a Gradio component update
    that refreshes the dropdown's choices to the new list contents.
    """
    cur.append(inp)
    return cur, gr.update(choices=list(cur))
 
70
  mega_hist=[[],[],[],[]]
71
  def chat_inf_tree(system_prompt,prompt,history,client_choice,seed,temp,tokens,top_p,rep_p,hid_val):
72
  if len(client_choice)>=hid_val:
73
+ client=client_z[int(hid_val)-1]
74
+ #client = gr.load()
75
  if history:
76
  mega_hist[hid_val-1]=history
77
  #history = []
 
100
 
101
 
102
  def chat_inf_a(system_prompt,prompt,history,client_choice,seed,temp,tokens,top_p,rep_p,hid_val):
103
+ if len(client_choice)>=hid_val:
104
  if system_prompt:
105
  system_prompt=f'{system_prompt}, '
106
  #client1=client_z[int(hid_val)-1]
 
125
  yield [(prompt,output)]
126
  history.append((prompt,output))
127
  yield history
128
+ else:
129
+ yield None
130
 
131
 
132
  def chat_inf_b(system_prompt,prompt,history,client_choice,seed,temp,tokens,top_p,rep_p,hid_val):
133
+ if len(client_choice)>=hid_val:
134
  if system_prompt:
135
  system_prompt=f'{system_prompt}, '
136
+ client2=client_z[int(hid_val)-1]
137
+ #client2=gr.load("models/" + models[1])
138
  if not history:
139
  history = []
140
  hist_len=0
 
155
  yield [(prompt,output)]
156
  history.append((prompt,output))
157
  yield history
158
+ else:
159
+ yield None
160
 
161
  def chat_inf_c(system_prompt,prompt,history,client_choice,seed,temp,tokens,top_p,rep_p,hid_val):
162
+ if len(client_choice)>=hid_val:
163
  if system_prompt:
164
  system_prompt=f'{system_prompt}, '
165
+ client3=client_z[int(hid_val)-1]
166
+ #client3=gr.load("models/" + models[2])
167
  if not history:
168
  history = []
169
  hist_len=0
 
184
  yield [(prompt,output)]
185
  history.append((prompt,output))
186
  yield history
187
+ else:
188
+ yield None
189
 
190
  def chat_inf_d(system_prompt,prompt,history,client_choice,seed,temp,tokens,top_p,rep_p,hid_val):
191
+ if len(client_choice)>=hid_val:
192
  if system_prompt:
193
  system_prompt=f'{system_prompt}, '
194
+ client4=client_z[int(hid_val)-1]
195
+ #client4=gr.load("models/" + models[3])
196
  if not history:
197
  history = []
198
  hist_len=0
 
213
  yield [(prompt,output)]
214
  history.append((prompt,output))
215
  yield history
216
+ else:
217
+ yield None
218
def add_new_model(inp, cur):
    """Append the newly entered model name *inp* onto the list *cur*.

    Returns a pair: the mutated list, and a Gradio update object whose
    ``choices`` mirror the list so the selector widget stays in sync.
    """
    cur.append(inp)
    return cur, gr.update(choices=list(cur))