Commit 5573aae

Fix arena (#2522)
1 parent c3ad73a commit 5573aae

2 files changed: +12 -9 lines changed

fastchat/serve/gradio_block_arena_anony.py

Lines changed: 11 additions & 8 deletions
@@ -161,22 +161,24 @@ def share_click(state0, state1, model_selector0, model_selector1, request: gr.Request)
     "gpt-3.5-turbo": 2,
     "claude-2": 2,
     "claude-instant-1": 2,
+    "deluxe-chat-v1": 4,
     # tire 1
     "palm-2": 1.5,
     "llama-2-70b-chat": 1.5,
     "llama-2-13b-chat": 1.5,
     "codellama-34b-instruct": 1.5,
     "vicuna-33b": 1.5,
     "vicuna-13b": 1.5,
-    "mpt-30b-chat": 1.5,
     "wizardlm-70b": 1.5,
     "wizardlm-13b": 1.5,
     # tier 2
-    "codellama-13b-instruct": 1.0,
     "vicuna-7b": 1.0,
     "llama-2-7b-chat": 1.0,
     "chatglm2-6b": 1.0,
+    "mistral-7b-instruct": 1.0,
     # deprecated
+    "codellama-13b-instruct": 1.0,
+    "mpt-30b-chat": 1.5,
     "guanaco-33b": 1.0,
     "fastchat-t5-3b": 0.5,
     "alpaca-13b": 0.5,
@@ -193,9 +195,6 @@ def share_click(state0, state1, model_selector0, model_selector1, request: gr.Request)
 
 SAMPLING_BOOST_MODELS = []
 
-model_pairs = []
-model_pairs_weights = []
-
 
 def add_text(
     state0, state1, model_selector0, model_selector1, text, request: gr.Request
@@ -208,7 +207,8 @@ def add_text(
     # Init states if necessary
     if states[0] is None:
         assert states[1] is None
-        global model_pairs, model_pairs_weights
+        model_pairs = []
+        model_pairs_weights = []
 
         # Pick two models
         if len(model_pairs) == 0:
@@ -226,9 +226,12 @@
 
             model_pairs_weights = model_pairs_weights / np.sum(model_pairs_weights)
             # for p, w in zip(model_pairs, model_pairs_weights):
-            # print(p, w)
+            #     print(p, w)
 
         if len(model_pairs) >= 1:
+            # if len(model_pairs) != len(model_pairs_weights):
+            #     print("model pairs", model_pairs, model_pairs_weights)
+            #     print("#model pairs", len(model_pairs), len(model_pairs_weights))
             idx = np.random.choice(len(model_pairs), p=model_pairs_weights)
             model_left, model_right = model_pairs[idx]
         else:
@@ -326,7 +329,7 @@ def bot_response_multi(
 ):
     logger.info(f"bot_response_multi (anony). ip: {request.client.host}")
 
-    if state0.skip_next:
+    if state0 is None or state0.skip_next:
         # This generate call is skipped due to invalid inputs
         yield (
             state0,
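
Taken together, the hunks above implement weighted sampling of anonymous battle pairs: each model carries a tier weight, every ordered pair of distinct models is weighted by the product of the two models' weights, the weights are normalized, and np.random.choice draws one pair. The sketch below is a minimal, self-contained illustration of that flow; the dict name SAMPLING_WEIGHTS, the default_weight fallback, and the helper sample_model_pair are assumptions for illustration, not necessarily the repository's exact names.

# Minimal sketch (not FastChat's exact code): weighted sampling of a model pair.
import numpy as np

# Illustrative tier weights, mirroring a few entries from the diff above.
SAMPLING_WEIGHTS = {
    "gpt-3.5-turbo": 2,
    "deluxe-chat-v1": 4,
    "palm-2": 1.5,
    "vicuna-13b": 1.5,
    "mistral-7b-instruct": 1.0,
    "alpaca-13b": 0.5,
}

def sample_model_pair(models, default_weight=1.0):
    # Build every ordered pair of distinct models, weighted by the product
    # of the two models' tier weights.
    model_pairs = []
    model_pairs_weights = []
    for i, left in enumerate(models):
        for j, right in enumerate(models):
            if i == j:
                continue
            weight = (SAMPLING_WEIGHTS.get(left, default_weight)
                      * SAMPLING_WEIGHTS.get(right, default_weight))
            model_pairs.append((left, right))
            model_pairs_weights.append(weight)
    # Normalize to a probability distribution, then draw one pair index.
    probs = np.array(model_pairs_weights, dtype=float)
    probs = probs / probs.sum()
    idx = np.random.choice(len(model_pairs), p=probs)
    return model_pairs[idx]

print(sample_model_pair(["gpt-3.5-turbo", "vicuna-13b", "alpaca-13b"]))

Note that the commit also builds model_pairs and model_pairs_weights locally inside add_text and drops the module-level globals, so each session constructs its own pair list instead of sharing mutable module state; the added state0 is None check in bot_response_multi guards the same code path against uninitialized state.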

fastchat/serve/gradio_web_server.py

Lines changed: 1 addition & 1 deletion
@@ -405,7 +405,7 @@ def bot_response(state, temperature, top_p, max_new_tokens, request: gr.Request)
     try:
         for i, data in enumerate(stream_iter):
             if data["error_code"] == 0:
-                if i % 5 != 0:  # reduce gradio's overhead
+                if i % 8 != 0:  # reduce gradio's overhead
                     continue
                 output = data["text"].strip()
                 conv.update_last_message(output + "▌")
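
The second file's change only widens the stride at which streamed chunks are pushed to the Gradio UI, from every 5th to every 8th chunk. Below is a stripped-down sketch of the same throttling idea, with a stand-in stream of dict chunks shaped like the ones in the diff; the generator name and the final flush are illustrative, not FastChat's API.

# Minimal sketch (not FastChat's exact code): throttle streamed UI updates.
def throttled_updates(stream_iter, stride=8):
    output = ""
    for i, data in enumerate(stream_iter):
        if data["error_code"] != 0:
            break
        output = data["text"].strip()
        if i % stride != 0:  # skip most intermediate chunks, like the diff's `i % 8`
            continue
        yield output + "▌"  # partial text with a cursor marker
    yield output  # always flush the final text without the cursor

chunks = [{"error_code": 0, "text": f"token {i}"} for i in range(20)]
for text in throttled_updates(iter(chunks)):
    print(text)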
