import gradio as gr
import requests
import os
import json
import random
import threading
from elo import update_elo_ratings # Custom function for ELO ratings
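# `update_elo_ratings(ratings, winner, loser)` is assumed to return the ratings dict
# with both models adjusted by a standard Elo update, e.g. with K = 32:
#     expected_winner = 1 / (1 + 10 ** ((ratings[loser] - ratings[winner]) / 400))
#     ratings[winner] += K * (1 - expected_winner)
#     ratings[loser]  -= K * (1 - expected_winner)
# The exact K-factor and any rounding live in elo.py, which is not shown here.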
# Load the chatbot URLs and their respective model names from a JSON file
with open('chatbot_urls.json', 'r') as file:
    chatbots = json.load(file)
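# Assumed shape of chatbot_urls.json (illustrative values only, not the real file):
# a flat mapping of display model name -> inference endpoint URL, e.g.
# {
#     "model-a": "https://api.runpod.ai/v2/<endpoint-id>/runsync",
#     "model-b": "https://api.runpod.ai/v2/<endpoint-id>/runsync"
# }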
# Thread-local storage for user-specific data
user_data = threading.local()

# Single shared lock so concurrent requests don't interleave reads/writes of elo_ratings.json
elo_lock = threading.Lock()
# Initialize or get user-specific ELO ratings.
# Each worker thread caches its own copy; the JSON file is the shared source of truth.
def get_user_elo_ratings():
    if not hasattr(user_data, 'elo_ratings'):
        user_data.elo_ratings = read_elo_ratings()
    return user_data.elo_ratings

# Read ELO ratings from file (thread-safe); default every model to 1200 if no file exists yet
def read_elo_ratings():
    with elo_lock:
        try:
            with open('elo_ratings.json', 'r') as file:
                elo_ratings = json.load(file)
        except FileNotFoundError:
            elo_ratings = {model: 1200 for model in chatbots.keys()}
    return elo_ratings

# Write ELO ratings to file (thread-safe)
def write_elo_ratings(elo_ratings):
    with elo_lock:
        with open('elo_ratings.json', 'w') as file:
            json.dump(elo_ratings, file, indent=4)
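# elo_ratings.json therefore ends up as a flat name -> rating mapping, e.g.
# {"model-a": 1216, "model-b": 1184} (values here are made up for illustration).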
# POST a prompt to a single model endpoint and return the parsed JSON response
def get_bot_response(url, prompt):
    payload = {
        "input": {
            "prompt": prompt,
            "sampling_params": {
                "max_new_tokens": 16,
                "temperature": 0.7,
            }
        }
    }
    headers = {
        "accept": "application/json",
        "content-type": "application/json",
        "authorization": os.environ.get("RUNPOD_TOKEN")
    }
    response = requests.post(url, json=payload, headers=headers)
    return response.json()
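# Assumption: the endpoint's JSON reply contains at least an 'output' field with the
# generated text and a 'model_name' field identifying the model; the callers below
# rely on both keys, so adjust them if your worker returns a different schema.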
def chat_with_bots(user_input):
    bot_names = list(chatbots.keys())
    random.shuffle(bot_names)
    bot1_url, bot2_url = chatbots[bot_names[0]], chatbots[bot_names[1]]
    bot1_response = get_bot_response(bot1_url, user_input)
    bot2_response = get_bot_response(bot2_url, user_input)
    return bot1_response, bot2_response
# Apply an Elo update for the last bot pair shown, persist it, and report the new ratings
def update_ratings(state, winner_index):
    elo_ratings = get_user_elo_ratings()
    winner = state['last_bots'][winner_index]
    loser = state['last_bots'][1 - winner_index]
    elo_ratings = update_elo_ratings(elo_ratings, winner, loser)
    write_elo_ratings(elo_ratings)
    return f"Updated ELO ratings:\n{winner}: {elo_ratings[winner]}\n{loser}: {elo_ratings[loser]}"

def vote_up_model(state, chatbot):
    update_message = update_ratings(state, 0)
    chatbot.append((None, update_message))  # show the rating update as a bot-only message
    return chatbot

def vote_down_model(state, chatbot):
    update_message = update_ratings(state, 1)
    chatbot.append((None, update_message))
    return chatbot
# Handle a user prompt: query both bots and append the exchange to each chat window
def user_ask(state, chatbot1, chatbot2, textbox):
    user_input = textbox  # Gradio passes the Textbox *value* (a string) into the handler
    bot1_response, bot2_response = chat_with_bots(user_input)
    chatbot1 = chatbot1 or []
    chatbot2 = chatbot2 or []
    chatbot1.append((user_input, bot1_response['output']))
    chatbot2.append((user_input, bot2_response['output']))
    state['last_bots'] = [bot1_response['model_name'], bot2_response['model_name']]
    return state, chatbot1, chatbot2, ""
# ... [Rest of your existing functions] ...
with gr.Blocks() as demo:
    with gr.Row():
        with gr.Column(scale=0.5):
            with gr.Row() as button_row:
                upvote_btn = gr.Button(value="<= Vote for A", interactive=False)
                downvote_btn = gr.Button(value="=> Vote for B", interactive=False)
                tie_btn = gr.Button(value="🤝 Tie", interactive=False)
                clear_btn = gr.Button(value="🗑️ Clear", interactive=False)
        with gr.Column():
            chatbot1 = gr.Chatbot(label='Model A')
            chatbot2 = gr.Chatbot(label='Model B')
    with gr.Row():
        with gr.Column(scale=8):
            textbox = gr.Textbox(placeholder="Enter your prompt and press ENTER")
        with gr.Column(scale=1, min_width=60):
            submit_btn = gr.Button(value="Send")
demo.launch(share=True, server_name='0.0.0.0', server_port=7860)