# leaderboard/src/pages/submit.py
"""Gradio page for submitting models to the evaluation queue."""

import gradio as gr

from src.display.utils import EVAL_COLS, EVAL_TYPES
from src.envs import EVAL_REQUESTS_PATH
from src.populate import get_evaluation_queue_df
from src.submission.submit import add_new_eval


def show_submit_page(index: int):
    # Load the current state of the evaluation queue; the running queue is
    # fetched but not displayed on this page.
    (
        finished_eval_queue_df,
        running_eval_queue_df,
        pending_eval_queue_df,
    ) = get_evaluation_queue_df(EVAL_REQUESTS_PATH, EVAL_COLS)

    with gr.TabItem("🚀 Submit! ", elem_id="llm-benchmark-tab-table", id=index):
        # Collapsible views of the finished and pending evaluation queues.
        with gr.Column():
            with gr.Accordion(
                f"✅ Finished Evaluations ({len(finished_eval_queue_df)})",
                open=False,
            ):
                with gr.Row():
                    finished_eval_table = gr.components.Dataframe(
                        value=finished_eval_queue_df,
                        headers=EVAL_COLS,
                        datatype=EVAL_TYPES,
                        row_count=5,
                    )
            with gr.Accordion(
                f"⏳ Pending Evaluation Queue ({len(pending_eval_queue_df)})",
                open=False,
            ):
                with gr.Row():
                    pending_eval_table = gr.components.Dataframe(
                        value=pending_eval_queue_df,
                        headers=EVAL_COLS,
                        datatype=EVAL_TYPES,
                        row_count=5,
                    )

        # Model submission form: the model repo and its release blog / technical
        # report are passed to add_new_eval, and the result message is rendered
        # in the Markdown component below the button.
        with gr.Row():
            gr.Markdown("# ✉️✨ Submit your model!", elem_classes="markdown-text")
        with gr.Row():
            with gr.Column():
                model_name_textbox = gr.Textbox(label="Huggingface Model")
                link_to_model_blog = gr.Textbox(label="Model release blog / Technical report")

        submit_button = gr.Button("Submit Model")
        submission_result = gr.Markdown()
        submit_button.click(
            add_new_eval,
            [
                model_name_textbox,
                link_to_model_blog,
            ],
            submission_result,
        )

        with gr.Row():
            gr.Markdown(
                '# ✉️✨ Submit your task <a href="https://github.com">here!</a>',
                elem_classes="markdown-text",
            )