from collections import Counter, defaultdict

import gradio as gr
from datasets import load_dataset
from huggingface_hub import list_datasets
from pandas import DataFrame

parti_prompt_results = []
ORG = "diffusers-parti-prompts"
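# Models being compared; each key is expected to match a dataset of pre-generated
# images under the ORG namespace (e.g. "diffusers-parti-prompts/sd-v1-5").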
SUBMISSIONS = {
    "sd-v1-5": None,
    "sd-v2-1": None,
    "if-v1-0": None,
    "karlo": None,
}
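# Model cards for the compared checkpoints.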
LINKS = {
    "sd-v1-5": "https://huggingface.co/runwayml/stable-diffusion-v1-5",
    "sd-v2-1": "https://huggingface.co/stabilityai/stable-diffusion-2-1",
    "if-v1-0": "https://huggingface.co/DeepFloyd/IF-I-XL-v1.0",
    "karlo": "https://huggingface.co/kakaobrain/karlo-v1-alpha",
}
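# Vote results are uploaded as datasets to an org whose name encodes every
# compared model (e.g. "results-sd-v1-5-sd-v2-1-if-v1-0-karlo").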
MODEL_KEYS = "-".join(SUBMISSIONS.keys())
SUBMISSION_ORG = f"results-{MODEL_KEYS}"

submission_names = list(SUBMISSIONS.keys())

# The prompt Category/Challenge metadata is loaded once from the "sd-v1-5"
# dataset and used as the reference for all submissions.
reference_ds = load_dataset(f"{ORG}/sd-v1-5")["train"]
parti_prompt_categories = reference_ds["Category"]
parti_prompt_challenge = reference_ds["Challenge"]


def load_submissions():
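    """Aggregate every vote dataset uploaded to the submission org.

    Returns a tuple of:
      - total vote count per model,
      - per-challenge Counters of votes,
      - per-category Counters of votes.
    """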
    all_datasets = list_datasets(author=SUBMISSION_ORG)
    relevant_ids = [d.id for d in all_datasets]
    
    ids = defaultdict(list)
    challenges = defaultdict(list)
    categories = defaultdict(list)

    for _id in relevant_ids:
        ds = load_dataset(_id)["train"]
        for result, image_id in zip(ds["result"], ds["id"]):
            ids[result].append(image_id)
            challenges[parti_prompt_challenge[image_id]].append(result)
            categories[parti_prompt_categories[image_id]].append(result)
    
    main_dict = {k: len(v) for k, v in ids.items()}
    challenges = {k: Counter(v) for k, v in challenges.items()}
    categories = {k: Counter(v) for k, v in categories.items()}

    return main_dict, challenges, categories

def get_dataframe_all():
    main, challenges, categories = load_submissions()
    # Minimal completion of this stub: a single-row frame with the total vote
    # count per model; challenge and category breakdowns are not displayed yet.
    counts = [[main.get(name, 0) for name in submission_names]]
    return DataFrame(counts, columns=submission_names)

TITLE = "# Community Parti Prompts - Which open-source genAI model is your favorite?"
DESCRIPTION = """
*This is an interactive game in which you click through pre-generated images from SD v1.5, SD v2.1, Karlo, and IF,
using prompts from [Parti Prompts](https://huggingface.co/datasets/nateraw/parti-prompts).* \n
*Your choices will go into the public community [genAI leaderboard](TODO).*
"""
EXPLANATION = """\n\n
## How it works 📖 \n\n

1. Click on 'Start'
2. A prompt and 4 different images are displayed
3. Select your favorite image
4. After 10 rounds your favorite diffusion model is displayed
"""

GALLERY_COLUMN_NUM = len(SUBMISSIONS)

with gr.Blocks() as demo:
    with gr.Column(visible=True) as intro_view:
        gr.Markdown(TITLE)
        gr.Markdown(DESCRIPTION)
        gr.Markdown(EXPLANATION)

    headers = list(SUBMISSIONS.keys())
    datatype = "str"

    # Read-only table with the current aggregated vote counts per model.
    main_frame = get_dataframe_all()

    main_dataframe = gr.Dataframe(
        headers=headers,
        datatype=datatype,
        value=main_frame,
        row_count=1,
        col_count=len(SUBMISSIONS),
        interactive=False,
    )

demo.launch()