from gradio_client import Client, handle_file
from huggingface_hub import HfApi
import time
import gradio as gr
api = HfApi()

repo_ids = ["HuggingFaceH4/idefics2-8b-playground"]

# Restart any Space that is not already running or starting up.
for repo_id in repo_ids:
    if api.space_info(repo_id).runtime.stage not in ["RUNNING", "APP_STARTING", "RUNNING_APP_STARTING"]:
        api.restart_space(repo_id=repo_id)

# Block until every Space reports the RUNNING stage before creating a client.
for repo_id in repo_ids:
    while api.space_info(repo_id).runtime.stage != "RUNNING":
        time.sleep(1)

# Reuse a single client for all requests to the playground Space.
client_idefics2 = Client("HuggingFaceH4/idefics2-8b-playground")


def respond(multimodal_input):
    # Forward the prompt text and any attached images to the Space's /predict endpoint.
    x = {"text": multimodal_input["text"], "files": [handle_file(file) for file in multimodal_input["files"]]}
    text_1 = client_idefics2.predict(x, api_name="/predict")
    return text_1


gr.Interface(
    respond,
    title="Compare IDEFICS2-8B Against DPO",
    description="Chatchat made by Peterpeter8585",
    inputs=[gr.MultimodalTextbox(file_types=["image"], show_label=False)],
    outputs=[gr.Textbox(label="idefics2-8b")],
    examples=[{"text": "What is the type of flower in the image and what insect is on it?", "files": ["./bee.jpg"]},
             {"text": "Describe the image", "files": ["./howl.jpg"]}],
).launch(show_error=True)
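
# A minimal sketch of querying the same /predict endpoint directly with
# gradio_client, useful for smoke-testing the Space without launching the UI.
# The image path "./bee.jpg" mirrors the examples above; any local image works.
#
# from gradio_client import Client, handle_file
# client = Client("HuggingFaceH4/idefics2-8b-playground")
# result = client.predict(
#     {"text": "Describe the image", "files": [handle_file("./bee.jpg")]},
#     api_name="/predict",
# )
# print(result)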