akhaliq committed on
Commit 7dc82f0 · 1 Parent(s): 04085ad

use gemini for now

Files changed (2)
  1. app.py +1 -3
  2. utils.py +4 -18
app.py CHANGED
@@ -18,7 +18,6 @@ from app_openai import demo as demo_openai
 from app_paligemma import demo as demo_paligemma
 from app_perplexity import demo as demo_perplexity
 from app_playai import demo as demo_playai
-from app_qwen import demo as demo_qwen
 from app_replicate import demo as demo_replicate
 from app_sambanova import demo as demo_sambanova
 from app_showui import demo as demo_showui
@@ -29,7 +28,6 @@ from utils import get_app
 
 # Create mapping of providers to their demos
 PROVIDERS = {
-    "Qwen": demo_qwen,
     "Gemini": demo_gemini,
     "OpenAI Voice": demo_openai_voice,
     "Gemini Voice": demo_gemini_voice,
@@ -58,7 +56,7 @@ PROVIDERS = {
     "NVIDIA": demo_nvidia,
 }
 
-demo = get_app(models=list(PROVIDERS.keys()), default_model="Qwen", src=PROVIDERS, dropdown_label="Select Provider")
+demo = get_app(models=list(PROVIDERS.keys()), default_model="Gemini", src=PROVIDERS, dropdown_label="Select Provider")
 
 if __name__ == "__main__":
     demo.queue(api_open=False).launch(show_api=False)
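After this commit, app.py boils down to the wiring below. This is a minimal, self-contained sketch, not the repo file: the placeholder Blocks stands in for the real app_gemini import, and every provider other than Gemini is elided.

```python
import gradio as gr

from utils import get_app

# Placeholder for the prebuilt demo that app.py actually imports
# (e.g. `from app_gemini import demo as demo_gemini`).
with gr.Blocks() as demo_gemini:
    gr.Markdown("Gemini demo placeholder")

# Provider display name -> prebuilt gr.Blocks. "Qwen" is no longer registered,
# so the default passed to get_app has to be another key, hence "Gemini".
PROVIDERS = {
    "Gemini": demo_gemini,
}

demo = get_app(
    models=list(PROVIDERS.keys()),
    default_model="Gemini",
    src=PROVIDERS,
    dropdown_label="Select Provider",
)

if __name__ == "__main__":
    demo.queue(api_open=False).launch(show_api=False)
```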
utils.py CHANGED
@@ -21,24 +21,10 @@ def get_app(
         for model_name in models:
             with gr.Column(visible=model_name == default_model) as column:
                 if isinstance(src, dict):
-                    if ':' in model_name:
-                        block = src[model_name]
-                    else:
-                        model_key = f"qwen:{model_name}"
-                        block = src.get(model_key, src.get(model_name))
-
-                    if isinstance(block, gr.Blocks):
-                        block.render()
-                    else:
-                        # Handle the case where block is a function by passing the model name
-                        block(name=model_name).render()
+                    src[model_name].render()
                 else:
-                    # If src is a function, call it directly with the model name
-                    if callable(src):
-                        src(model_name, accept_token).render()
-                    else:
-                        gr.load(name=model_name, src=src, accept_token=accept_token, **kwargs)
-            columns.append(column)
+                    gr.load(name=model_name, src=src, accept_token=accept_token, **kwargs)
+            columns.append(column)
 
         model.change(
             fn=update_model,
@@ -51,4 +37,4 @@
     for fn in demo.fns.values():
         fn.api_name = False
 
-    return demo
+    return demo
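For reference, the simplified get_app that these hunks leave behind looks roughly like the sketch below. Only the lines inside the hunks are taken from the commit; the gr.Blocks wrapper, the Dropdown, the function signature, and the update_model callback are assumptions inferred from how the visible lines use `model`, `columns`, and `demo`.

```python
import gradio as gr


def get_app(models, default_model, src, accept_token=False,
            dropdown_label="Select Provider", **kwargs):
    # Assumed callback: show only the column whose provider matches the selection.
    def update_model(new_model):
        return [gr.Column(visible=model_name == new_model) for model_name in models]

    with gr.Blocks() as demo:
        model = gr.Dropdown(choices=models, value=default_model, label=dropdown_label)

        columns = []
        for model_name in models:
            with gr.Column(visible=model_name == default_model) as column:
                if isinstance(src, dict):
                    # After this commit: src maps provider name -> prebuilt gr.Blocks,
                    # so the block is rendered directly, with no Qwen-specific lookup.
                    src[model_name].render()
                else:
                    # Otherwise src is a source string for gr.load().
                    gr.load(name=model_name, src=src, accept_token=accept_token, **kwargs)
            columns.append(column)

        # Toggle column visibility whenever the dropdown value changes.
        model.change(fn=update_model, inputs=model, outputs=columns)

    # Keep every registered event off the public API surface.
    for fn in demo.fns.values():
        fn.api_name = False

    return demo
```

The point of the change: with src as a dict of fully prebuilt gr.Blocks, get_app no longer needs the "qwen:"-prefixed key lookup or the callable fallback; it simply renders each provider's block and switches between columns from the dropdown.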