amirgame197 committed on
Commit
58c547c
·
verified ·
1 Parent(s): c5f15af

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +33 -3
app.py CHANGED
@@ -2,13 +2,19 @@ import gradio as gr
2
  from gradio_client import Client
3
  from huggingface_hub import InferenceClient
4
  import random
5
- from deep_translator import GoogleTranslator
6
 
7
  models=[
8
- "google/gemma-7b"
 
 
 
9
  ]
10
  clients=[
11
- InferenceClient(models[0])
 
 
 
12
  ]
13
 
14
  VERBOSE=False
@@ -85,6 +91,16 @@ def chat_inf(system_prompt,prompt,history,memory,client_choice,seed,temp,tokens,
85
  print("\n######### HIST "+str(in_len))
86
  print("\n######### TOKENS "+str(tokens))
87
 
 
 
 
 
 
 
 
 
 
 
88
  def clear_fn():
89
  return None,None,None,None
90
  rand_val=random.randint(1,1111111111111111)
@@ -123,14 +139,28 @@ with gr.Blocks() as app:
123
  top_p=gr.Slider(label="Top-P",step=0.01, minimum=0.01, maximum=1.0, value=0.9)
124
  rep_p=gr.Slider(label="Repetition Penalty",step=0.1, minimum=0.1, maximum=2.0, value=1.0)
125
  chat_mem=gr.Number(label="Chat Memory", info="Number of previous chats to retain",value=4)
 
 
 
 
 
 
 
 
 
 
 
 
126
 
127
 
128
  client_choice.change(load_models,client_choice,[chat_b])
129
  app.load(load_models,client_choice,[chat_b])
130
 
 
131
 
132
  chat_sub=inp.submit(check_rand,[rand,seed],seed).then(chat_inf,[sys_inp,inp,chat_b,memory,client_choice,seed,temp,tokens,top_p,rep_p,chat_mem,custom_prompt],[chat_b,memory])
133
  go=btn.click(check_rand,[rand,seed],seed).then(chat_inf,[sys_inp,inp,chat_b,memory,client_choice,seed,temp,tokens,top_p,rep_p,chat_mem,custom_prompt],[chat_b,memory])
134
 
 
135
  clear_btn.click(clear_fn,None,[inp,sys_inp,chat_b,memory])
136
  app.queue(default_concurrency_limit=10).launch()
 
2
  from gradio_client import Client
3
  from huggingface_hub import InferenceClient
4
  import random
5
+ ss_client = Client("https://omnibus-html-image-current-tab.hf.space/")
6
 
7
  models=[
8
+ "google/gemma-7b",
9
+ "google/gemma-7b-it",
10
+ "google/gemma-2b",
11
+ "google/gemma-2b-it"
12
  ]
13
  clients=[
14
+ InferenceClient(models[0]),
15
+ InferenceClient(models[1]),
16
+ InferenceClient(models[2]),
17
+ InferenceClient(models[3]),
18
  ]
19
 
20
  VERBOSE=False
 
91
  print("\n######### HIST "+str(in_len))
92
  print("\n######### TOKENS "+str(tokens))
93
 
94
+ def get_screenshot(chat: list,height=5000,width=600,chatblock=[],theme="light",wait=3000,header=True):
95
+ print(chatblock)
96
+ tog = 0
97
+ if chatblock:
98
+ tog = 3
99
+ result = ss_client.predict(str(chat),height,width,chatblock,header,theme,wait,api_name="/run_script")
100
+ out = f'https://omnibus-html-image-current-tab.hf.space/file={result[tog]}'
101
+ print(out)
102
+ return out
103
+
104
  def clear_fn():
105
  return None,None,None,None
106
  rand_val=random.randint(1,1111111111111111)
 
139
  top_p=gr.Slider(label="Top-P",step=0.01, minimum=0.01, maximum=1.0, value=0.9)
140
  rep_p=gr.Slider(label="Repetition Penalty",step=0.1, minimum=0.1, maximum=2.0, value=1.0)
141
  chat_mem=gr.Number(label="Chat Memory", info="Number of previous chats to retain",value=4)
142
+ with gr.Accordion(label="Screenshot",open=False):
143
+ with gr.Row():
144
+ with gr.Column(scale=3):
145
+ im_btn=gr.Button("Screenshot")
146
+ img=gr.Image(type='filepath')
147
+ with gr.Column(scale=1):
148
+ with gr.Row():
149
+ im_height=gr.Number(label="Height",value=5000)
150
+ im_width=gr.Number(label="Width",value=500)
151
+ wait_time=gr.Number(label="Wait Time",value=3000)
152
+ theme=gr.Radio(label="Theme", choices=["light","dark"],value="light")
153
+ chatblock=gr.Dropdown(label="Chatblocks",info="Choose specific blocks of chat",choices=[c for c in range(1,40)],multiselect=True)
154
 
155
 
156
  client_choice.change(load_models,client_choice,[chat_b])
157
  app.load(load_models,client_choice,[chat_b])
158
 
159
+ im_go=im_btn.click(get_screenshot,[chat_b,im_height,im_width,chatblock,theme,wait_time],img)
160
 
161
  chat_sub=inp.submit(check_rand,[rand,seed],seed).then(chat_inf,[sys_inp,inp,chat_b,memory,client_choice,seed,temp,tokens,top_p,rep_p,chat_mem,custom_prompt],[chat_b,memory])
162
  go=btn.click(check_rand,[rand,seed],seed).then(chat_inf,[sys_inp,inp,chat_b,memory,client_choice,seed,temp,tokens,top_p,rep_p,chat_mem,custom_prompt],[chat_b,memory])
163
 
164
+ stop_btn.click(None,None,None,cancels=[go,im_go,chat_sub])
165
  clear_btn.click(clear_fn,None,[inp,sys_inp,chat_b,memory])
166
  app.queue(default_concurrency_limit=10).launch()