Update app.py
app.py CHANGED
@@ -208,7 +208,7 @@ image_examples = [
     ["explain the movie shot in detail.", "images/5.jpg"],
     ["convert this page to doc [text] precisely for markdown.", "images/1.png"],
     ["convert this page to doc [table] precisely for markdown.", "images/2.png"],
-    ["explain the movie shot in detail.", "images/3.png"],
+    ["explain the movie shot in detail.", "images/3.png"],
     ["fill the correct numbers.", "images/4.png"]
 ]
 
@@ -231,6 +231,42 @@ css = """
     border-radius: 10px;
     padding: 20px;
 }
+.model-choices .gr-form:nth-child(1) label {
+    background-color: #e0f7fa !important;
+    border: 1px solid #00796b !important;
+    color: #00796b !important;
+}
+.model-choices .gr-form:nth-child(3) label {
+    background-color: #e0f7fa !important;
+    border: 1px solid #00796b !important;
+    color: #00796b !important;
+}
+.model-choices .gr-form:nth-child(2) label {
+    background-color: #fff3e0 !important;
+    border: 1px solid #f57c00 !important;
+    color: #f57c00 !important;
+}
+.model-choices .gr-form:nth-child(4) label {
+    background-color: #fff3e0 !important;
+    border: 1px solid #f57c00 !important;
+    color: #f57c00 !important;
+}
+.model-choices .gr-form:nth-child(1) label::after {
+    content: " (OCR)";
+    font-weight: bold;
+}
+.model-choices .gr-form:nth-child(3) label::after {
+    content: " (OCR)";
+    font-weight: bold;
+}
+.model-choices .gr-form:nth-child(2) label::after {
+    content: " (Reasoning)";
+    font-weight: bold;
+}
+.model-choices .gr-form:nth-child(4) label::after {
+    content: " (Reasoning)";
+    font-weight: bold;
+}
 """
 
 # Create the Gradio Interface
@@ -267,13 +303,14 @@ with gr.Blocks(css=css, theme="bethecloud/storj_theme") as demo:
         with gr.Column(elem_classes="canvas-output"):
             gr.Markdown("## Output")
            output = gr.Textbox(label="Raw Output Stream", interactive=False, lines=2)
-            with gr.Accordion("(Result.md)", open=False):
+            with gr.Accordion("(Result.md)", open=False):
                markdown_output = gr.Markdown(label="(Result.md)")
 
            model_choice = gr.Radio(
                choices=["Camel-Doc-OCR-062825", "GLM-4.1V-9B-Thinking", "Megalodon-OCR-Sync-0713", "ViLaSR-7B"],
                label="Select Model",
-                value="Camel-Doc-OCR-062825"
+                value="Camel-Doc-OCR-062825",
+                elem_classes="model-choices"
            )
            gr.Markdown("**Model Info 💻** | [Report Bug](https://huggingface.co/spaces/prithivMLmods/Multimodal-VLM-v1.0/discussions)")
 
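The two halves of this change work together: gr.Blocks(css=...) injects the stylesheet, and the new elem_classes="model-choices" attaches that class to the Radio wrapper so the .gr-form:nth-child(n) selectors can color each choice by position (choices 1 and 3, the OCR checkpoints, in teal; 2 and 4, the reasoning checkpoints, in orange) and append " (OCR)" / " (Reasoning)" suffixes via ::after, without touching the choice strings the backend receives. Below is a minimal, self-contained sketch of the same pattern, assuming, as the diff does, that ".gr-form:nth-child(n)" lines up with the n-th rendered radio option (this mapping depends on the Gradio version); the colors and model names are taken from the diff, everything else is illustrative.

import gradio as gr

# Sketch of the commit's pattern: custom CSS injected via Blocks(css=...) plus
# elem_classes on the Radio so the selectors can reach its options.
# Assumption (as in the diff): ".gr-form:nth-child(n)" targets the n-th choice
# in the rendered DOM; verify against the installed Gradio version.
css = """
.model-choices .gr-form:nth-child(1) label {
    background-color: #e0f7fa !important;  /* teal tint for the OCR model */
}
.model-choices .gr-form:nth-child(2) label {
    background-color: #fff3e0 !important;  /* orange tint for the reasoning model */
}
"""

with gr.Blocks(css=css) as demo:
    model_choice = gr.Radio(
        choices=["Camel-Doc-OCR-062825", "GLM-4.1V-9B-Thinking"],
        label="Select Model",
        value="Camel-Doc-OCR-062825",
        elem_classes="model-choices",  # lets the .model-choices rules above apply
    )

if __name__ == "__main__":
    demo.launch()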