Aspiring Astro committed
Commit · b343d52
1 Parent(s): 8d2450a
model selection and confirmation
app.ipynb CHANGED
@@ -438,7 +438,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count":
+   "execution_count": null,
    "id": "a48e7483-c04b-4048-a1ae-34a8c7986a57",
    "metadata": {},
    "outputs": [
@@ -451,83 +451,6 @@
      "To create a public link, set `share=True` in `launch()`.\n"
     ]
    },
-    {
-     "name": "stderr",
-     "output_type": "stream",
-     "text": [
-      "Traceback (most recent call last):\n",
-      "  File \"/Users/ajithj/Library/Python/3.8/lib/python/site-packages/gradio/routes.py\", line 321, in run_predict\n",
-      "    output = await app.blocks.process_api(\n",
-      "  File \"/Users/ajithj/Library/Python/3.8/lib/python/site-packages/gradio/blocks.py\", line 1015, in process_api\n",
-      "    result = await self.call_function(fn_index, inputs, iterator, request)\n",
-      "  File \"/Users/ajithj/Library/Python/3.8/lib/python/site-packages/gradio/blocks.py\", line 856, in call_function\n",
-      "    prediction = await anyio.to_thread.run_sync(\n",
-      "  File \"/Users/ajithj/Library/Python/3.8/lib/python/site-packages/anyio/to_thread.py\", line 31, in run_sync\n",
-      "    return await get_asynclib().run_sync_in_worker_thread(\n",
-      "  File \"/Users/ajithj/Library/Python/3.8/lib/python/site-packages/anyio/_backends/_asyncio.py\", line 937, in run_sync_in_worker_thread\n",
-      "    return await future\n",
-      "  File \"/Users/ajithj/Library/Python/3.8/lib/python/site-packages/anyio/_backends/_asyncio.py\", line 867, in run\n",
-      "    result = context.run(func, *args)\n",
-      "  File \"/var/folders/jk/w8lkkz7n40s81208_5_qd5_80000gn/T/ipykernel_3681/233086315.py\", line 5, in classify_image\n",
-      "    pred,idx,probs = learn.predict(img)\n",
-      "  File \"/Users/ajithj/Library/Python/3.8/lib/python/site-packages/fastai/learner.py\", line 312, in predict\n",
-      "    dl = self.dls.test_dl([item], rm_type_tfms=rm_type_tfms, num_workers=0)\n",
-      "  File \"/Users/ajithj/Library/Python/3.8/lib/python/site-packages/fastai/data/core.py\", line 532, in test_dl\n",
-      "    test_ds = test_set(self.valid_ds, test_items, rm_tfms=rm_type_tfms, with_labels=with_labels\n",
-      "  File \"/Users/ajithj/Library/Python/3.8/lib/python/site-packages/fastai/data/core.py\", line 511, in test_set\n",
-      "    if rm_tfms is None: rm_tfms = [tl.infer_idx(get_first(test_items)) for tl in test_tls]\n",
-      "  File \"/Users/ajithj/Library/Python/3.8/lib/python/site-packages/fastai/data/core.py\", line 511, in <listcomp>\n",
-      "    if rm_tfms is None: rm_tfms = [tl.infer_idx(get_first(test_items)) for tl in test_tls]\n",
-      "  File \"/Users/ajithj/Library/Python/3.8/lib/python/site-packages/fastai/data/core.py\", line 405, in infer_idx\n",
-      "    assert idx < len(self.types), f\"Expected an input of type in \\n{pretty_types}\\n but got {type(x)}\"\n",
-      "AssertionError: Expected an input of type in \n",
-      "  - <class 'pathlib.PosixPath'>\n",
-      "  - <class 'pathlib.Path'>\n",
-      "  - <class 'str'>\n",
-      "  - <class 'torch.Tensor'>\n",
-      "  - <class 'numpy.ndarray'>\n",
-      "  - <class 'bytes'>\n",
-      "  - <class 'fastai.vision.core.PILImage'>\n",
-      " but got <class 'NoneType'>\n"
-     ]
-    },
-    {
-     "data": {
-      "text/html": [
-       "\n",
-       "<style>\n",
-       "    /* Turns off some styling */\n",
-       "    progress {\n",
-       "        /* gets rid of default border in Firefox and Opera. */\n",
-       "        border: none;\n",
-       "        /* Needs to be in here for Safari polyfill so background images work as expected. */\n",
-       "        background-size: auto;\n",
-       "    }\n",
-       "    progress:not([value]), progress:not([value])::-webkit-progress-bar {\n",
-       "        background: repeating-linear-gradient(45deg, #7e7e7e, #7e7e7e 10px, #5c5c5c 10px, #5c5c5c 20px);\n",
-       "    }\n",
-       "    .progress-bar-interrupted, .progress-bar-interrupted::-webkit-progress-bar {\n",
-       "        background: #F44336;\n",
-       "    }\n",
-       "</style>\n"
-      ],
-      "text/plain": [
-       "<IPython.core.display.HTML object>"
-      ]
-     },
-     "metadata": {},
-     "output_type": "display_data"
-    },
-    {
-     "data": {
-      "text/html": [],
-      "text/plain": [
-       "<IPython.core.display.HTML object>"
-      ]
-     },
-     "metadata": {},
-     "output_type": "display_data"
-    },
    {
     "data": {
      "text/html": [
@@ -601,21 +524,6 @@
      },
      "metadata": {},
      "output_type": "display_data"
-    },
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "Keyboard interruption in main thread... closing server.\n"
-     ]
-    },
-    {
-     "data": {
-      "text/plain": []
-     },
-     "execution_count": 19,
-     "metadata": {},
-     "output_type": "execute_result"
     }
    ],
    "source": [
@@ -627,11 +535,12 @@
    "    with gr.Column(variant=\"panel\"):\n",
    "        image = gr.inputs.Image(label=\"Pick an image\")\n",
    "        model = gr.inputs.Dropdown(label=\"Select a model\", choices=models)\n",
-   "        model.change(fn=select_model, inputs=model, outputs=None)\n",
    "        btnClassify = gr.Button(\"Classify\")\n",
    "    with gr.Column(variant=\"panel\"):\n",
+   "        selected = gr.outputs.Textbox(label=\"Active Model\")\n",
    "        result = gr.outputs.Label(label=\"Result\")\n",
    "        \n",
+   "        model.change(fn=select_model, inputs=model, outputs=selected)\n",
    "        btnClassify.click(fn=classify_image, inputs=image, outputs=result)\n",
    "        img_gallery = gr.Examples(examples=example_images, inputs=image)\n",
    "\n",
@@ -643,10 +552,18 @@
   },
   {
    "cell_type": "code",
-   "execution_count":
+   "execution_count": 20,
    "id": "cab071f9-7c3b-4b35-a0d1-3687731ffce5",
    "metadata": {},
-   "outputs": [
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Export successful\n"
+     ]
+    }
+   ],
    "source": [
    "import nbdev\n",
    "nbdev.export.nb_export('app.ipynb', './')\n",
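Note on the stderr output stripped from the notebook above: it recorded an AssertionError from fastai in classify_image, which passes the Gradio image straight to learn.predict; the traceback ends with "but got <class 'NoneType'>", i.e. the input arrived as None rather than one of the accepted types (path, str, tensor, ndarray, bytes, PILImage). A minimal sketch of a guard for that case follows; it assumes `learn` is the Learner already loaded elsewhere in app.ipynb, and the return format and PILImage wrapping are illustrative, not the app's actual code.

from fastai.vision.core import PILImage

def classify_image(img):
    # Hypothetical guard: Gradio's Image input yields None when no image
    # is provided, and learn.predict() asserts on NoneType inputs.
    if img is None:
        return {}
    # Gradio hands over a numpy array; fastai accepts ndarrays directly,
    # wrapping in PILImage just makes the input type explicit.
    pred, idx, probs = learn.predict(PILImage.create(img))
    # Map class names to probabilities for a gr.outputs.Label component.
    return {c: float(p) for c, p in zip(learn.dls.vocab, probs)}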
app.py CHANGED
@@ -47,11 +47,12 @@ with demo:
     with gr.Column(variant="panel"):
         image = gr.inputs.Image(label="Pick an image")
         model = gr.inputs.Dropdown(label="Select a model", choices=models)
-        model.change(fn=select_model, inputs=model, outputs=None)
         btnClassify = gr.Button("Classify")
     with gr.Column(variant="panel"):
+        selected = gr.outputs.Textbox(label="Active Model")
         result = gr.outputs.Label(label="Result")
 
+        model.change(fn=select_model, inputs=model, outputs=selected)
         btnClassify.click(fn=classify_image, inputs=image, outputs=result)
         img_gallery = gr.Examples(examples=example_images, inputs=image)
 
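Both files make the same change, matching the commit message: instead of firing model.change with outputs=None, the handler's return value is now routed into a new "Active Model" textbox, so the chosen model is confirmed on screen. The diff never shows select_model itself, so the sketch below uses a hypothetical stand-in and an assumed `models` list; only the Blocks wiring mirrors the committed code, using the same older gr.inputs/gr.outputs aliases the app uses.

import gradio as gr

# Assumed list of model choices; the real app defines `models` elsewhere.
models = ["resnet18.pkl", "resnet34.pkl"]

def select_model(name):
    # Hypothetical stand-in for the app's select_model: the real function
    # presumably also activates the chosen learner; returning the name is
    # what lets the "Active Model" textbox confirm the selection.
    return name

with gr.Blocks() as demo:
    with gr.Column(variant="panel"):
        model = gr.inputs.Dropdown(label="Select a model", choices=models)
    with gr.Column(variant="panel"):
        selected = gr.outputs.Textbox(label="Active Model")

    # Routing the change event into the textbox provides the confirmation;
    # with outputs=None there was no on-screen feedback for the choice.
    model.change(fn=select_model, inputs=model, outputs=selected)

demo.launch()

In the Gradio 3.x API this app targets, gr.inputs.Dropdown and gr.outputs.Textbox are deprecated aliases of gr.Dropdown and gr.Textbox; the model.change(...) wiring itself is the same in newer versions.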