Commit 4caf6f4 · 1 Parent(s): da09cca

feat: remove the model selection dropdown

Signed-off-by: Graham White <[email protected]>

Files changed: src/app.py (+0 -31)
src/app.py CHANGED

@@ -27,8 +27,6 @@ TOP_P = 0.85
 TOP_K = 50
 REPETITION_PENALTY = 1.05
 
-model_list = ["granite-3.1-8b-instruct", "granite-3.1-2b-instruct"]
-
 if not torch.cuda.is_available():
     DESCRIPTION += "\nThis demo does not work on CPU."
 
@@ -84,22 +82,6 @@ css_file_path = Path(Path(__file__).parent / "app.css")
 head_file_path = Path(Path(__file__).parent / "app_head.html")
 
 
-def on_model_dropdown_change(model_name: str) -> list:
-    """Event handler for dropdown."""
-    global model
-    global tokenizer
-
-    model = AutoModelForCausalLM.from_pretrained(
-        f"ibm-granite/{model_name}", torch_dtype=torch.float16, device_map="auto"
-    )
-    tokenizer = AutoTokenizer.from_pretrained(f"ibm-granite/{model_name}")
-    tokenizer.use_default_system_prompt = False
-
-    # clear the chat interface when the model dropdown is changed
-    # works around https://github.com/gradio-app/gradio/issues/10343
-    return [None, []]
-
-
 with gr.Blocks(
     fill_height=True, css_paths=css_file_path, head_paths=head_file_path, theme=carbon_theme, title=TITLE
 ) as demo:
@@ -108,13 +90,6 @@ with gr.Blocks(
         elem_classes=["gr_title"],
     )
     gr.HTML(DESCRIPTION)
-    model_dropdown = gr.Dropdown(
-        choices=model_list,
-        value="granite-3.1-8b-instruct",
-        interactive=True,
-        label="Model",
-        filterable=False,
-    )
     gr.HTML(
         value='<a href="https://www.ibm.com/granite/docs/">View Documentation</a> <i class="fa fa-external-link"></i>',
         elem_classes=["gr_docs_link"],
@@ -131,11 +106,5 @@ with gr.Blocks(
         type="messages",
     )
 
-    model_dropdown.change(
-        fn=on_model_dropdown_change,
-        inputs=model_dropdown,
-        outputs=[chat_interface.chatbot, chat_interface.chatbot_state],
-    )
-
 if __name__ == "__main__":
     demo.queue().launch()
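For context on what the removal implies: on_model_dropdown_change was the only place the model and tokenizer were (re)loaded on demand, so with the dropdown gone the app presumably loads one fixed checkpoint at startup instead. A minimal sketch of that setup, reusing the loading arguments from the removed handler and its former default choice granite-3.1-8b-instruct (the MODEL_NAME constant here is illustrative, not taken from the repo):

# Minimal sketch: load one fixed Granite model at startup instead of
# reloading it from a dropdown's change event. The from_pretrained
# arguments mirror the removed on_model_dropdown_change handler.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

MODEL_NAME = "granite-3.1-8b-instruct"  # the default the dropdown used to select

model = AutoModelForCausalLM.from_pretrained(
    f"ibm-granite/{MODEL_NAME}", torch_dtype=torch.float16, device_map="auto"
)
tokenizer = AutoTokenizer.from_pretrained(f"ibm-granite/{MODEL_NAME}")
tokenizer.use_default_system_prompt = False  # same flag the removed handler set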
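If model switching ever needs to be restored, the removed wiring followed Gradio's standard change-event pattern: a Dropdown whose change callback swaps the model and returns [None, []] to clear the chatbot and its state, working around https://github.com/gradio-app/gradio/issues/10343. A condensed, self-contained sketch of that pattern, with a placeholder responder standing in for the app's real generation code (the outputs list mirrors the removed code and assumes ChatInterface still exposes chatbot and chatbot_state):

# Condensed sketch of the removed pattern: a Dropdown whose change event
# resets a ChatInterface. echo() is a placeholder; the real app reloads
# the model and tokenizer inside the change handler.
import gradio as gr

model_list = ["granite-3.1-8b-instruct", "granite-3.1-2b-instruct"]


def echo(message, history):
    # Placeholder responder; the real app generates with the loaded model.
    return f"(placeholder) you said: {message}"


def on_model_dropdown_change(model_name: str) -> list:
    # In the real app the model and tokenizer are reloaded here.
    # Returning [None, []] clears the chatbot and its state, working
    # around https://github.com/gradio-app/gradio/issues/10343.
    return [None, []]


with gr.Blocks() as demo:
    model_dropdown = gr.Dropdown(
        choices=model_list,
        value="granite-3.1-8b-instruct",
        interactive=True,
        label="Model",
        filterable=False,
    )
    chat_interface = gr.ChatInterface(fn=echo, type="messages")
    model_dropdown.change(
        fn=on_model_dropdown_change,
        inputs=model_dropdown,
        outputs=[chat_interface.chatbot, chat_interface.chatbot_state],
    )

if __name__ == "__main__":
    demo.queue().launch()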