Loubna ben allal
committed on
Commit
·
e6bed89
1
Parent(s):
a5d2ff2
update app
Browse files
app.py
CHANGED
@@ -42,18 +42,15 @@ model3 = load_model("facebook/opt-1.3b")
|
|
42 |
pipelines = {}
|
43 |
for model in models:
|
44 |
if model == "CodeParrot":
|
45 |
-
|
46 |
-
pipelines[model] = pipe
|
47 |
elif model == "InCoder":
|
48 |
tokenizer = load_tokenizer("facebook/incoder-1B")
|
49 |
model = load_model("facebook/incoder-1B")
|
50 |
-
|
51 |
-
pipelines[model] = pipe
|
52 |
else:
|
53 |
tokenizer = load_tokenizer("facebook/opt-1.3b")
|
54 |
model = load_model("facebook/opt-1.3b")
|
55 |
-
|
56 |
-
pipelines[model] = pipe
|
57 |
|
58 |
examples = load_examples()
|
59 |
example_names = [example["name"] for example in examples]
|
@@ -97,7 +94,8 @@ elif selected_task == "Code generation":
|
|
97 |
if st.button("Generate code!"):
|
98 |
with st.spinner("Generating code..."):
|
99 |
for model in selected_models:
|
|
|
100 |
pipe = pipelines[model]
|
101 |
generated_text = pipe(gen_prompt, **gen_kwargs)[0]['generated_text']
|
102 |
-
st.markdown(f"
|
103 |
st.code(generated_text)
|
|
|
42 |
pipelines = {}
|
43 |
for model in models:
|
44 |
if model == "CodeParrot":
|
45 |
+
pipelines[model] = pipeline("text-generation", model=model1, tokenizer=tokenizer1)
|
|
|
46 |
elif model == "InCoder":
|
47 |
tokenizer = load_tokenizer("facebook/incoder-1B")
|
48 |
model = load_model("facebook/incoder-1B")
|
49 |
+
pipelines[model] = pipeline("text-generation", model=model2, tokenizer=tokenizer2)
|
|
|
50 |
else:
|
51 |
tokenizer = load_tokenizer("facebook/opt-1.3b")
|
52 |
model = load_model("facebook/opt-1.3b")
|
53 |
+
pipelines[model] = pipeline("text-generation", model=model3, tokenizer=tokenizer3)
|
|
|
54 |
|
55 |
examples = load_examples()
|
56 |
example_names = [example["name"] for example in examples]
|
|
|
94 |
if st.button("Generate code!"):
|
95 |
with st.spinner("Generating code..."):
|
96 |
for model in selected_models:
|
97 |
+
st.markdown(f"{len(pipelines)} model is {model} keys {pipelines.keys()}:")
|
98 |
pipe = pipelines[model]
|
99 |
generated_text = pipe(gen_prompt, **gen_kwargs)[0]['generated_text']
|
100 |
+
st.markdown(f"{model}:")
|
101 |
st.code(generated_text)
|