Commit 8d9fca0 by loubnabnl (HF Staff)
Parent: a9e00bb

update code

Files changed (1)
1. app.py +3 -3
app.py CHANGED
@@ -38,14 +38,14 @@ def how_to_load(language):
 def load_model(values, language):
     model = values["model"]
     if not model:
-        text = f"""No model available for {language.capitalize()}. If you trained a model on this language, let us know in\
+        text = f"""No model is available for {language.capitalize()}. If you trained a model on this language, let us know in\
        in the [Community tab](https://huggingface.co/spaces/loubnabnl/the-stack-bot/discussions) to feature your model!\n\
        You can also train your own model on The Stack using the instructions below 🚀"""
         st.write(text)
         if st.button("Fine-tune your own model", key=4):
             st.write("Code available at [GitHub link] + add preview")
     else:
-        text = f"""{model} is a model that was trained on the {language.capitalize()} subset of The Stack. Here's how to use it:"""
+        text = f"""[{model}](hf.co/{model}) is a model trained on the {language.capitalize()} subset of The Stack. Here's how to use it:"""
         code = f"""
         ```python
         from transformers import AutoModelForCausalLM, AutoTokenizer
@@ -106,7 +106,7 @@ if st.session_state["Models trained on dataset"]:
     load_model(languages[selected_language], selected_language)
 
     if languages[selected_language]["model"] and languages[selected_language]["gradio_demo"]:
-        st.write(f"Here's a demo to try the model, for more flexibilty you can use the [Gradio demo]({languages[selected_language]['gradio_demo']}).")
+        st.write(f"Here's a demo to try it, for more flexibilty you can use the original [Gradio demo]({languages[selected_language]['gradio_demo']}).")
         gen_prompt = st.text_area(
             "Generate code with prompt:",
             value="# Implement a function to print hello world",