asoria committed
Commit 7d529e0
1 Parent(s): 6d0709a

Fix style and parameters

Files changed (1)
app.py +35 -31
app.py CHANGED
@@ -3,15 +3,17 @@ from gradio_huggingfacehub_search import HuggingfaceHubSearch
 import nbformat as nbf
 from huggingface_hub import HfApi
 
+
 def create_notebook_file(cell_commands, notebook_name="generated_notebook.ipynb"):
     nb = nbf.v4.new_notebook()
-    nb['cells'] = [nbf.v4.new_code_cell(command) for command in cell_commands]
+    nb["cells"] = [nbf.v4.new_code_cell(command) for command in cell_commands]
 
-    with open(notebook_name, 'w') as f:
+    with open(notebook_name, "w") as f:
         nbf.write(nb, f)
-
+
     print(f"Notebook '{notebook_name}' created successfully.")
 
+
 def push_notebook(file_path, dataset_id, token):
     api = HfApi(token=token)
     api.upload_file(
@@ -20,40 +22,36 @@ def push_notebook(file_path, dataset_id, token):
         repo_id=dataset_id,
         repo_type="dataset",
     )
+    # TODO: Handle permission error
     print("Notebook uploaded to Huggingface Hub.")
-    link = f"https://huggingface.co/datasets/{dataset_id}/blob/main/dataset_analyst.ipynb"
+    link = (
+        f"https://huggingface.co/datasets/{dataset_id}/blob/main/dataset_analyst.ipynb"
+    )
     return f'<a target="_blank" href="{link}" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">See notebook</a>'
 
-def generate_notebook(dataset_id, token):
-    api = HfApi(token=token)
-    # TODO: Handle auth error
+
+def generate_notebook(dataset_id):
     # TODO: Get first config and split? or generate a dataframe per each split maybe?
     commands = [
-        f"!pip install pandas",
-        f"import pandas as pd",
+        "!pip install pandas",
+        "import pandas as pd",
         f"df = pd.read_parquet('hf://datasets/{dataset_id}/data/train-00000-of-00001.parquet')",
-        f"df.head()",
+        "df.head()",
     ]
     notebook_name = f"{dataset_id.replace('/', '-')}.ipynb"
     create_notebook_file(commands, notebook_name=notebook_name)
-    api.upload_file(
-        path_or_fileobj=notebook_name,
-        path_in_repo="dataset_analysis.ipynb",
-        repo_id="asoria/en-text",
-        repo_type="dataset",
-    )
-    # TODO: Handle permission error
     print("Notebook uploaded to Huggingface Hub.")
     return notebook_name
 
+
 with gr.Blocks() as demo:
     gr.Markdown("# 🤖 Dataset auto analyst creator 🕵️")
     dataset_name = HuggingfaceHubSearch(
-        label="Hub Dataset ID",
-        placeholder="Search for dataset id on Huggingface",
-        search_type="dataset",
-        value="",
-    )
+        label="Hub Dataset ID",
+        placeholder="Search for dataset id on Huggingface",
+        search_type="dataset",
+        value="",
+    )
 
     @gr.render(inputs=dataset_name)
     def embed(name):
@@ -72,28 +70,30 @@ with gr.Blocks() as demo:
         generate_btn = gr.Button("Generate notebook and push to repo", visible=True)
 
         download_link = gr.File(label="Download Notebook")
-        generate_btn.click(generate_notebook, inputs=[dataset_name], outputs=[download_link])
+        generate_btn.click(
+            generate_notebook, inputs=[dataset_name], outputs=[download_link]
+        )
         with gr.Row() as auth_page:
             with gr.Column():
                 auth_title = gr.Markdown(
                     "Enter your token ([settings](https://huggingface.co/settings/tokens)):"
                 )
-                token_box = gr.Textbox("", label="token", placeholder="hf_xxx", type="password"
+                token_box = gr.Textbox(
+                    "", label="token", placeholder="hf_xxx", type="password"
                 )
                 auth_error = gr.Markdown("", visible=False)
 
                 def auth(token):
                     if not token:
                         return {
-                            auth_error: gr.Markdown(value="", visible=False),
-                            push_btn: gr.Row(visible=False)
-                        }
+                            auth_error: gr.Markdown(value="", visible=False),
+                            push_btn: gr.Row(visible=False),
+                        }
                     return {
                         auth_error: gr.Markdown(value="", visible=False),
-                        push_btn: gr.Row(visible=True)
+                        push_btn: gr.Row(visible=True),
                     }
 
-
                 push_btn = gr.Button("Push notebook to repo", visible=False)
                 token_box.change(
                     auth,
@@ -102,5 +102,9 @@ with gr.Blocks() as demo:
                 )
                 output_lbl = gr.HTML(value="")
 
-        push_btn.click(push_notebook, inputs=[download_link, dataset_name, token_box], outputs=[output_lbl])
-        demo.launch()
+        push_btn.click(
+            push_notebook,
+            inputs=[download_link, dataset_name, token_box],
+            outputs=[output_lbl],
+        )
+demo.launch()
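
For reference, a minimal standalone sketch (not part of this commit) of the flow the two helpers implement after the change: build the notebook with nbformat's v4 API, then upload it to a dataset repo with huggingface_hub. The dataset id, token, and path_in_repo values below are placeholders.

import nbformat as nbf
from huggingface_hub import HfApi

dataset_id = "user/some-dataset"  # placeholder dataset id
commands = [
    "!pip install pandas",
    "import pandas as pd",
    f"df = pd.read_parquet('hf://datasets/{dataset_id}/data/train-00000-of-00001.parquet')",
    "df.head()",
]

# create_notebook_file: one code cell per command, written to disk with nbformat
nb = nbf.v4.new_notebook()
nb["cells"] = [nbf.v4.new_code_cell(command) for command in commands]
notebook_name = f"{dataset_id.replace('/', '-')}.ipynb"
with open(notebook_name, "w") as f:
    nbf.write(nb, f)

# push_notebook: upload the file to the dataset repo; the app then renders a link to it on the Hub
api = HfApi(token="hf_xxx")  # placeholder token
api.upload_file(
    path_or_fileobj=notebook_name,
    path_in_repo="dataset_analysis.ipynb",  # assumed target path in the repo
    repo_id=dataset_id,
    repo_type="dataset",
)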