| Column | Type | Values |
|---|---|---|
| url | stringlengths | 58–61 |
| repository_url | stringclasses | 1 value |
| labels_url | stringlengths | 72–75 |
| comments_url | stringlengths | 67–70 |
| events_url | stringlengths | 65–68 |
| html_url | stringlengths | 46–51 |
| id | int64 | 599M–2.12B |
| node_id | stringlengths | 18–32 |
| number | int64 | 1–6.65k |
| title | stringlengths | 1–290 |
| user | dict | |
| labels | listlengths | 0–4 |
| state | stringclasses | 2 values |
| locked | bool | 1 class |
| assignee | dict | |
| assignees | listlengths | 0–4 |
| milestone | dict | |
| comments | int64 | 0–70 |
| created_at | timestamp[ns, tz=UTC] | |
| updated_at | timestamp[ns, tz=UTC] | |
| closed_at | timestamp[ns, tz=UTC] | |
| author_association | stringclasses | 3 values |
| active_lock_reason | float64 | |
| draft | float64 | 0–1 |
| pull_request | dict | |
| body | stringlengths | 0–228k |
| reactions | dict | |
| timeline_url | stringlengths | 67–70 |
| performed_via_github_app | float64 | |
| state_reason | stringclasses | 3 values |
| is_pull_request | bool | 2 classes |
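The listing above is the column schema for the GitHub issue and pull request records that follow. As a hedged sketch only (the `issues.jsonl` file name is an assumption, standing in for a local export of these records), data with this shape can be loaded and filtered with `datasets`:

```python
from datasets import load_dataset

# Hypothetical local export of the records below; the file name is an assumption.
issues = load_dataset("json", data_files="issues.jsonl", split="train")

print(issues.features)  # column names and types, matching the schema above

# Split the records into pull requests and plain issues using the boolean column.
prs = issues.filter(lambda ex: ex["is_pull_request"])
plain_issues = issues.filter(lambda ex: not ex["is_pull_request"])
print(len(prs), len(plain_issues))
```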
https://api.github.com/repos/huggingface/datasets/issues/3772
https://api.github.com/repos/huggingface/datasets
https://api.github.com/repos/huggingface/datasets/issues/3772/labels{/name}
https://api.github.com/repos/huggingface/datasets/issues/3772/comments
https://api.github.com/repos/huggingface/datasets/issues/3772/events
https://github.com/huggingface/datasets/pull/3772
1,146,718,630
PR_kwDODunzps4zRor8
3,772
Fix: dataset name is stored in keys
{ "avatar_url": "https://avatars.githubusercontent.com/u/24695242?v=4", "events_url": "https://api.github.com/users/thomasw21/events{/privacy}", "followers_url": "https://api.github.com/users/thomasw21/followers", "following_url": "https://api.github.com/users/thomasw21/following{/other_user}", "gists_url": "https://api.github.com/users/thomasw21/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/thomasw21", "id": 24695242, "login": "thomasw21", "node_id": "MDQ6VXNlcjI0Njk1MjQy", "organizations_url": "https://api.github.com/users/thomasw21/orgs", "received_events_url": "https://api.github.com/users/thomasw21/received_events", "repos_url": "https://api.github.com/users/thomasw21/repos", "site_admin": false, "starred_url": "https://api.github.com/users/thomasw21/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/thomasw21/subscriptions", "type": "User", "url": "https://api.github.com/users/thomasw21" }
[]
closed
false
null
[]
null
0
2022-02-22T10:20:37Z
2022-02-22T11:08:34Z
2022-02-22T11:08:33Z
CONTRIBUTOR
null
0
{ "diff_url": "https://github.com/huggingface/datasets/pull/3772.diff", "html_url": "https://github.com/huggingface/datasets/pull/3772", "merged_at": "2022-02-22T11:08:33Z", "patch_url": "https://github.com/huggingface/datasets/pull/3772.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/3772" }
null
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/3772/reactions" }
https://api.github.com/repos/huggingface/datasets/issues/3772/timeline
null
null
true
https://api.github.com/repos/huggingface/datasets/issues/3771
https://api.github.com/repos/huggingface/datasets
https://api.github.com/repos/huggingface/datasets/issues/3771/labels{/name}
https://api.github.com/repos/huggingface/datasets/issues/3771/comments
https://api.github.com/repos/huggingface/datasets/issues/3771/events
https://github.com/huggingface/datasets/pull/3771
1,146,561,140
PR_kwDODunzps4zRHsd
3,771
Fix DuplicatedKeysError on msr_sqa dataset
{ "avatar_url": "https://avatars.githubusercontent.com/u/8515462?v=4", "events_url": "https://api.github.com/users/albertvillanova/events{/privacy}", "followers_url": "https://api.github.com/users/albertvillanova/followers", "following_url": "https://api.github.com/users/albertvillanova/following{/other_user}", "gists_url": "https://api.github.com/users/albertvillanova/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/albertvillanova", "id": 8515462, "login": "albertvillanova", "node_id": "MDQ6VXNlcjg1MTU0NjI=", "organizations_url": "https://api.github.com/users/albertvillanova/orgs", "received_events_url": "https://api.github.com/users/albertvillanova/received_events", "repos_url": "https://api.github.com/users/albertvillanova/repos", "site_admin": false, "starred_url": "https://api.github.com/users/albertvillanova/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/albertvillanova/subscriptions", "type": "User", "url": "https://api.github.com/users/albertvillanova" }
[]
closed
false
null
[]
null
0
2022-02-22T07:44:24Z
2022-02-22T08:12:40Z
2022-02-22T08:12:39Z
MEMBER
null
0
{ "diff_url": "https://github.com/huggingface/datasets/pull/3771.diff", "html_url": "https://github.com/huggingface/datasets/pull/3771", "merged_at": "2022-02-22T08:12:39Z", "patch_url": "https://github.com/huggingface/datasets/pull/3771.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/3771" }
Fix #3770.
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/3771/reactions" }
https://api.github.com/repos/huggingface/datasets/issues/3771/timeline
null
null
true
https://api.github.com/repos/huggingface/datasets/issues/3770
https://api.github.com/repos/huggingface/datasets
https://api.github.com/repos/huggingface/datasets/issues/3770/labels{/name}
https://api.github.com/repos/huggingface/datasets/issues/3770/comments
https://api.github.com/repos/huggingface/datasets/issues/3770/events
https://github.com/huggingface/datasets/issues/3770
1,146,336,667
I_kwDODunzps5EU7Wb
3,770
DuplicatedKeysError on msr_sqa dataset
{ "avatar_url": "https://avatars.githubusercontent.com/u/9049591?v=4", "events_url": "https://api.github.com/users/kolk/events{/privacy}", "followers_url": "https://api.github.com/users/kolk/followers", "following_url": "https://api.github.com/users/kolk/following{/other_user}", "gists_url": "https://api.github.com/users/kolk/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/kolk", "id": 9049591, "login": "kolk", "node_id": "MDQ6VXNlcjkwNDk1OTE=", "organizations_url": "https://api.github.com/users/kolk/orgs", "received_events_url": "https://api.github.com/users/kolk/received_events", "repos_url": "https://api.github.com/users/kolk/repos", "site_admin": false, "starred_url": "https://api.github.com/users/kolk/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/kolk/subscriptions", "type": "User", "url": "https://api.github.com/users/kolk" }
[]
closed
false
{ "avatar_url": "https://avatars.githubusercontent.com/u/8515462?v=4", "events_url": "https://api.github.com/users/albertvillanova/events{/privacy}", "followers_url": "https://api.github.com/users/albertvillanova/followers", "following_url": "https://api.github.com/users/albertvillanova/following{/other_user}", "gists_url": "https://api.github.com/users/albertvillanova/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/albertvillanova", "id": 8515462, "login": "albertvillanova", "node_id": "MDQ6VXNlcjg1MTU0NjI=", "organizations_url": "https://api.github.com/users/albertvillanova/orgs", "received_events_url": "https://api.github.com/users/albertvillanova/received_events", "repos_url": "https://api.github.com/users/albertvillanova/repos", "site_admin": false, "starred_url": "https://api.github.com/users/albertvillanova/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/albertvillanova/subscriptions", "type": "User", "url": "https://api.github.com/users/albertvillanova" }
[ { "avatar_url": "https://avatars.githubusercontent.com/u/8515462?v=4", "events_url": "https://api.github.com/users/albertvillanova/events{/privacy}", "followers_url": "https://api.github.com/users/albertvillanova/followers", "following_url": "https://api.github.com/users/albertvillanova/following{/other_user}", "gists_url": "https://api.github.com/users/albertvillanova/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/albertvillanova", "id": 8515462, "login": "albertvillanova", "node_id": "MDQ6VXNlcjg1MTU0NjI=", "organizations_url": "https://api.github.com/users/albertvillanova/orgs", "received_events_url": "https://api.github.com/users/albertvillanova/received_events", "repos_url": "https://api.github.com/users/albertvillanova/repos", "site_admin": false, "starred_url": "https://api.github.com/users/albertvillanova/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/albertvillanova/subscriptions", "type": "User", "url": "https://api.github.com/users/albertvillanova" } ]
null
1
2022-02-22T00:43:33Z
2022-02-22T08:12:39Z
2022-02-22T08:12:39Z
NONE
null
null
null
### Describe the bug

Failure to generate the msr_sqa dataset because of duplicate keys.

### Steps to reproduce the bug

```python
from datasets import load_dataset

load_dataset("msr_sqa")
```

### Expected results

The example keys should be unique.

### Actual results

```
>>> load_dataset("msr_sqa")
Downloading: 6.72k/? [00:00<00:00, 148kB/s]
Downloading: 2.93k/? [00:00<00:00, 53.8kB/s]
Using custom data configuration default
Downloading and preparing dataset msr_sqa/default (download: 4.57 MiB, generated: 26.25 MiB, post-processed: Unknown size, total: 30.83 MiB) to /root/.cache/huggingface/datasets/msr_sqa/default/0.0.0/70b2a497bd3cc8fc960a3557d2bad1eac5edde824505e15c9c8ebe4c260fd4d1...
Downloading: 100% 4.80M/4.80M [00:00<00:00, 7.49MB/s]
---------------------------------------------------------------------------
DuplicatedKeysError                       Traceback (most recent call last)
/usr/local/lib/python3.7/dist-packages/datasets/builder.py in _prepare_split(self, split_generator)
   1080                     example = self.info.features.encode_example(record)
-> 1081                     writer.write(example, key)
   1082             finally:

8 frames

DuplicatedKeysError: FAILURE TO GENERATE DATASET !
Found duplicate Key: nt-639
Keys should be unique and deterministic in nature

During handling of the above exception, another exception occurred:

DuplicatedKeysError                       Traceback (most recent call last)
/usr/local/lib/python3.7/dist-packages/datasets/arrow_writer.py in check_duplicate_keys(self)
    449         for hash, key in self.hkey_record:
    450             if hash in tmp_record:
--> 451                 raise DuplicatedKeysError(key)
    452             else:
    453                 tmp_record.add(hash)

DuplicatedKeysError: FAILURE TO GENERATE DATASET !
Found duplicate Key: nt-639
Keys should be unique and deterministic in nature
```

### Environment info

- `datasets` version: 1.18.3
- Platform: Google Colab notebook
- Python version: 3.7
- PyArrow version: 6.0.1
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/3770/reactions" }
https://api.github.com/repos/huggingface/datasets/issues/3770/timeline
null
completed
false
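Issue #3770 above reports a loader yielding the same key (`nt-639`) twice. A minimal sketch of the usual remedy in a `GeneratorBasedBuilder`, assuming a hypothetical JSON-lines file and field names rather than the actual `msr_sqa` script:

```python
import json

def _generate_examples(self, filepath):
    """Yield a running counter as the key so keys stay unique even when an
    id field in the raw data (such as ``nt-639``) repeats across records."""
    with open(filepath, encoding="utf-8") as f:
        for idx, line in enumerate(f):
            record = json.loads(line)  # hypothetical one-record-per-line format
            yield idx, {
                "id": record.get("id", ""),          # field names are assumptions
                "question": record.get("question", ""),
                "answer": record.get("answer", ""),
            }
```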
https://api.github.com/repos/huggingface/datasets/issues/3769
https://api.github.com/repos/huggingface/datasets
https://api.github.com/repos/huggingface/datasets/issues/3769/labels{/name}
https://api.github.com/repos/huggingface/datasets/issues/3769/comments
https://api.github.com/repos/huggingface/datasets/issues/3769/events
https://github.com/huggingface/datasets/issues/3769
1,146,258,023
I_kwDODunzps5EUoJn
3,769
`dataset = dataset.map()` causes faiss index lost
{ "avatar_url": "https://avatars.githubusercontent.com/u/13076552?v=4", "events_url": "https://api.github.com/users/Oaklight/events{/privacy}", "followers_url": "https://api.github.com/users/Oaklight/followers", "following_url": "https://api.github.com/users/Oaklight/following{/other_user}", "gists_url": "https://api.github.com/users/Oaklight/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/Oaklight", "id": 13076552, "login": "Oaklight", "node_id": "MDQ6VXNlcjEzMDc2NTUy", "organizations_url": "https://api.github.com/users/Oaklight/orgs", "received_events_url": "https://api.github.com/users/Oaklight/received_events", "repos_url": "https://api.github.com/users/Oaklight/repos", "site_admin": false, "starred_url": "https://api.github.com/users/Oaklight/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/Oaklight/subscriptions", "type": "User", "url": "https://api.github.com/users/Oaklight" }
[ { "color": "d73a4a", "default": true, "description": "Something isn't working", "id": 1935892857, "name": "bug", "node_id": "MDU6TGFiZWwxOTM1ODkyODU3", "url": "https://api.github.com/repos/huggingface/datasets/labels/bug" } ]
open
false
null
[]
null
3
2022-02-21T21:59:23Z
2022-06-27T14:56:29Z
null
NONE
null
null
null
## Describe the bug

Assigning the mapped dataset back to the original dataset variable causes loss of the faiss index.

## Steps to reproduce the bug

`my_dataset` is a regular loaded dataset. It's part of a custom dataset structure.

```python
self.dataset.add_faiss_index('embeddings')
self.dataset.list_indexes()  # ['embeddings']

dataset2 = my_dataset.map(
    lambda x: self._get_nearest_examples_batch(x['text']), batched=True
)

# the unexpected result:
dataset2.list_indexes()  # []
self.dataset.list_indexes()  # ['embeddings']
```

In case something is wrong with my `_get_nearest_examples_batch()`, it looks like this:

```python
def _get_nearest_examples_batch(self, examples, k=5):
    queries = embed(examples)
    scores_batch, retrievals_batch = self.dataset.get_nearest_examples_batch(self.faiss_column, queries, k)
    return {
        'neighbors': [batch['text'] for batch in retrievals_batch],
        'scores': scores_batch
    }
```

## Expected results

`map` shouldn't drop the indexes; in other words, indexes should be carried over to the generated dataset.

## Actual results

`map` drops the indexes.

## Environment info

- `datasets` version: 1.18.3
- Platform: Ubuntu 20.04.3 LTS
- Python version: 3.8.12
- PyArrow version: 7.0.0
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/3769/reactions" }
https://api.github.com/repos/huggingface/datasets/issues/3769/timeline
null
null
false
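For issue #3769 above, a hedged, self-contained workaround sketch: `map()` returns a new `Dataset` that does not carry attached faiss indexes, so either keep querying the original, still-indexed dataset or rebuild the index on the mapped result. The column names follow the report; the tiny synthetic data and everything else are assumptions, and faiss must be installed:

```python
import numpy as np
from datasets import Dataset

# Tiny synthetic stand-in for the reporter's dataset.
my_dataset = Dataset.from_dict(
    {"text": ["a", "b", "c"],
     "embeddings": np.random.rand(3, 8).astype("float32").tolist()}
)
my_dataset.add_faiss_index("embeddings")              # the original keeps its index

mapped = my_dataset.map(lambda x: {"length": len(x["text"])})  # the mapped copy has no index
mapped.add_faiss_index("embeddings")                  # rebuild it explicitly if it is needed

assert my_dataset.list_indexes() == ["embeddings"]
assert mapped.list_indexes() == ["embeddings"]
```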
https://api.github.com/repos/huggingface/datasets/issues/3768
https://api.github.com/repos/huggingface/datasets
https://api.github.com/repos/huggingface/datasets/issues/3768/labels{/name}
https://api.github.com/repos/huggingface/datasets/issues/3768/comments
https://api.github.com/repos/huggingface/datasets/issues/3768/events
https://github.com/huggingface/datasets/pull/3768
1,146,102,442
PR_kwDODunzps4zPobl
3,768
Fix HfFileSystem docstring
{ "avatar_url": "https://avatars.githubusercontent.com/u/42851186?v=4", "events_url": "https://api.github.com/users/lhoestq/events{/privacy}", "followers_url": "https://api.github.com/users/lhoestq/followers", "following_url": "https://api.github.com/users/lhoestq/following{/other_user}", "gists_url": "https://api.github.com/users/lhoestq/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/lhoestq", "id": 42851186, "login": "lhoestq", "node_id": "MDQ6VXNlcjQyODUxMTg2", "organizations_url": "https://api.github.com/users/lhoestq/orgs", "received_events_url": "https://api.github.com/users/lhoestq/received_events", "repos_url": "https://api.github.com/users/lhoestq/repos", "site_admin": false, "starred_url": "https://api.github.com/users/lhoestq/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/lhoestq/subscriptions", "type": "User", "url": "https://api.github.com/users/lhoestq" }
[]
closed
false
null
[]
null
0
2022-02-21T18:14:40Z
2022-02-22T09:13:03Z
2022-02-22T09:13:02Z
MEMBER
null
0
{ "diff_url": "https://github.com/huggingface/datasets/pull/3768.diff", "html_url": "https://github.com/huggingface/datasets/pull/3768", "merged_at": "2022-02-22T09:13:02Z", "patch_url": "https://github.com/huggingface/datasets/pull/3768.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/3768" }
null
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/3768/reactions" }
https://api.github.com/repos/huggingface/datasets/issues/3768/timeline
null
null
true
https://api.github.com/repos/huggingface/datasets/issues/3767
https://api.github.com/repos/huggingface/datasets
https://api.github.com/repos/huggingface/datasets/issues/3767/labels{/name}
https://api.github.com/repos/huggingface/datasets/issues/3767/comments
https://api.github.com/repos/huggingface/datasets/issues/3767/events
https://github.com/huggingface/datasets/pull/3767
1,146,036,648
PR_kwDODunzps4zPahh
3,767
Expose method and fix param
{ "avatar_url": "https://avatars.githubusercontent.com/u/1676121?v=4", "events_url": "https://api.github.com/users/severo/events{/privacy}", "followers_url": "https://api.github.com/users/severo/followers", "following_url": "https://api.github.com/users/severo/following{/other_user}", "gists_url": "https://api.github.com/users/severo/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/severo", "id": 1676121, "login": "severo", "node_id": "MDQ6VXNlcjE2NzYxMjE=", "organizations_url": "https://api.github.com/users/severo/orgs", "received_events_url": "https://api.github.com/users/severo/received_events", "repos_url": "https://api.github.com/users/severo/repos", "site_admin": false, "starred_url": "https://api.github.com/users/severo/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/severo/subscriptions", "type": "User", "url": "https://api.github.com/users/severo" }
[]
closed
false
null
[]
null
0
2022-02-21T16:57:47Z
2022-02-22T08:35:03Z
2022-02-22T08:35:02Z
CONTRIBUTOR
null
0
{ "diff_url": "https://github.com/huggingface/datasets/pull/3767.diff", "html_url": "https://github.com/huggingface/datasets/pull/3767", "merged_at": "2022-02-22T08:35:02Z", "patch_url": "https://github.com/huggingface/datasets/pull/3767.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/3767" }
A fix + expose a new method, following https://github.com/huggingface/datasets/pull/3670
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/3767/reactions" }
https://api.github.com/repos/huggingface/datasets/issues/3767/timeline
null
null
true
https://api.github.com/repos/huggingface/datasets/issues/3766
https://api.github.com/repos/huggingface/datasets
https://api.github.com/repos/huggingface/datasets/issues/3766/labels{/name}
https://api.github.com/repos/huggingface/datasets/issues/3766/comments
https://api.github.com/repos/huggingface/datasets/issues/3766/events
https://github.com/huggingface/datasets/pull/3766
1,145,829,289
PR_kwDODunzps4zOujH
3,766
Fix head_qa data URL
{ "avatar_url": "https://avatars.githubusercontent.com/u/8515462?v=4", "events_url": "https://api.github.com/users/albertvillanova/events{/privacy}", "followers_url": "https://api.github.com/users/albertvillanova/followers", "following_url": "https://api.github.com/users/albertvillanova/following{/other_user}", "gists_url": "https://api.github.com/users/albertvillanova/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/albertvillanova", "id": 8515462, "login": "albertvillanova", "node_id": "MDQ6VXNlcjg1MTU0NjI=", "organizations_url": "https://api.github.com/users/albertvillanova/orgs", "received_events_url": "https://api.github.com/users/albertvillanova/received_events", "repos_url": "https://api.github.com/users/albertvillanova/repos", "site_admin": false, "starred_url": "https://api.github.com/users/albertvillanova/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/albertvillanova/subscriptions", "type": "User", "url": "https://api.github.com/users/albertvillanova" }
[]
closed
false
null
[]
null
0
2022-02-21T13:52:50Z
2022-02-21T14:39:20Z
2022-02-21T14:39:19Z
MEMBER
null
0
{ "diff_url": "https://github.com/huggingface/datasets/pull/3766.diff", "html_url": "https://github.com/huggingface/datasets/pull/3766", "merged_at": "2022-02-21T14:39:19Z", "patch_url": "https://github.com/huggingface/datasets/pull/3766.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/3766" }
Fix #3758.
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 1, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 1, "url": "https://api.github.com/repos/huggingface/datasets/issues/3766/reactions" }
https://api.github.com/repos/huggingface/datasets/issues/3766/timeline
null
null
true
https://api.github.com/repos/huggingface/datasets/issues/3765
https://api.github.com/repos/huggingface/datasets
https://api.github.com/repos/huggingface/datasets/issues/3765/labels{/name}
https://api.github.com/repos/huggingface/datasets/issues/3765/comments
https://api.github.com/repos/huggingface/datasets/issues/3765/events
https://github.com/huggingface/datasets/pull/3765
1,145,126,881
PR_kwDODunzps4zMdIL
3,765
Update URL for tagging app
{ "avatar_url": "https://avatars.githubusercontent.com/u/26859204?v=4", "events_url": "https://api.github.com/users/lewtun/events{/privacy}", "followers_url": "https://api.github.com/users/lewtun/followers", "following_url": "https://api.github.com/users/lewtun/following{/other_user}", "gists_url": "https://api.github.com/users/lewtun/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/lewtun", "id": 26859204, "login": "lewtun", "node_id": "MDQ6VXNlcjI2ODU5MjA0", "organizations_url": "https://api.github.com/users/lewtun/orgs", "received_events_url": "https://api.github.com/users/lewtun/received_events", "repos_url": "https://api.github.com/users/lewtun/repos", "site_admin": false, "starred_url": "https://api.github.com/users/lewtun/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/lewtun/subscriptions", "type": "User", "url": "https://api.github.com/users/lewtun" }
[]
closed
false
null
[]
null
1
2022-02-20T20:34:31Z
2022-02-20T20:36:10Z
2022-02-20T20:36:06Z
MEMBER
null
0
{ "diff_url": "https://github.com/huggingface/datasets/pull/3765.diff", "html_url": "https://github.com/huggingface/datasets/pull/3765", "merged_at": null, "patch_url": "https://github.com/huggingface/datasets/pull/3765.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/3765" }
This PR updates the URL for the tagging app to be the one on Spaces.
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/3765/reactions" }
https://api.github.com/repos/huggingface/datasets/issues/3765/timeline
null
null
true
https://api.github.com/repos/huggingface/datasets/issues/3764
https://api.github.com/repos/huggingface/datasets
https://api.github.com/repos/huggingface/datasets/issues/3764/labels{/name}
https://api.github.com/repos/huggingface/datasets/issues/3764/comments
https://api.github.com/repos/huggingface/datasets/issues/3764/events
https://github.com/huggingface/datasets/issues/3764
1,145,107,050
I_kwDODunzps5EQPJq
3,764
!
{ "avatar_url": "https://avatars.githubusercontent.com/u/77545307?v=4", "events_url": "https://api.github.com/users/LesiaFedorenko/events{/privacy}", "followers_url": "https://api.github.com/users/LesiaFedorenko/followers", "following_url": "https://api.github.com/users/LesiaFedorenko/following{/other_user}", "gists_url": "https://api.github.com/users/LesiaFedorenko/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/LesiaFedorenko", "id": 77545307, "login": "LesiaFedorenko", "node_id": "MDQ6VXNlcjc3NTQ1MzA3", "organizations_url": "https://api.github.com/users/LesiaFedorenko/orgs", "received_events_url": "https://api.github.com/users/LesiaFedorenko/received_events", "repos_url": "https://api.github.com/users/LesiaFedorenko/repos", "site_admin": false, "starred_url": "https://api.github.com/users/LesiaFedorenko/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/LesiaFedorenko/subscriptions", "type": "User", "url": "https://api.github.com/users/LesiaFedorenko" }
[ { "color": "E5583E", "default": false, "description": "Related to the dataset viewer on huggingface.co", "id": 3470211881, "name": "dataset-viewer", "node_id": "LA_kwDODunzps7O1zsp", "url": "https://api.github.com/repos/huggingface/datasets/labels/dataset-viewer" } ]
closed
false
null
[]
null
0
2022-02-20T19:05:43Z
2022-02-21T08:55:58Z
2022-02-21T08:55:58Z
NONE
null
null
null
## Dataset viewer issue for '*name of the dataset*'

**Link:** *link to the dataset viewer page*

*short description of the issue*

Am I the one who added this dataset ? Yes-No
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/3764/reactions" }
https://api.github.com/repos/huggingface/datasets/issues/3764/timeline
null
completed
false
https://api.github.com/repos/huggingface/datasets/issues/3763
https://api.github.com/repos/huggingface/datasets
https://api.github.com/repos/huggingface/datasets/issues/3763/labels{/name}
https://api.github.com/repos/huggingface/datasets/issues/3763/comments
https://api.github.com/repos/huggingface/datasets/issues/3763/events
https://github.com/huggingface/datasets/issues/3763
1,145,099,878
I_kwDODunzps5EQNZm
3,763
It's not possible download `20200501.pt` dataset
{ "avatar_url": "https://avatars.githubusercontent.com/u/1514798?v=4", "events_url": "https://api.github.com/users/jvanz/events{/privacy}", "followers_url": "https://api.github.com/users/jvanz/followers", "following_url": "https://api.github.com/users/jvanz/following{/other_user}", "gists_url": "https://api.github.com/users/jvanz/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/jvanz", "id": 1514798, "login": "jvanz", "node_id": "MDQ6VXNlcjE1MTQ3OTg=", "organizations_url": "https://api.github.com/users/jvanz/orgs", "received_events_url": "https://api.github.com/users/jvanz/received_events", "repos_url": "https://api.github.com/users/jvanz/repos", "site_admin": false, "starred_url": "https://api.github.com/users/jvanz/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/jvanz/subscriptions", "type": "User", "url": "https://api.github.com/users/jvanz" }
[ { "color": "d73a4a", "default": true, "description": "Something isn't working", "id": 1935892857, "name": "bug", "node_id": "MDU6TGFiZWwxOTM1ODkyODU3", "url": "https://api.github.com/repos/huggingface/datasets/labels/bug" } ]
closed
false
null
[]
null
2
2022-02-20T18:34:58Z
2022-02-21T12:06:12Z
2022-02-21T09:25:06Z
NONE
null
null
null
## Describe the bug

The dataset `20200501.pt` is broken. The available datasets: https://dumps.wikimedia.org/ptwiki/

## Steps to reproduce the bug

```python
from datasets import load_dataset
dataset = load_dataset("wikipedia", "20200501.pt", beam_runner='DirectRunner')
```

## Expected results

I expect to download the dataset locally.

## Actual results

```
>>> from datasets import load_dataset
>>> dataset = load_dataset("wikipedia", "20200501.pt", beam_runner='DirectRunner')
Downloading and preparing dataset wikipedia/20200501.pt to /home/jvanz/.cache/huggingface/datasets/wikipedia/20200501.pt/1.0.0/009f923d9b6dd00c00c8cdc7f408f2b47f45dd4f5fb7982a21f9448f4afbe475...
/home/jvanz/anaconda3/envs/tf-gpu/lib/python3.9/site-packages/apache_beam/__init__.py:79: UserWarning: This version of Apache Beam has not been sufficiently tested on Python 3.9. You may encounter bugs or missing features.
  warnings.warn(
  0%|          | 0/1 [00:00<?, ?it/s]
Traceback (most recent call last):
  File "<stdin>", line 1, in <module>
  File "/home/jvanz/anaconda3/envs/tf-gpu/lib/python3.9/site-packages/datasets/load.py", line 1702, in load_dataset
    builder_instance.download_and_prepare(
  File "/home/jvanz/anaconda3/envs/tf-gpu/lib/python3.9/site-packages/datasets/builder.py", line 594, in download_and_prepare
    self._download_and_prepare(
  File "/home/jvanz/anaconda3/envs/tf-gpu/lib/python3.9/site-packages/datasets/builder.py", line 1245, in _download_and_prepare
    super()._download_and_prepare(
  File "/home/jvanz/anaconda3/envs/tf-gpu/lib/python3.9/site-packages/datasets/builder.py", line 661, in _download_and_prepare
    split_generators = self._split_generators(dl_manager, **split_generators_kwargs)
  File "/home/jvanz/.cache/huggingface/modules/datasets_modules/datasets/wikipedia/009f923d9b6dd00c00c8cdc7f408f2b47f45dd4f5fb7982a21f9448f4afbe475/wikipedia.py", line 420, in _split_generators
    downloaded_files = dl_manager.download_and_extract({"info": info_url})
  File "/home/jvanz/anaconda3/envs/tf-gpu/lib/python3.9/site-packages/datasets/utils/download_manager.py", line 307, in download_and_extract
    return self.extract(self.download(url_or_urls))
  File "/home/jvanz/anaconda3/envs/tf-gpu/lib/python3.9/site-packages/datasets/utils/download_manager.py", line 195, in download
    downloaded_path_or_paths = map_nested(
  File "/home/jvanz/anaconda3/envs/tf-gpu/lib/python3.9/site-packages/datasets/utils/py_utils.py", line 260, in map_nested
    mapped = [
  File "/home/jvanz/anaconda3/envs/tf-gpu/lib/python3.9/site-packages/datasets/utils/py_utils.py", line 261, in <listcomp>
    _single_map_nested((function, obj, types, None, True))
  File "/home/jvanz/anaconda3/envs/tf-gpu/lib/python3.9/site-packages/datasets/utils/py_utils.py", line 196, in _single_map_nested
    return function(data_struct)
  File "/home/jvanz/anaconda3/envs/tf-gpu/lib/python3.9/site-packages/datasets/utils/download_manager.py", line 216, in _download
    return cached_path(url_or_filename, download_config=download_config)
  File "/home/jvanz/anaconda3/envs/tf-gpu/lib/python3.9/site-packages/datasets/utils/file_utils.py", line 298, in cached_path
    output_path = get_from_cache(
  File "/home/jvanz/anaconda3/envs/tf-gpu/lib/python3.9/site-packages/datasets/utils/file_utils.py", line 612, in get_from_cache
    raise FileNotFoundError(f"Couldn't find file at {url}")
FileNotFoundError: Couldn't find file at https://dumps.wikimedia.org/ptwiki/20200501/dumpstatus.json
```

## Environment info

```
- `datasets` version: 1.18.3
- Platform: Linux-5.3.18-150300.59.49-default-x86_64-with-glibc2.31
- Python version: 3.9.7
- PyArrow version: 6.0.1
```
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/3763/reactions" }
https://api.github.com/repos/huggingface/datasets/issues/3763/timeline
null
completed
false
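For issue #3763 above, the `20200501` dump has been removed from dumps.wikimedia.org, so the config points at a file that no longer exists. A hedged workaround sketch, passing a dump date that is still listed for `ptwiki`; the date below is an example assumption and should be checked against https://dumps.wikimedia.org/ptwiki/ before use:

```python
from datasets import load_dataset

# `language` and `date` are forwarded to the wikipedia builder config;
# pick a date that still appears under https://dumps.wikimedia.org/ptwiki/.
dataset = load_dataset(
    "wikipedia",
    language="pt",
    date="20220220",  # example date, an assumption; verify availability first
    beam_runner="DirectRunner",
)
```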
https://api.github.com/repos/huggingface/datasets/issues/3762
https://api.github.com/repos/huggingface/datasets
https://api.github.com/repos/huggingface/datasets/issues/3762/labels{/name}
https://api.github.com/repos/huggingface/datasets/issues/3762/comments
https://api.github.com/repos/huggingface/datasets/issues/3762/events
https://github.com/huggingface/datasets/issues/3762
1,144,849,557
I_kwDODunzps5EPQSV
3,762
`Dataset.class_encode` should support custom class names
{ "avatar_url": "https://avatars.githubusercontent.com/u/8976546?v=4", "events_url": "https://api.github.com/users/Dref360/events{/privacy}", "followers_url": "https://api.github.com/users/Dref360/followers", "following_url": "https://api.github.com/users/Dref360/following{/other_user}", "gists_url": "https://api.github.com/users/Dref360/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/Dref360", "id": 8976546, "login": "Dref360", "node_id": "MDQ6VXNlcjg5NzY1NDY=", "organizations_url": "https://api.github.com/users/Dref360/orgs", "received_events_url": "https://api.github.com/users/Dref360/received_events", "repos_url": "https://api.github.com/users/Dref360/repos", "site_admin": false, "starred_url": "https://api.github.com/users/Dref360/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/Dref360/subscriptions", "type": "User", "url": "https://api.github.com/users/Dref360" }
[ { "color": "a2eeef", "default": true, "description": "New feature or request", "id": 1935892871, "name": "enhancement", "node_id": "MDU6TGFiZWwxOTM1ODkyODcx", "url": "https://api.github.com/repos/huggingface/datasets/labels/enhancement" } ]
closed
false
null
[]
null
3
2022-02-19T21:21:45Z
2022-02-21T12:16:35Z
2022-02-21T12:16:35Z
CONTRIBUTOR
null
null
null
I can make a PR, just wanted approval before starting.

**Is your feature request related to a problem? Please describe.**

It is often the case that classes are not in alphabetical order. The current `class_encode_column` sorts the classes before indexing.

https://github.com/huggingface/datasets/blob/master/src/datasets/arrow_dataset.py#L1235

**Describe the solution you'd like**

I would like to add an **optional** parameter `class_names` to `class_encode_column` that would be used for the mapping instead of sorting the unique values.

**Describe alternatives you've considered**

One can use `map` instead, but I find it harder to read.

```python
CLASS_NAMES = ['apple', 'orange', 'potato']
ds = ds.map(lambda item: {label_column: CLASS_NAMES.index(item[label_column])})

# Proposition
ds = ds.class_encode_column(label_column, CLASS_NAMES)
```

**Additional context**

I can make the PR if this feature is accepted.
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/3762/reactions" }
https://api.github.com/repos/huggingface/datasets/issues/3762/timeline
null
completed
false
https://api.github.com/repos/huggingface/datasets/issues/3761
https://api.github.com/repos/huggingface/datasets
https://api.github.com/repos/huggingface/datasets/issues/3761/labels{/name}
https://api.github.com/repos/huggingface/datasets/issues/3761/comments
https://api.github.com/repos/huggingface/datasets/issues/3761/events
https://github.com/huggingface/datasets/issues/3761
1,144,830,702
I_kwDODunzps5EPLru
3,761
Know your data for HF hub
{ "avatar_url": "https://avatars.githubusercontent.com/u/20128202?v=4", "events_url": "https://api.github.com/users/Muhtasham/events{/privacy}", "followers_url": "https://api.github.com/users/Muhtasham/followers", "following_url": "https://api.github.com/users/Muhtasham/following{/other_user}", "gists_url": "https://api.github.com/users/Muhtasham/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/Muhtasham", "id": 20128202, "login": "Muhtasham", "node_id": "MDQ6VXNlcjIwMTI4MjAy", "organizations_url": "https://api.github.com/users/Muhtasham/orgs", "received_events_url": "https://api.github.com/users/Muhtasham/received_events", "repos_url": "https://api.github.com/users/Muhtasham/repos", "site_admin": false, "starred_url": "https://api.github.com/users/Muhtasham/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/Muhtasham/subscriptions", "type": "User", "url": "https://api.github.com/users/Muhtasham" }
[ { "color": "a2eeef", "default": true, "description": "New feature or request", "id": 1935892871, "name": "enhancement", "node_id": "MDU6TGFiZWwxOTM1ODkyODcx", "url": "https://api.github.com/repos/huggingface/datasets/labels/enhancement" } ]
closed
false
null
[]
null
1
2022-02-19T19:48:47Z
2022-02-21T14:15:23Z
2022-02-21T14:15:23Z
NONE
null
null
null
**Is your feature request related to a problem? Please describe.**

It would be great to be able to understand datasets, with the goal of improving data quality and helping mitigate fairness and bias issues.

**Describe the solution you'd like**

Something like https://knowyourdata.withgoogle.com/ for the HF hub.
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/3761/reactions" }
https://api.github.com/repos/huggingface/datasets/issues/3761/timeline
null
completed
false
https://api.github.com/repos/huggingface/datasets/issues/3760
https://api.github.com/repos/huggingface/datasets
https://api.github.com/repos/huggingface/datasets/issues/3760/labels{/name}
https://api.github.com/repos/huggingface/datasets/issues/3760/comments
https://api.github.com/repos/huggingface/datasets/issues/3760/events
https://github.com/huggingface/datasets/issues/3760
1,144,804,558
I_kwDODunzps5EPFTO
3,760
Unable to view the Gradio flagged call back dataset
{ "avatar_url": "https://avatars.githubusercontent.com/u/36753484?v=4", "events_url": "https://api.github.com/users/kingabzpro/events{/privacy}", "followers_url": "https://api.github.com/users/kingabzpro/followers", "following_url": "https://api.github.com/users/kingabzpro/following{/other_user}", "gists_url": "https://api.github.com/users/kingabzpro/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/kingabzpro", "id": 36753484, "login": "kingabzpro", "node_id": "MDQ6VXNlcjM2NzUzNDg0", "organizations_url": "https://api.github.com/users/kingabzpro/orgs", "received_events_url": "https://api.github.com/users/kingabzpro/received_events", "repos_url": "https://api.github.com/users/kingabzpro/repos", "site_admin": false, "starred_url": "https://api.github.com/users/kingabzpro/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/kingabzpro/subscriptions", "type": "User", "url": "https://api.github.com/users/kingabzpro" }
[ { "color": "E5583E", "default": false, "description": "Related to the dataset viewer on huggingface.co", "id": 3470211881, "name": "dataset-viewer", "node_id": "LA_kwDODunzps7O1zsp", "url": "https://api.github.com/repos/huggingface/datasets/labels/dataset-viewer" } ]
closed
false
null
[]
null
5
2022-02-19T17:45:08Z
2022-03-22T07:12:11Z
2022-03-22T07:12:11Z
NONE
null
null
null
## Dataset viewer issue for '*savtadepth-flags*'

**Link:** [savtadepth-flags](https://huggingface.co/datasets/kingabzpro/savtadepth-flags)

With Gradio 2.8.1 the dataset viewer stopped working. I tried to add values manually but it's not working. The dataset also does not show the link to the app https://huggingface.co/spaces/kingabzpro/savtadepth.

Am I the one who added this dataset ? Yes
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/3760/reactions" }
https://api.github.com/repos/huggingface/datasets/issues/3760/timeline
null
completed
false
https://api.github.com/repos/huggingface/datasets/issues/3759
https://api.github.com/repos/huggingface/datasets
https://api.github.com/repos/huggingface/datasets/issues/3759/labels{/name}
https://api.github.com/repos/huggingface/datasets/issues/3759/comments
https://api.github.com/repos/huggingface/datasets/issues/3759/events
https://github.com/huggingface/datasets/pull/3759
1,143,400,770
PR_kwDODunzps4zGhQu
3,759
Rename GenerateMode to DownloadMode
{ "avatar_url": "https://avatars.githubusercontent.com/u/8515462?v=4", "events_url": "https://api.github.com/users/albertvillanova/events{/privacy}", "followers_url": "https://api.github.com/users/albertvillanova/followers", "following_url": "https://api.github.com/users/albertvillanova/following{/other_user}", "gists_url": "https://api.github.com/users/albertvillanova/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/albertvillanova", "id": 8515462, "login": "albertvillanova", "node_id": "MDQ6VXNlcjg1MTU0NjI=", "organizations_url": "https://api.github.com/users/albertvillanova/orgs", "received_events_url": "https://api.github.com/users/albertvillanova/received_events", "repos_url": "https://api.github.com/users/albertvillanova/repos", "site_admin": false, "starred_url": "https://api.github.com/users/albertvillanova/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/albertvillanova/subscriptions", "type": "User", "url": "https://api.github.com/users/albertvillanova" }
[]
closed
false
null
[]
null
1
2022-02-18T16:53:53Z
2022-02-22T13:57:24Z
2022-02-22T12:22:52Z
MEMBER
null
0
{ "diff_url": "https://github.com/huggingface/datasets/pull/3759.diff", "html_url": "https://github.com/huggingface/datasets/pull/3759", "merged_at": "2022-02-22T12:22:52Z", "patch_url": "https://github.com/huggingface/datasets/pull/3759.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/3759" }
This PR:
- Renames `GenerateMode` to `DownloadMode`
- Implements `DeprecatedEnum`
- Deprecates `GenerateMode`

Close #769.
{ "+1": 2, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 2, "url": "https://api.github.com/repos/huggingface/datasets/issues/3759/reactions" }
https://api.github.com/repos/huggingface/datasets/issues/3759/timeline
null
null
true
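PR #3759 above renames `GenerateMode` to `DownloadMode` and deprecates the old name through a `DeprecatedEnum` helper. A minimal sketch of one way such a deprecated alias can keep working, shown as an illustration of the pattern rather than the PR's actual implementation:

```python
import warnings
from enum import Enum


class DownloadMode(Enum):
    REUSE_DATASET_IF_EXISTS = "reuse_dataset_if_exists"
    REUSE_CACHE_IF_EXISTS = "reuse_cache_if_exists"
    FORCE_REDOWNLOAD = "force_redownload"


class _DeprecatedAlias:
    """Forward attribute access to the new enum, warning on every use."""

    def __init__(self, new_enum, old_name):
        self._new_enum = new_enum
        self._old_name = old_name

    def __getattr__(self, item):
        warnings.warn(
            f"'{self._old_name}' is deprecated, use '{self._new_enum.__name__}' instead.",
            DeprecationWarning,
            stacklevel=2,
        )
        return getattr(self._new_enum, item)


GenerateMode = _DeprecatedAlias(DownloadMode, "GenerateMode")

mode = GenerateMode.FORCE_REDOWNLOAD  # warns, then returns DownloadMode.FORCE_REDOWNLOAD
```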
https://api.github.com/repos/huggingface/datasets/issues/3758
https://api.github.com/repos/huggingface/datasets
https://api.github.com/repos/huggingface/datasets/issues/3758/labels{/name}
https://api.github.com/repos/huggingface/datasets/issues/3758/comments
https://api.github.com/repos/huggingface/datasets/issues/3758/events
https://github.com/huggingface/datasets/issues/3758
1,143,366,393
I_kwDODunzps5EJmL5
3,758
head_qa file missing
{ "avatar_url": "https://avatars.githubusercontent.com/u/1676121?v=4", "events_url": "https://api.github.com/users/severo/events{/privacy}", "followers_url": "https://api.github.com/users/severo/followers", "following_url": "https://api.github.com/users/severo/following{/other_user}", "gists_url": "https://api.github.com/users/severo/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/severo", "id": 1676121, "login": "severo", "node_id": "MDQ6VXNlcjE2NzYxMjE=", "organizations_url": "https://api.github.com/users/severo/orgs", "received_events_url": "https://api.github.com/users/severo/received_events", "repos_url": "https://api.github.com/users/severo/repos", "site_admin": false, "starred_url": "https://api.github.com/users/severo/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/severo/subscriptions", "type": "User", "url": "https://api.github.com/users/severo" }
[ { "color": "d73a4a", "default": true, "description": "Something isn't working", "id": 1935892857, "name": "bug", "node_id": "MDU6TGFiZWwxOTM1ODkyODU3", "url": "https://api.github.com/repos/huggingface/datasets/labels/bug" } ]
closed
false
{ "avatar_url": "https://avatars.githubusercontent.com/u/8515462?v=4", "events_url": "https://api.github.com/users/albertvillanova/events{/privacy}", "followers_url": "https://api.github.com/users/albertvillanova/followers", "following_url": "https://api.github.com/users/albertvillanova/following{/other_user}", "gists_url": "https://api.github.com/users/albertvillanova/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/albertvillanova", "id": 8515462, "login": "albertvillanova", "node_id": "MDQ6VXNlcjg1MTU0NjI=", "organizations_url": "https://api.github.com/users/albertvillanova/orgs", "received_events_url": "https://api.github.com/users/albertvillanova/received_events", "repos_url": "https://api.github.com/users/albertvillanova/repos", "site_admin": false, "starred_url": "https://api.github.com/users/albertvillanova/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/albertvillanova/subscriptions", "type": "User", "url": "https://api.github.com/users/albertvillanova" }
[ { "avatar_url": "https://avatars.githubusercontent.com/u/8515462?v=4", "events_url": "https://api.github.com/users/albertvillanova/events{/privacy}", "followers_url": "https://api.github.com/users/albertvillanova/followers", "following_url": "https://api.github.com/users/albertvillanova/following{/other_user}", "gists_url": "https://api.github.com/users/albertvillanova/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/albertvillanova", "id": 8515462, "login": "albertvillanova", "node_id": "MDQ6VXNlcjg1MTU0NjI=", "organizations_url": "https://api.github.com/users/albertvillanova/orgs", "received_events_url": "https://api.github.com/users/albertvillanova/received_events", "repos_url": "https://api.github.com/users/albertvillanova/repos", "site_admin": false, "starred_url": "https://api.github.com/users/albertvillanova/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/albertvillanova/subscriptions", "type": "User", "url": "https://api.github.com/users/albertvillanova" } ]
null
2
2022-02-18T16:32:43Z
2022-02-28T14:29:18Z
2022-02-21T14:39:19Z
CONTRIBUTOR
null
null
null
## Describe the bug

A file for the `head_qa` dataset is missing (https://drive.google.com/u/0/uc?export=download&id=1a_95N5zQQoUCq8IBNVZgziHbeM-QxG2t/HEAD_EN/train_HEAD_EN.json)

## Steps to reproduce the bug

```python
>>> from datasets import load_dataset
>>> load_dataset("head_qa", name="en")
```

## Expected results

The dataset should be loaded.

## Actual results

```
Downloading and preparing dataset head_qa/en (download: 75.69 MiB, generated: 2.69 MiB, post-processed: Unknown size, total: 78.38 MiB) to /home/slesage/.cache/huggingface/datasets/head_qa/en/1.1.0/583ab408e8baf54aab378c93715fadc4d8aa51b393e27c3484a877e2ac0278e9...
Downloading data: 2.21kB [00:00, 2.05MB/s]
Traceback (most recent call last):
  File "<stdin>", line 1, in <module>
  File "/home/slesage/hf/datasets-preview-backend/.venv/lib/python3.9/site-packages/datasets/load.py", line 1729, in load_dataset
    builder_instance.download_and_prepare(
  File "/home/slesage/hf/datasets-preview-backend/.venv/lib/python3.9/site-packages/datasets/builder.py", line 594, in download_and_prepare
    self._download_and_prepare(
  File "/home/slesage/hf/datasets-preview-backend/.venv/lib/python3.9/site-packages/datasets/builder.py", line 665, in _download_and_prepare
    verify_checksums(
  File "/home/slesage/hf/datasets-preview-backend/.venv/lib/python3.9/site-packages/datasets/utils/info_utils.py", line 40, in verify_checksums
    raise NonMatchingChecksumError(error_msg + str(bad_urls))
datasets.utils.info_utils.NonMatchingChecksumError: Checksums didn't match for dataset source files:
['https://drive.google.com/u/0/uc?export=download&id=1a_95N5zQQoUCq8IBNVZgziHbeM-QxG2t']
```

## Environment info

- `datasets` version: 1.18.4.dev0
- Platform: Linux-5.11.0-1028-aws-x86_64-with-glibc2.31
- Python version: 3.9.6
- PyArrow version: 6.0.1
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/3758/reactions" }
https://api.github.com/repos/huggingface/datasets/issues/3758/timeline
null
completed
false
https://api.github.com/repos/huggingface/datasets/issues/3757
https://api.github.com/repos/huggingface/datasets
https://api.github.com/repos/huggingface/datasets/issues/3757/labels{/name}
https://api.github.com/repos/huggingface/datasets/issues/3757/comments
https://api.github.com/repos/huggingface/datasets/issues/3757/events
https://github.com/huggingface/datasets/pull/3757
1,143,300,880
PR_kwDODunzps4zGK7p
3,757
Add perplexity to metrics
{ "avatar_url": "https://avatars.githubusercontent.com/u/27527747?v=4", "events_url": "https://api.github.com/users/emibaylor/events{/privacy}", "followers_url": "https://api.github.com/users/emibaylor/followers", "following_url": "https://api.github.com/users/emibaylor/following{/other_user}", "gists_url": "https://api.github.com/users/emibaylor/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/emibaylor", "id": 27527747, "login": "emibaylor", "node_id": "MDQ6VXNlcjI3NTI3NzQ3", "organizations_url": "https://api.github.com/users/emibaylor/orgs", "received_events_url": "https://api.github.com/users/emibaylor/received_events", "repos_url": "https://api.github.com/users/emibaylor/repos", "site_admin": false, "starred_url": "https://api.github.com/users/emibaylor/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/emibaylor/subscriptions", "type": "User", "url": "https://api.github.com/users/emibaylor" }
[]
closed
false
null
[]
null
2
2022-02-18T15:52:23Z
2022-02-25T17:13:34Z
2022-02-25T17:13:34Z
CONTRIBUTOR
null
0
{ "diff_url": "https://github.com/huggingface/datasets/pull/3757.diff", "html_url": "https://github.com/huggingface/datasets/pull/3757", "merged_at": "2022-02-25T17:13:34Z", "patch_url": "https://github.com/huggingface/datasets/pull/3757.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/3757" }
Adding the perplexity metric.

This code differs from the code in [this](https://huggingface.co/docs/transformers/perplexity) HF blog post because the blog post code fails in at least the following circumstances:
- it returns NaNs whenever the stride = 1
- it hits a runtime error when the stride is significantly larger than the max model length (e.g. if max_model_length = 512 and stride = 1024)

Note that:
- As it is, it only works for causal models. Pseudo-perplexity can be added later as another metric to work with masked language models.
- It takes in a list of strings so that it can be dataset independent. This does mean that it doesn't currently batch inputs, and is therefore relatively slow.
- It overwrites the metric's `compute()` function with a perplexity-specific `compute()` function. This is because the current general metrics `compute()` function requires model-generated predictions, which doesn't make sense in the context of perplexity.
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 1, "laugh": 0, "rocket": 0, "total_count": 1, "url": "https://api.github.com/repos/huggingface/datasets/issues/3757/reactions" }
https://api.github.com/repos/huggingface/datasets/issues/3757/timeline
null
null
true
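PR #3757 above adds a perplexity metric that takes raw strings and a causal language model. As a hedged, stand-alone sketch of the underlying computation (not the metric's actual code, and without batching, so it shares the slowness caveat in the description; the choice of `gpt2` is an example):

```python
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "gpt2"  # any causal LM works; this choice is an assumption
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(model_id)
model.eval()


def perplexity(text: str) -> float:
    # Perplexity = exp(mean negative log-likelihood of each token given its prefix).
    enc = tokenizer(text, return_tensors="pt")
    with torch.no_grad():
        out = model(**enc, labels=enc["input_ids"])
    return torch.exp(out.loss).item()


print(perplexity("The quick brown fox jumps over the lazy dog."))
```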
https://api.github.com/repos/huggingface/datasets/issues/3756
https://api.github.com/repos/huggingface/datasets
https://api.github.com/repos/huggingface/datasets/issues/3756/labels{/name}
https://api.github.com/repos/huggingface/datasets/issues/3756/comments
https://api.github.com/repos/huggingface/datasets/issues/3756/events
https://github.com/huggingface/datasets/issues/3756
1,143,273,825
I_kwDODunzps5EJPlh
3,756
Images get decoded when using `map()` with `input_columns` argument on a dataset
{ "avatar_url": "https://avatars.githubusercontent.com/u/1430243?v=4", "events_url": "https://api.github.com/users/kklemon/events{/privacy}", "followers_url": "https://api.github.com/users/kklemon/followers", "following_url": "https://api.github.com/users/kklemon/following{/other_user}", "gists_url": "https://api.github.com/users/kklemon/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/kklemon", "id": 1430243, "login": "kklemon", "node_id": "MDQ6VXNlcjE0MzAyNDM=", "organizations_url": "https://api.github.com/users/kklemon/orgs", "received_events_url": "https://api.github.com/users/kklemon/received_events", "repos_url": "https://api.github.com/users/kklemon/repos", "site_admin": false, "starred_url": "https://api.github.com/users/kklemon/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/kklemon/subscriptions", "type": "User", "url": "https://api.github.com/users/kklemon" }
[ { "color": "d73a4a", "default": true, "description": "Something isn't working", "id": 1935892857, "name": "bug", "node_id": "MDU6TGFiZWwxOTM1ODkyODU3", "url": "https://api.github.com/repos/huggingface/datasets/labels/bug" } ]
closed
false
{ "avatar_url": "https://avatars.githubusercontent.com/u/47462742?v=4", "events_url": "https://api.github.com/users/mariosasko/events{/privacy}", "followers_url": "https://api.github.com/users/mariosasko/followers", "following_url": "https://api.github.com/users/mariosasko/following{/other_user}", "gists_url": "https://api.github.com/users/mariosasko/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/mariosasko", "id": 47462742, "login": "mariosasko", "node_id": "MDQ6VXNlcjQ3NDYyNzQy", "organizations_url": "https://api.github.com/users/mariosasko/orgs", "received_events_url": "https://api.github.com/users/mariosasko/received_events", "repos_url": "https://api.github.com/users/mariosasko/repos", "site_admin": false, "starred_url": "https://api.github.com/users/mariosasko/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/mariosasko/subscriptions", "type": "User", "url": "https://api.github.com/users/mariosasko" }
[ { "avatar_url": "https://avatars.githubusercontent.com/u/47462742?v=4", "events_url": "https://api.github.com/users/mariosasko/events{/privacy}", "followers_url": "https://api.github.com/users/mariosasko/followers", "following_url": "https://api.github.com/users/mariosasko/following{/other_user}", "gists_url": "https://api.github.com/users/mariosasko/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/mariosasko", "id": 47462742, "login": "mariosasko", "node_id": "MDQ6VXNlcjQ3NDYyNzQy", "organizations_url": "https://api.github.com/users/mariosasko/orgs", "received_events_url": "https://api.github.com/users/mariosasko/received_events", "repos_url": "https://api.github.com/users/mariosasko/repos", "site_admin": false, "starred_url": "https://api.github.com/users/mariosasko/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/mariosasko/subscriptions", "type": "User", "url": "https://api.github.com/users/mariosasko" } ]
null
2
2022-02-18T15:35:38Z
2022-12-13T16:59:06Z
2022-12-13T16:59:06Z
NONE
null
null
null
## Describe the bug

The `datasets.features.Image` feature class decodes image data by default. As expected, when indexing a dataset or using the `map()` method, images are returned as PIL Image instances. However, when calling `map()` and selecting a specific data column with the `input_columns` argument, the image data is passed as its raw byte representation to the mapping function.

## Steps to reproduce the bug

```python
from datasets import load_dataset
from torchvision import transforms
from PIL.Image import Image

dataset = load_dataset('mnist', split='train')

def transform_all_columns(example):
    # example['image'] arrives decoded as a PIL Image, as expected
    assert isinstance(example['image'], Image)
    return example

def transform_image_column(image):
    # image is NOT decoded here; it arrives as raw bytes, so this assertion fails
    assert isinstance(image, Image)
    return image

# single-sample dataset for debugging purposes
dev = dataset.select([0])

dev.map(transform_all_columns)
dev.map(transform_image_column, input_columns='image')
```

## Expected results

Image data should be passed in decoded form, i.e. as PIL Image objects, to the mapping function unless the `decode` attribute on the image feature is set to `False`.

## Actual results

The mapping function receives images as raw byte data.

## Environment info

- `datasets` version: 1.18.3
- Platform: Linux-5.11.0-49-generic-x86_64-with-glibc2.32
- Python version: 3.8.0b4
- PyArrow version: 7.0.0
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/3756/reactions" }
https://api.github.com/repos/huggingface/datasets/issues/3756/timeline
null
completed
false
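For issue #3756 above, a hedged workaround sketch: drop `input_columns` so the `Image` feature's normal decoding path runs, and pick the column inside the mapping function instead. The `convert("L")` call is only an example transformation, not part of the report:

```python
from datasets import load_dataset

dataset = load_dataset("mnist", split="train")


def transform(example):
    image = example["image"]  # decoded PIL.Image.Image on this code path
    return {"image": image.convert("L")}  # example transformation, an assumption


dev = dataset.select([0])
dev = dev.map(transform)  # no `input_columns`, so decoding is applied as usual
```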
https://api.github.com/repos/huggingface/datasets/issues/3755
https://api.github.com/repos/huggingface/datasets
https://api.github.com/repos/huggingface/datasets/issues/3755/labels{/name}
https://api.github.com/repos/huggingface/datasets/issues/3755/comments
https://api.github.com/repos/huggingface/datasets/issues/3755/events
https://github.com/huggingface/datasets/issues/3755
1,143,032,961
I_kwDODunzps5EIUyB
3,755
Cannot preview dataset
{ "avatar_url": "https://avatars.githubusercontent.com/u/2518789?v=4", "events_url": "https://api.github.com/users/frascuchon/events{/privacy}", "followers_url": "https://api.github.com/users/frascuchon/followers", "following_url": "https://api.github.com/users/frascuchon/following{/other_user}", "gists_url": "https://api.github.com/users/frascuchon/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/frascuchon", "id": 2518789, "login": "frascuchon", "node_id": "MDQ6VXNlcjI1MTg3ODk=", "organizations_url": "https://api.github.com/users/frascuchon/orgs", "received_events_url": "https://api.github.com/users/frascuchon/received_events", "repos_url": "https://api.github.com/users/frascuchon/repos", "site_admin": false, "starred_url": "https://api.github.com/users/frascuchon/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/frascuchon/subscriptions", "type": "User", "url": "https://api.github.com/users/frascuchon" }
[ { "color": "E5583E", "default": false, "description": "Related to the dataset viewer on huggingface.co", "id": 3470211881, "name": "dataset-viewer", "node_id": "LA_kwDODunzps7O1zsp", "url": "https://api.github.com/repos/huggingface/datasets/labels/dataset-viewer" } ]
closed
false
{ "avatar_url": "https://avatars.githubusercontent.com/u/1676121?v=4", "events_url": "https://api.github.com/users/severo/events{/privacy}", "followers_url": "https://api.github.com/users/severo/followers", "following_url": "https://api.github.com/users/severo/following{/other_user}", "gists_url": "https://api.github.com/users/severo/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/severo", "id": 1676121, "login": "severo", "node_id": "MDQ6VXNlcjE2NzYxMjE=", "organizations_url": "https://api.github.com/users/severo/orgs", "received_events_url": "https://api.github.com/users/severo/received_events", "repos_url": "https://api.github.com/users/severo/repos", "site_admin": false, "starred_url": "https://api.github.com/users/severo/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/severo/subscriptions", "type": "User", "url": "https://api.github.com/users/severo" }
[ { "avatar_url": "https://avatars.githubusercontent.com/u/1676121?v=4", "events_url": "https://api.github.com/users/severo/events{/privacy}", "followers_url": "https://api.github.com/users/severo/followers", "following_url": "https://api.github.com/users/severo/following{/other_user}", "gists_url": "https://api.github.com/users/severo/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/severo", "id": 1676121, "login": "severo", "node_id": "MDQ6VXNlcjE2NzYxMjE=", "organizations_url": "https://api.github.com/users/severo/orgs", "received_events_url": "https://api.github.com/users/severo/received_events", "repos_url": "https://api.github.com/users/severo/repos", "site_admin": false, "starred_url": "https://api.github.com/users/severo/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/severo/subscriptions", "type": "User", "url": "https://api.github.com/users/severo" } ]
null
3
2022-02-18T13:06:45Z
2022-02-19T14:30:28Z
2022-02-18T15:41:33Z
NONE
null
null
null
## Dataset viewer issue for '*rubrix/news*' **Link:** https://huggingface.co/datasets/rubrix/news Cannot see the dataset preview: ``` Status code: 400 Exception: Status400Error Message: Not found. Cache is waiting to be refreshed. ``` Am I the one who added this dataset? No
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/3755/reactions" }
https://api.github.com/repos/huggingface/datasets/issues/3755/timeline
null
completed
false
https://api.github.com/repos/huggingface/datasets/issues/3754
https://api.github.com/repos/huggingface/datasets
https://api.github.com/repos/huggingface/datasets/issues/3754/labels{/name}
https://api.github.com/repos/huggingface/datasets/issues/3754/comments
https://api.github.com/repos/huggingface/datasets/issues/3754/events
https://github.com/huggingface/datasets/issues/3754
1,142,886,536
I_kwDODunzps5EHxCI
3,754
Overflowing indices in `select`
{ "avatar_url": "https://avatars.githubusercontent.com/u/8264887?v=4", "events_url": "https://api.github.com/users/lvwerra/events{/privacy}", "followers_url": "https://api.github.com/users/lvwerra/followers", "following_url": "https://api.github.com/users/lvwerra/following{/other_user}", "gists_url": "https://api.github.com/users/lvwerra/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/lvwerra", "id": 8264887, "login": "lvwerra", "node_id": "MDQ6VXNlcjgyNjQ4ODc=", "organizations_url": "https://api.github.com/users/lvwerra/orgs", "received_events_url": "https://api.github.com/users/lvwerra/received_events", "repos_url": "https://api.github.com/users/lvwerra/repos", "site_admin": false, "starred_url": "https://api.github.com/users/lvwerra/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/lvwerra/subscriptions", "type": "User", "url": "https://api.github.com/users/lvwerra" }
[ { "color": "d73a4a", "default": true, "description": "Something isn't working", "id": 1935892857, "name": "bug", "node_id": "MDU6TGFiZWwxOTM1ODkyODU3", "url": "https://api.github.com/repos/huggingface/datasets/labels/bug" } ]
closed
false
null
[]
null
2
2022-02-18T11:30:52Z
2022-02-18T11:38:23Z
2022-02-18T11:38:23Z
MEMBER
null
null
null
## Describe the bug The `Dataset.select` function seems to accept indices that are larger than the dataset size and seems to effectively use `index % len(ds)`. ## Steps to reproduce the bug ```python from datasets import Dataset ds = Dataset.from_dict({"test": [1,2,3]}) ds = ds.select(range(5)) print(ds) print() print(ds["test"]) ``` Result: ```python Dataset({ features: ['test'], num_rows: 5 }) [1, 2, 3, 1, 2] ``` This behaviour is not documented and can lead to unexpected behaviour when, for example, taking a sample larger than the dataset and thus creating a lot of duplicates. ## Expected results I think this should throw an error or at least a very big warning: ```python IndexError: Invalid key: 5 is out of bounds for size 3 ``` ## Environment info - `datasets` version: 1.18.3 - Platform: macOS-12.0.1-x86_64-i386-64bit - Python version: 3.9.10 - PyArrow version: 7.0.0
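Until `select` validates its input, a small caller-side guard can surface the problem early. This is just a sketch of a hypothetical `safe_select` helper, not part of the library API:

```python
from datasets import Dataset


def safe_select(dataset, indices):
    # Reject out-of-range indices explicitly instead of letting them wrap
    # around modulo the dataset size.
    indices = list(indices)
    bad = [i for i in indices if not 0 <= i < len(dataset)]
    if bad:
        raise IndexError(f"Invalid keys {bad} for a dataset of size {len(dataset)}")
    return dataset.select(indices)


ds = Dataset.from_dict({"test": [1, 2, 3]})
safe_select(ds, range(3))    # works
# safe_select(ds, range(5))  # raises IndexError instead of duplicating rows
```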
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/3754/reactions" }
https://api.github.com/repos/huggingface/datasets/issues/3754/timeline
null
completed
false
https://api.github.com/repos/huggingface/datasets/issues/3753
https://api.github.com/repos/huggingface/datasets
https://api.github.com/repos/huggingface/datasets/issues/3753/labels{/name}
https://api.github.com/repos/huggingface/datasets/issues/3753/comments
https://api.github.com/repos/huggingface/datasets/issues/3753/events
https://github.com/huggingface/datasets/issues/3753
1,142,821,144
I_kwDODunzps5EHhEY
3,753
Expanding streaming capabilities
{ "avatar_url": "https://avatars.githubusercontent.com/u/8264887?v=4", "events_url": "https://api.github.com/users/lvwerra/events{/privacy}", "followers_url": "https://api.github.com/users/lvwerra/followers", "following_url": "https://api.github.com/users/lvwerra/following{/other_user}", "gists_url": "https://api.github.com/users/lvwerra/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/lvwerra", "id": 8264887, "login": "lvwerra", "node_id": "MDQ6VXNlcjgyNjQ4ODc=", "organizations_url": "https://api.github.com/users/lvwerra/orgs", "received_events_url": "https://api.github.com/users/lvwerra/received_events", "repos_url": "https://api.github.com/users/lvwerra/repos", "site_admin": false, "starred_url": "https://api.github.com/users/lvwerra/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/lvwerra/subscriptions", "type": "User", "url": "https://api.github.com/users/lvwerra" }
[ { "color": "a2eeef", "default": true, "description": "New feature or request", "id": 1935892871, "name": "enhancement", "node_id": "MDU6TGFiZWwxOTM1ODkyODcx", "url": "https://api.github.com/repos/huggingface/datasets/labels/enhancement" } ]
open
false
null
[]
null
4
2022-02-18T10:45:41Z
2022-05-03T14:22:38Z
null
MEMBER
null
null
null
Some ideas for a few features that could be useful when working with large datasets in streaming mode. ## `filter` for `IterableDataset` Adding filtering to streaming datasets would be useful in several scenarios: - filter a dataset with many languages for a subset of languages - filter a dataset for specific licenses - other custom logic to get a subset The only way to achieve this at the moment is, I think, through writing a custom loading script and implementing filters there. ## `IterableDataset` to `Dataset` conversion In combination with the above filter, a functionality to "play" the whole stream would be useful. The motivation is that often one might filter the dataset to get a manageable size for experimentation. In that case streaming mode is no longer necessary as the filtered dataset is small enough, and it would be useful to be able to play through the whole stream to create a normal `Dataset` with all its benefits. ```python ds = load_dataset("some_large_dataset", streaming=True) ds_filter = ds.filter(lambda x: x["lang"] == "fr") ds_filter = ds_filter.stream() # here the `IterableDataset` is converted to a `Dataset` ``` Naturally, this could be expanded with `stream(n=1000)` which creates a `Dataset` with the first `n` elements, similar to `take`. ## Stream to the Hub While streaming allows using a dataset as is without saving the whole dataset on the local machine, it is currently not possible to process a dataset and add it to the hub. The only way to do this is by downloading the full dataset and saving the processed dataset again before pushing it to the hub. The API could look something like: ```python ds = load_dataset("some_large_dataset", streaming=True) ds_filter = ds.filter(some_filter_func) ds_processed = ds_filter.map(some_processing_func) ds_processed.push_to_hub("new_better_dataset", batch_size=100_000) ``` Under the hood this could be done by processing and aggregating `batch_size` elements and then pushing that batch as a single file to the hub. With this functionality one could process and create TB-scale datasets while only requiring local disk space on the order of `batch_size` elements. cc @lhoestq @albertvillanova
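A rough workaround sketch for the `IterableDataset` → `Dataset` conversion idea, under the assumption that the filtering is done in plain Python while iterating; `"some_large_dataset"` and the `lang` field are placeholders carried over from the issue, and the 1000-example cap is arbitrary:

```python
from datasets import Dataset, load_dataset

streamed = load_dataset("some_large_dataset", split="train", streaming=True)

examples = []
for example in streamed:
    if example.get("lang") == "fr":  # plain-Python stand-in for the proposed filter
        examples.append(example)
    if len(examples) >= 1000:        # stop once the materialized sample is big enough
        break

# Build a regular (map-style) Dataset from the collected examples
# (assumes at least one example matched the filter).
columns = {key: [ex[key] for ex in examples] for key in examples[0]}
materialized = Dataset.from_dict(columns)
```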
{ "+1": 3, "-1": 0, "confused": 0, "eyes": 0, "heart": 3, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 6, "url": "https://api.github.com/repos/huggingface/datasets/issues/3753/reactions" }
https://api.github.com/repos/huggingface/datasets/issues/3753/timeline
null
null
false
https://api.github.com/repos/huggingface/datasets/issues/3752
https://api.github.com/repos/huggingface/datasets
https://api.github.com/repos/huggingface/datasets/issues/3752/labels{/name}
https://api.github.com/repos/huggingface/datasets/issues/3752/comments
https://api.github.com/repos/huggingface/datasets/issues/3752/events
https://github.com/huggingface/datasets/pull/3752
1,142,627,889
PR_kwDODunzps4zD1D9
3,752
Update metadata JSON for cats_vs_dogs dataset
{ "avatar_url": "https://avatars.githubusercontent.com/u/8515462?v=4", "events_url": "https://api.github.com/users/albertvillanova/events{/privacy}", "followers_url": "https://api.github.com/users/albertvillanova/followers", "following_url": "https://api.github.com/users/albertvillanova/following{/other_user}", "gists_url": "https://api.github.com/users/albertvillanova/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/albertvillanova", "id": 8515462, "login": "albertvillanova", "node_id": "MDQ6VXNlcjg1MTU0NjI=", "organizations_url": "https://api.github.com/users/albertvillanova/orgs", "received_events_url": "https://api.github.com/users/albertvillanova/received_events", "repos_url": "https://api.github.com/users/albertvillanova/repos", "site_admin": false, "starred_url": "https://api.github.com/users/albertvillanova/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/albertvillanova/subscriptions", "type": "User", "url": "https://api.github.com/users/albertvillanova" }
[]
closed
false
null
[]
null
0
2022-02-18T08:32:53Z
2022-02-18T14:56:12Z
2022-02-18T14:56:11Z
MEMBER
null
0
{ "diff_url": "https://github.com/huggingface/datasets/pull/3752.diff", "html_url": "https://github.com/huggingface/datasets/pull/3752", "merged_at": "2022-02-18T14:56:11Z", "patch_url": "https://github.com/huggingface/datasets/pull/3752.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/3752" }
Note that the number of examples in the train split was already fixed in the dataset card. Fix #3750.
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/3752/reactions" }
https://api.github.com/repos/huggingface/datasets/issues/3752/timeline
null
null
true
https://api.github.com/repos/huggingface/datasets/issues/3751
https://api.github.com/repos/huggingface/datasets
https://api.github.com/repos/huggingface/datasets/issues/3751/labels{/name}
https://api.github.com/repos/huggingface/datasets/issues/3751/comments
https://api.github.com/repos/huggingface/datasets/issues/3751/events
https://github.com/huggingface/datasets/pull/3751
1,142,609,327
PR_kwDODunzps4zDw9_
3,751
Fix typo in train split name
{ "avatar_url": "https://avatars.githubusercontent.com/u/8515462?v=4", "events_url": "https://api.github.com/users/albertvillanova/events{/privacy}", "followers_url": "https://api.github.com/users/albertvillanova/followers", "following_url": "https://api.github.com/users/albertvillanova/following{/other_user}", "gists_url": "https://api.github.com/users/albertvillanova/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/albertvillanova", "id": 8515462, "login": "albertvillanova", "node_id": "MDQ6VXNlcjg1MTU0NjI=", "organizations_url": "https://api.github.com/users/albertvillanova/orgs", "received_events_url": "https://api.github.com/users/albertvillanova/received_events", "repos_url": "https://api.github.com/users/albertvillanova/repos", "site_admin": false, "starred_url": "https://api.github.com/users/albertvillanova/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/albertvillanova/subscriptions", "type": "User", "url": "https://api.github.com/users/albertvillanova" }
[]
closed
false
null
[]
null
0
2022-02-18T08:18:04Z
2022-02-18T14:28:52Z
2022-02-18T14:28:52Z
MEMBER
null
0
{ "diff_url": "https://github.com/huggingface/datasets/pull/3751.diff", "html_url": "https://github.com/huggingface/datasets/pull/3751", "merged_at": "2022-02-18T14:28:52Z", "patch_url": "https://github.com/huggingface/datasets/pull/3751.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/3751" }
In the README guide (and consequently in many datasets) there was a typo in the train split name: ``` | Tain | Valid | Test | ``` This PR: - fixes the typo in the train split name - fixes the column alignment of the split tables in the README guide and in all datasets.
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/3751/reactions" }
https://api.github.com/repos/huggingface/datasets/issues/3751/timeline
null
null
true
https://api.github.com/repos/huggingface/datasets/issues/3750
https://api.github.com/repos/huggingface/datasets
https://api.github.com/repos/huggingface/datasets/issues/3750/labels{/name}
https://api.github.com/repos/huggingface/datasets/issues/3750/comments
https://api.github.com/repos/huggingface/datasets/issues/3750/events
https://github.com/huggingface/datasets/issues/3750
1,142,408,331
I_kwDODunzps5EF8SL
3,750
`NonMatchingSplitsSizesError` for cats_vs_dogs dataset
{ "avatar_url": "https://avatars.githubusercontent.com/u/25360440?v=4", "events_url": "https://api.github.com/users/jaketae/events{/privacy}", "followers_url": "https://api.github.com/users/jaketae/followers", "following_url": "https://api.github.com/users/jaketae/following{/other_user}", "gists_url": "https://api.github.com/users/jaketae/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/jaketae", "id": 25360440, "login": "jaketae", "node_id": "MDQ6VXNlcjI1MzYwNDQw", "organizations_url": "https://api.github.com/users/jaketae/orgs", "received_events_url": "https://api.github.com/users/jaketae/received_events", "repos_url": "https://api.github.com/users/jaketae/repos", "site_admin": false, "starred_url": "https://api.github.com/users/jaketae/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/jaketae/subscriptions", "type": "User", "url": "https://api.github.com/users/jaketae" }
[ { "color": "d73a4a", "default": true, "description": "Something isn't working", "id": 1935892857, "name": "bug", "node_id": "MDU6TGFiZWwxOTM1ODkyODU3", "url": "https://api.github.com/repos/huggingface/datasets/labels/bug" } ]
closed
false
{ "avatar_url": "https://avatars.githubusercontent.com/u/8515462?v=4", "events_url": "https://api.github.com/users/albertvillanova/events{/privacy}", "followers_url": "https://api.github.com/users/albertvillanova/followers", "following_url": "https://api.github.com/users/albertvillanova/following{/other_user}", "gists_url": "https://api.github.com/users/albertvillanova/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/albertvillanova", "id": 8515462, "login": "albertvillanova", "node_id": "MDQ6VXNlcjg1MTU0NjI=", "organizations_url": "https://api.github.com/users/albertvillanova/orgs", "received_events_url": "https://api.github.com/users/albertvillanova/received_events", "repos_url": "https://api.github.com/users/albertvillanova/repos", "site_admin": false, "starred_url": "https://api.github.com/users/albertvillanova/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/albertvillanova/subscriptions", "type": "User", "url": "https://api.github.com/users/albertvillanova" }
[ { "avatar_url": "https://avatars.githubusercontent.com/u/8515462?v=4", "events_url": "https://api.github.com/users/albertvillanova/events{/privacy}", "followers_url": "https://api.github.com/users/albertvillanova/followers", "following_url": "https://api.github.com/users/albertvillanova/following{/other_user}", "gists_url": "https://api.github.com/users/albertvillanova/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/albertvillanova", "id": 8515462, "login": "albertvillanova", "node_id": "MDQ6VXNlcjg1MTU0NjI=", "organizations_url": "https://api.github.com/users/albertvillanova/orgs", "received_events_url": "https://api.github.com/users/albertvillanova/received_events", "repos_url": "https://api.github.com/users/albertvillanova/repos", "site_admin": false, "starred_url": "https://api.github.com/users/albertvillanova/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/albertvillanova/subscriptions", "type": "User", "url": "https://api.github.com/users/albertvillanova" } ]
null
1
2022-02-18T05:46:39Z
2022-02-18T14:56:11Z
2022-02-18T14:56:11Z
CONTRIBUTOR
null
null
null
## Describe the bug Cannot download cats_vs_dogs dataset due to `NonMatchingSplitsSizesError`. ## Steps to reproduce the bug ```python from datasets import load_dataset dataset = load_dataset("cats_vs_dogs") ``` ## Expected results Loading is successful. ## Actual results ``` NonMatchingSplitsSizesError: [{'expected': SplitInfo(name='train', num_bytes=7503250, num_examples=23422, dataset_name='cats_vs_dogs'), 'recorded': SplitInfo(name='train', num_bytes=7262410, num_examples=23410, dataset_name='cats_vs_dogs')}] ``` ## Environment info Reproduced on a fresh [Colab notebook](https://colab.research.google.com/drive/13GTvrSJbBGvL2ybDdXCBZwATd6FOkMub?usp=sharing). ## Additional Context Originally reported in https://github.com/huggingface/transformers/issues/15698. cc @mariosasko
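A possible stopgap while the metadata is being fixed (sketch only): skip the split-size verification, since it is the recorded counts in `dataset_infos.json` that are stale rather than the data itself.

```python
from datasets import load_dataset

# Bypasses NonMatchingSplitsSizesError by not checking the recorded split sizes.
dataset = load_dataset("cats_vs_dogs", ignore_verifications=True)
```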
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/3750/reactions" }
https://api.github.com/repos/huggingface/datasets/issues/3750/timeline
null
completed
false
https://api.github.com/repos/huggingface/datasets/issues/3749
https://api.github.com/repos/huggingface/datasets
https://api.github.com/repos/huggingface/datasets/issues/3749/labels{/name}
https://api.github.com/repos/huggingface/datasets/issues/3749/comments
https://api.github.com/repos/huggingface/datasets/issues/3749/events
https://github.com/huggingface/datasets/pull/3749
1,142,156,678
PR_kwDODunzps4zCKqg
3,749
Add tqdm arguments
{ "avatar_url": "https://avatars.githubusercontent.com/u/28087825?v=4", "events_url": "https://api.github.com/users/penguinwang96825/events{/privacy}", "followers_url": "https://api.github.com/users/penguinwang96825/followers", "following_url": "https://api.github.com/users/penguinwang96825/following{/other_user}", "gists_url": "https://api.github.com/users/penguinwang96825/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/penguinwang96825", "id": 28087825, "login": "penguinwang96825", "node_id": "MDQ6VXNlcjI4MDg3ODI1", "organizations_url": "https://api.github.com/users/penguinwang96825/orgs", "received_events_url": "https://api.github.com/users/penguinwang96825/received_events", "repos_url": "https://api.github.com/users/penguinwang96825/repos", "site_admin": false, "starred_url": "https://api.github.com/users/penguinwang96825/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/penguinwang96825/subscriptions", "type": "User", "url": "https://api.github.com/users/penguinwang96825" }
[]
closed
false
null
[]
null
6
2022-02-18T01:34:46Z
2022-03-08T09:38:48Z
2022-03-08T09:38:48Z
NONE
null
0
{ "diff_url": "https://github.com/huggingface/datasets/pull/3749.diff", "html_url": "https://github.com/huggingface/datasets/pull/3749", "merged_at": null, "patch_url": "https://github.com/huggingface/datasets/pull/3749.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/3749" }
In this PR, tqdm arguments can be passed to the `map()` function and similar methods, in order to make progress reporting more flexible.
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/3749/reactions" }
https://api.github.com/repos/huggingface/datasets/issues/3749/timeline
null
null
true
https://api.github.com/repos/huggingface/datasets/issues/3748
https://api.github.com/repos/huggingface/datasets
https://api.github.com/repos/huggingface/datasets/issues/3748/labels{/name}
https://api.github.com/repos/huggingface/datasets/issues/3748/comments
https://api.github.com/repos/huggingface/datasets/issues/3748/events
https://github.com/huggingface/datasets/pull/3748
1,142,128,763
PR_kwDODunzps4zCEyM
3,748
Add tqdm arguments
{ "avatar_url": "https://avatars.githubusercontent.com/u/28087825?v=4", "events_url": "https://api.github.com/users/penguinwang96825/events{/privacy}", "followers_url": "https://api.github.com/users/penguinwang96825/followers", "following_url": "https://api.github.com/users/penguinwang96825/following{/other_user}", "gists_url": "https://api.github.com/users/penguinwang96825/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/penguinwang96825", "id": 28087825, "login": "penguinwang96825", "node_id": "MDQ6VXNlcjI4MDg3ODI1", "organizations_url": "https://api.github.com/users/penguinwang96825/orgs", "received_events_url": "https://api.github.com/users/penguinwang96825/received_events", "repos_url": "https://api.github.com/users/penguinwang96825/repos", "site_admin": false, "starred_url": "https://api.github.com/users/penguinwang96825/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/penguinwang96825/subscriptions", "type": "User", "url": "https://api.github.com/users/penguinwang96825" }
[]
closed
false
null
[]
null
0
2022-02-18T00:47:55Z
2022-02-18T00:59:15Z
2022-02-18T00:59:15Z
NONE
null
0
{ "diff_url": "https://github.com/huggingface/datasets/pull/3748.diff", "html_url": "https://github.com/huggingface/datasets/pull/3748", "merged_at": null, "patch_url": "https://github.com/huggingface/datasets/pull/3748.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/3748" }
This PR contains two changes: 1. The progress bar can be shown by providing the length of the iterator. 2. `tqdm_kwargs` can be passed in, giving more control over the tqdm progress bar.
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/3748/reactions" }
https://api.github.com/repos/huggingface/datasets/issues/3748/timeline
null
null
true
https://api.github.com/repos/huggingface/datasets/issues/3747
https://api.github.com/repos/huggingface/datasets
https://api.github.com/repos/huggingface/datasets/issues/3747/labels{/name}
https://api.github.com/repos/huggingface/datasets/issues/3747/comments
https://api.github.com/repos/huggingface/datasets/issues/3747/events
https://github.com/huggingface/datasets/issues/3747
1,141,688,854
I_kwDODunzps5EDMoW
3,747
Passing invalid subset should throw an error
{ "avatar_url": "https://avatars.githubusercontent.com/u/13238952?v=4", "events_url": "https://api.github.com/users/jxmorris12/events{/privacy}", "followers_url": "https://api.github.com/users/jxmorris12/followers", "following_url": "https://api.github.com/users/jxmorris12/following{/other_user}", "gists_url": "https://api.github.com/users/jxmorris12/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/jxmorris12", "id": 13238952, "login": "jxmorris12", "node_id": "MDQ6VXNlcjEzMjM4OTUy", "organizations_url": "https://api.github.com/users/jxmorris12/orgs", "received_events_url": "https://api.github.com/users/jxmorris12/received_events", "repos_url": "https://api.github.com/users/jxmorris12/repos", "site_admin": false, "starred_url": "https://api.github.com/users/jxmorris12/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/jxmorris12/subscriptions", "type": "User", "url": "https://api.github.com/users/jxmorris12" }
[ { "color": "d73a4a", "default": true, "description": "Something isn't working", "id": 1935892857, "name": "bug", "node_id": "MDU6TGFiZWwxOTM1ODkyODU3", "url": "https://api.github.com/repos/huggingface/datasets/labels/bug" } ]
open
false
null
[]
null
0
2022-02-17T18:16:11Z
2022-02-17T18:16:11Z
null
CONTRIBUTOR
null
null
null
## Describe the bug Only some datasets have a subset (as in `load_dataset(name, subset)`). If you pass an invalid subset, an error should be thrown. ## Steps to reproduce the bug ```python import datasets datasets.load_dataset('rotten_tomatoes', 'asdfasdfa') ``` ## Expected results This should break, since `'asdfasdfa'` isn't a subset of the `rotten_tomatoes` dataset. ## Actual results This API call silently succeeds.
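Until the library raises on its own, a caller-side check against the list of known configurations is one way to catch this (sketch; for datasets without named subsets this list typically just contains `"default"`):

```python
from datasets import get_dataset_config_names, load_dataset

name, subset = "rotten_tomatoes", "asdfasdfa"
available = get_dataset_config_names(name)
if subset not in available:
    raise ValueError(f"Unknown subset {subset!r} for {name!r}; available: {available}")
dataset = load_dataset(name, subset)
```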
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/3747/reactions" }
https://api.github.com/repos/huggingface/datasets/issues/3747/timeline
null
null
false
https://api.github.com/repos/huggingface/datasets/issues/3746
https://api.github.com/repos/huggingface/datasets
https://api.github.com/repos/huggingface/datasets/issues/3746/labels{/name}
https://api.github.com/repos/huggingface/datasets/issues/3746/comments
https://api.github.com/repos/huggingface/datasets/issues/3746/events
https://github.com/huggingface/datasets/pull/3746
1,141,612,810
PR_kwDODunzps4zAS-C
3,746
Use the same seed to shuffle shards and metadata in streaming mode
{ "avatar_url": "https://avatars.githubusercontent.com/u/42851186?v=4", "events_url": "https://api.github.com/users/lhoestq/events{/privacy}", "followers_url": "https://api.github.com/users/lhoestq/followers", "following_url": "https://api.github.com/users/lhoestq/following{/other_user}", "gists_url": "https://api.github.com/users/lhoestq/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/lhoestq", "id": 42851186, "login": "lhoestq", "node_id": "MDQ6VXNlcjQyODUxMTg2", "organizations_url": "https://api.github.com/users/lhoestq/orgs", "received_events_url": "https://api.github.com/users/lhoestq/received_events", "repos_url": "https://api.github.com/users/lhoestq/repos", "site_admin": false, "starred_url": "https://api.github.com/users/lhoestq/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/lhoestq/subscriptions", "type": "User", "url": "https://api.github.com/users/lhoestq" }
[]
closed
false
null
[]
null
0
2022-02-17T17:06:31Z
2022-02-23T15:00:59Z
2022-02-23T15:00:58Z
MEMBER
null
0
{ "diff_url": "https://github.com/huggingface/datasets/pull/3746.diff", "html_url": "https://github.com/huggingface/datasets/pull/3746", "merged_at": "2022-02-23T15:00:58Z", "patch_url": "https://github.com/huggingface/datasets/pull/3746.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/3746" }
When shuffling in streaming mode, those two entangled lists are shuffled independently. In this PR I changed this to shuffle the lists of same length with the exact same seed, in order for the files and metadata to still be aligned. ```python gen_kwargs = { "files": [os.path.join(data_dir, filename) for filename in all_files], "metadata_files": [all_metadata[filename] for filename in all_files], } ``` IMO this is important to avoid big but silent issues. Fix https://github.com/huggingface/datasets/issues/3744
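A minimal standalone illustration of the aligned shuffling this PR implements (not the library code itself): shuffling two same-length lists with generators seeded identically applies the same permutation to both, so the element-wise pairing survives.

```python
import random

files = ["shard_0.tar", "shard_1.tar", "shard_2.tar", "shard_3.tar"]
metadata_files = ["shard_0.json", "shard_1.json", "shard_2.json", "shard_3.json"]

seed = 42
random.Random(seed).shuffle(files)           # same seed, same length ...
random.Random(seed).shuffle(metadata_files)  # ... hence the same permutation

# The i-th file still matches the i-th metadata file after shuffling.
assert [f.split(".")[0] for f in files] == [m.split(".")[0] for m in metadata_files]
```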
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/3746/reactions" }
https://api.github.com/repos/huggingface/datasets/issues/3746/timeline
null
null
true
https://api.github.com/repos/huggingface/datasets/issues/3745
https://api.github.com/repos/huggingface/datasets
https://api.github.com/repos/huggingface/datasets/issues/3745/labels{/name}
https://api.github.com/repos/huggingface/datasets/issues/3745/comments
https://api.github.com/repos/huggingface/datasets/issues/3745/events
https://github.com/huggingface/datasets/pull/3745
1,141,520,953
PR_kwDODunzps4y__m2
3,745
Add mIoU metric
{ "avatar_url": "https://avatars.githubusercontent.com/u/48327001?v=4", "events_url": "https://api.github.com/users/NielsRogge/events{/privacy}", "followers_url": "https://api.github.com/users/NielsRogge/followers", "following_url": "https://api.github.com/users/NielsRogge/following{/other_user}", "gists_url": "https://api.github.com/users/NielsRogge/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/NielsRogge", "id": 48327001, "login": "NielsRogge", "node_id": "MDQ6VXNlcjQ4MzI3MDAx", "organizations_url": "https://api.github.com/users/NielsRogge/orgs", "received_events_url": "https://api.github.com/users/NielsRogge/received_events", "repos_url": "https://api.github.com/users/NielsRogge/repos", "site_admin": false, "starred_url": "https://api.github.com/users/NielsRogge/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/NielsRogge/subscriptions", "type": "User", "url": "https://api.github.com/users/NielsRogge" }
[]
closed
false
null
[]
null
3
2022-02-17T15:52:17Z
2022-03-08T13:20:26Z
2022-03-08T13:20:26Z
CONTRIBUTOR
null
0
{ "diff_url": "https://github.com/huggingface/datasets/pull/3745.diff", "html_url": "https://github.com/huggingface/datasets/pull/3745", "merged_at": "2022-03-08T13:20:26Z", "patch_url": "https://github.com/huggingface/datasets/pull/3745.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/3745" }
This PR adds the mean Intersection-over-Union metric to the library, useful for tasks like semantic segmentation. It is entirely based on mmseg's [implementation](https://github.com/open-mmlab/mmsegmentation/blob/master/mmseg/core/evaluation/metrics.py). I've removed any PyTorch dependency and rely on NumPy only.
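For reference, an illustrative NumPy-only sketch of what per-class IoU and mean IoU compute for semantic segmentation; the argument names and return format here are assumptions for the example, not the metric's actual interface.

```python
import numpy as np


def mean_iou_sketch(predictions, references, num_labels, ignore_index=255):
    total_intersect = np.zeros(num_labels)
    total_union = np.zeros(num_labels)
    for pred, ref in zip(predictions, references):
        pred, ref = np.asarray(pred), np.asarray(ref)
        keep = ref != ignore_index          # drop pixels with the ignore label
        pred, ref = pred[keep], ref[keep]
        for label in range(num_labels):
            p, r = pred == label, ref == label
            total_intersect[label] += np.logical_and(p, r).sum()
            total_union[label] += np.logical_or(p, r).sum()
    iou = total_intersect / np.maximum(total_union, 1)  # avoid division by zero
    present = total_union > 0                           # only average over classes that occur
    return {"per_category_iou": iou, "mean_iou": float(iou[present].mean())}
```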
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/3745/reactions" }
https://api.github.com/repos/huggingface/datasets/issues/3745/timeline
null
null
true
https://api.github.com/repos/huggingface/datasets/issues/3744
https://api.github.com/repos/huggingface/datasets
https://api.github.com/repos/huggingface/datasets/issues/3744/labels{/name}
https://api.github.com/repos/huggingface/datasets/issues/3744/comments
https://api.github.com/repos/huggingface/datasets/issues/3744/events
https://github.com/huggingface/datasets/issues/3744
1,141,461,165
I_kwDODunzps5ECVCt
3,744
Better shards shuffling in streaming mode
{ "avatar_url": "https://avatars.githubusercontent.com/u/42851186?v=4", "events_url": "https://api.github.com/users/lhoestq/events{/privacy}", "followers_url": "https://api.github.com/users/lhoestq/followers", "following_url": "https://api.github.com/users/lhoestq/following{/other_user}", "gists_url": "https://api.github.com/users/lhoestq/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/lhoestq", "id": 42851186, "login": "lhoestq", "node_id": "MDQ6VXNlcjQyODUxMTg2", "organizations_url": "https://api.github.com/users/lhoestq/orgs", "received_events_url": "https://api.github.com/users/lhoestq/received_events", "repos_url": "https://api.github.com/users/lhoestq/repos", "site_admin": false, "starred_url": "https://api.github.com/users/lhoestq/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/lhoestq/subscriptions", "type": "User", "url": "https://api.github.com/users/lhoestq" }
[ { "color": "a2eeef", "default": true, "description": "New feature or request", "id": 1935892871, "name": "enhancement", "node_id": "MDU6TGFiZWwxOTM1ODkyODcx", "url": "https://api.github.com/repos/huggingface/datasets/labels/enhancement" }, { "color": "fef2c0", "default": false, "description": "", "id": 3287858981, "name": "streaming", "node_id": "MDU6TGFiZWwzMjg3ODU4OTgx", "url": "https://api.github.com/repos/huggingface/datasets/labels/streaming" } ]
closed
false
null
[]
null
0
2022-02-17T15:07:21Z
2022-02-23T15:00:58Z
2022-02-23T15:00:58Z
MEMBER
null
null
null
Sometimes a dataset script has a `_split_generators` that returns several files as well as the corresponding metadata of each file. It often happens that they end up in two separate lists in the `gen_kwargs`: ```python gen_kwargs = { "files": [os.path.join(data_dir, filename) for filename in all_files], "metadata_files": [all_metadata[filename] for filename in all_files], } ``` It happened for Multilingual Spoken Words for example in #3666 However currently **the two lists are shuffled independently** when shuffling the shards in streaming mode. This leads to `_generate_examples` not having the right metadata for each file. To prevent this issue I suggest that we always shuffle lists of the same length the exact same way to avoid such a big but silent issue. cc @polinaeterna
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/3744/reactions" }
https://api.github.com/repos/huggingface/datasets/issues/3744/timeline
null
completed
false
https://api.github.com/repos/huggingface/datasets/issues/3743
https://api.github.com/repos/huggingface/datasets
https://api.github.com/repos/huggingface/datasets/issues/3743/labels{/name}
https://api.github.com/repos/huggingface/datasets/issues/3743/comments
https://api.github.com/repos/huggingface/datasets/issues/3743/events
https://github.com/huggingface/datasets/pull/3743
1,141,176,011
PR_kwDODunzps4y-2Do
3,743
initial monash time series forecasting repository
{ "avatar_url": "https://avatars.githubusercontent.com/u/8100?v=4", "events_url": "https://api.github.com/users/kashif/events{/privacy}", "followers_url": "https://api.github.com/users/kashif/followers", "following_url": "https://api.github.com/users/kashif/following{/other_user}", "gists_url": "https://api.github.com/users/kashif/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/kashif", "id": 8100, "login": "kashif", "node_id": "MDQ6VXNlcjgxMDA=", "organizations_url": "https://api.github.com/users/kashif/orgs", "received_events_url": "https://api.github.com/users/kashif/received_events", "repos_url": "https://api.github.com/users/kashif/repos", "site_admin": false, "starred_url": "https://api.github.com/users/kashif/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/kashif/subscriptions", "type": "User", "url": "https://api.github.com/users/kashif" }
[]
closed
false
null
[]
null
3
2022-02-17T10:51:31Z
2022-03-21T09:54:41Z
2022-03-21T09:50:16Z
CONTRIBUTOR
null
0
{ "diff_url": "https://github.com/huggingface/datasets/pull/3743.diff", "html_url": "https://github.com/huggingface/datasets/pull/3743", "merged_at": "2022-03-21T09:50:16Z", "patch_url": "https://github.com/huggingface/datasets/pull/3743.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/3743" }
null
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/3743/reactions" }
https://api.github.com/repos/huggingface/datasets/issues/3743/timeline
null
null
true
https://api.github.com/repos/huggingface/datasets/issues/3742
https://api.github.com/repos/huggingface/datasets
https://api.github.com/repos/huggingface/datasets/issues/3742/labels{/name}
https://api.github.com/repos/huggingface/datasets/issues/3742/comments
https://api.github.com/repos/huggingface/datasets/issues/3742/events
https://github.com/huggingface/datasets/pull/3742
1,141,174,549
PR_kwDODunzps4y-1v5
3,742
Fix ValueError message formatting in int2str
{ "avatar_url": "https://avatars.githubusercontent.com/u/41182803?v=4", "events_url": "https://api.github.com/users/akulchik/events{/privacy}", "followers_url": "https://api.github.com/users/akulchik/followers", "following_url": "https://api.github.com/users/akulchik/following{/other_user}", "gists_url": "https://api.github.com/users/akulchik/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/akulchik", "id": 41182803, "login": "akulchik", "node_id": "MDQ6VXNlcjQxMTgyODAz", "organizations_url": "https://api.github.com/users/akulchik/orgs", "received_events_url": "https://api.github.com/users/akulchik/received_events", "repos_url": "https://api.github.com/users/akulchik/repos", "site_admin": false, "starred_url": "https://api.github.com/users/akulchik/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/akulchik/subscriptions", "type": "User", "url": "https://api.github.com/users/akulchik" }
[]
closed
false
null
[]
null
0
2022-02-17T10:50:08Z
2022-02-17T15:32:02Z
2022-02-17T15:32:02Z
CONTRIBUTOR
null
0
{ "diff_url": "https://github.com/huggingface/datasets/pull/3742.diff", "html_url": "https://github.com/huggingface/datasets/pull/3742", "merged_at": "2022-02-17T15:32:02Z", "patch_url": "https://github.com/huggingface/datasets/pull/3742.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/3742" }
Hi! I bumped into this particular `ValueError` during my work (because an instance of `np.int64` was passed instead of regular Python `int`), and so I had to `print(type(values))` myself. Apparently, it's just the missing `f` to make message an f-string. It ain't much for a contribution, but it's honest work. Hope it spares someone else a few seconds in the future 😃
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/3742/reactions" }
https://api.github.com/repos/huggingface/datasets/issues/3742/timeline
null
null
true
https://api.github.com/repos/huggingface/datasets/issues/3741
https://api.github.com/repos/huggingface/datasets
https://api.github.com/repos/huggingface/datasets/issues/3741/labels{/name}
https://api.github.com/repos/huggingface/datasets/issues/3741/comments
https://api.github.com/repos/huggingface/datasets/issues/3741/events
https://github.com/huggingface/datasets/pull/3741
1,141,132,649
PR_kwDODunzps4y-syt
3,741
Rm sphinx doc
{ "avatar_url": "https://avatars.githubusercontent.com/u/11827707?v=4", "events_url": "https://api.github.com/users/mishig25/events{/privacy}", "followers_url": "https://api.github.com/users/mishig25/followers", "following_url": "https://api.github.com/users/mishig25/following{/other_user}", "gists_url": "https://api.github.com/users/mishig25/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/mishig25", "id": 11827707, "login": "mishig25", "node_id": "MDQ6VXNlcjExODI3NzA3", "organizations_url": "https://api.github.com/users/mishig25/orgs", "received_events_url": "https://api.github.com/users/mishig25/received_events", "repos_url": "https://api.github.com/users/mishig25/repos", "site_admin": false, "starred_url": "https://api.github.com/users/mishig25/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/mishig25/subscriptions", "type": "User", "url": "https://api.github.com/users/mishig25" }
[]
closed
false
null
[]
null
0
2022-02-17T10:11:37Z
2022-02-17T10:15:17Z
2022-02-17T10:15:12Z
CONTRIBUTOR
null
0
{ "diff_url": "https://github.com/huggingface/datasets/pull/3741.diff", "html_url": "https://github.com/huggingface/datasets/pull/3741", "merged_at": null, "patch_url": "https://github.com/huggingface/datasets/pull/3741.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/3741" }
Checklist - [x] Update circle ci yaml - [x] Delete sphinx static & python files in docs dir - [x] Update readme in docs dir - [ ] Update docs config in setup.py
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/3741/reactions" }
https://api.github.com/repos/huggingface/datasets/issues/3741/timeline
null
null
true
https://api.github.com/repos/huggingface/datasets/issues/3740
https://api.github.com/repos/huggingface/datasets
https://api.github.com/repos/huggingface/datasets/issues/3740/labels{/name}
https://api.github.com/repos/huggingface/datasets/issues/3740/comments
https://api.github.com/repos/huggingface/datasets/issues/3740/events
https://github.com/huggingface/datasets/pull/3740
1,140,720,739
PR_kwDODunzps4y9XAP
3,740
Support streaming for pubmed
{ "avatar_url": "https://avatars.githubusercontent.com/u/77638579?v=4", "events_url": "https://api.github.com/users/abhi-mosaic/events{/privacy}", "followers_url": "https://api.github.com/users/abhi-mosaic/followers", "following_url": "https://api.github.com/users/abhi-mosaic/following{/other_user}", "gists_url": "https://api.github.com/users/abhi-mosaic/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/abhi-mosaic", "id": 77638579, "login": "abhi-mosaic", "node_id": "MDQ6VXNlcjc3NjM4NTc5", "organizations_url": "https://api.github.com/users/abhi-mosaic/orgs", "received_events_url": "https://api.github.com/users/abhi-mosaic/received_events", "repos_url": "https://api.github.com/users/abhi-mosaic/repos", "site_admin": false, "starred_url": "https://api.github.com/users/abhi-mosaic/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/abhi-mosaic/subscriptions", "type": "User", "url": "https://api.github.com/users/abhi-mosaic" }
[]
closed
false
null
[]
null
3
2022-02-17T00:18:22Z
2022-02-18T14:42:13Z
2022-02-18T14:42:13Z
CONTRIBUTOR
null
0
{ "diff_url": "https://github.com/huggingface/datasets/pull/3740.diff", "html_url": "https://github.com/huggingface/datasets/pull/3740", "merged_at": "2022-02-18T14:42:13Z", "patch_url": "https://github.com/huggingface/datasets/pull/3740.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/3740" }
This PR makes some minor changes to the `pubmed` dataset to allow for `streaming=True`. Fixes #3739. Basically, I followed the C4 dataset which works in streaming mode as an example, and made the following changes: * Change URL prefix from `ftp://` to `https://` * Explicilty `open` the filename and pass the XML contents to `etree.fromstring(xml_str)` The Github diff tool makes it look like the changes are larger than they are, sorry about that. I tested locally and the `pubmed` dataset now works in both normal and streaming modes. There is some overhead at the start of each shard in streaming mode as building the XML tree online is quite slow (each pubmed .xml.gz file is ~20MB), but the overhead gets amortized over all the samples in the shard. On my laptop with a single CPU worker I am able to stream at about ~600 samples/s.
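A standalone sketch of the pattern described in this PR, assuming a local, still-compressed PubMed shard (inside the dataset script the filename comes from the download manager and `open` is patched to work over streams): read the file explicitly and hand the XML bytes to `etree.fromstring` instead of letting `etree.parse` try to open the path itself.

```python
import gzip
import xml.etree.ElementTree as etree

filename = "pubmed21n0001.xml.gz"  # hypothetical local shard for illustration
with open(filename, "rb") as f:
    xml_bytes = gzip.decompress(f.read())

root = etree.fromstring(xml_bytes)
for article in root.iter("PubmedArticle"):
    # build and yield one example per article here
    pass
```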
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/3740/reactions" }
https://api.github.com/repos/huggingface/datasets/issues/3740/timeline
null
null
true
https://api.github.com/repos/huggingface/datasets/issues/3739
https://api.github.com/repos/huggingface/datasets
https://api.github.com/repos/huggingface/datasets/issues/3739/labels{/name}
https://api.github.com/repos/huggingface/datasets/issues/3739/comments
https://api.github.com/repos/huggingface/datasets/issues/3739/events
https://github.com/huggingface/datasets/issues/3739
1,140,329,189
I_kwDODunzps5D-Arl
3,739
Pubmed dataset does not work in streaming mode
{ "avatar_url": "https://avatars.githubusercontent.com/u/77638579?v=4", "events_url": "https://api.github.com/users/abhi-mosaic/events{/privacy}", "followers_url": "https://api.github.com/users/abhi-mosaic/followers", "following_url": "https://api.github.com/users/abhi-mosaic/following{/other_user}", "gists_url": "https://api.github.com/users/abhi-mosaic/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/abhi-mosaic", "id": 77638579, "login": "abhi-mosaic", "node_id": "MDQ6VXNlcjc3NjM4NTc5", "organizations_url": "https://api.github.com/users/abhi-mosaic/orgs", "received_events_url": "https://api.github.com/users/abhi-mosaic/received_events", "repos_url": "https://api.github.com/users/abhi-mosaic/repos", "site_admin": false, "starred_url": "https://api.github.com/users/abhi-mosaic/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/abhi-mosaic/subscriptions", "type": "User", "url": "https://api.github.com/users/abhi-mosaic" }
[ { "color": "d73a4a", "default": true, "description": "Something isn't working", "id": 1935892857, "name": "bug", "node_id": "MDU6TGFiZWwxOTM1ODkyODU3", "url": "https://api.github.com/repos/huggingface/datasets/labels/bug" } ]
closed
false
null
[]
null
1
2022-02-16T17:13:37Z
2022-02-18T14:42:13Z
2022-02-18T14:42:13Z
CONTRIBUTOR
null
null
null
## Describe the bug Trying to use the `pubmed` dataset with `streaming=True` fails. ## Steps to reproduce the bug ```python import datasets pubmed_train = datasets.load_dataset('pubmed', split='train', streaming=True) print (next(iter(pubmed_train))) ``` ## Expected results I would expect to see the first training sample from the pubmed dataset. ## Actual results ``` Traceback (most recent call last): File "<stdin>", line 1, in <module> File "/Users/abhinav/Documents/mosaicml/mosaicml_venv/lib/python3.8/site-packages/datasets/iterable_dataset.py", line 367, in __iter__ for key, example in self._iter(): File "/Users/abhinav/Documents/mosaicml/mosaicml_venv/lib/python3.8/site-packages/datasets/iterable_dataset.py", line 364, in _iter yield from ex_iterable File "/Users/abhinav/Documents/mosaicml/mosaicml_venv/lib/python3.8/site-packages/datasets/iterable_dataset.py", line 79, in __iter__ for key, example in self.generate_examples_fn(**self.kwargs): File "/Users/abhinav/.cache/huggingface/modules/datasets_modules/datasets/pubmed/9715addf10c42a7877a2149ae0c5f2fddabefc775cd1bd9b03ac3f012b86ce46/pubmed.py", line 373, in _generate_examples tree = etree.parse(filename) File "/Library/Developer/CommandLineTools/Library/Frameworks/Python3.framework/Versions/3.8/lib/python3.8/xml/etree/ElementTree.py", line 1202, in parse tree.parse(source, parser) File "/Library/Developer/CommandLineTools/Library/Frameworks/Python3.framework/Versions/3.8/lib/python3.8/xml/etree/ElementTree.py", line 584, in parse source = open(source, "rb") FileNotFoundError: [Errno 2] No such file or directory: 'gzip://pubmed21n0001.xml::ftp://ftp.ncbi.nlm.nih.gov/pubmed/baseline/pubmed21n0001.xml.gz' ``` ## Environment info <!-- You can run the command `datasets-cli env` and copy-and-paste its output below. --> - `datasets` version: 1.18.2 - Platform: macOS-11.4-x86_64-i386-64bit - Python version: 3.8.2 - PyArrow version: 6.0.0 ## Comments The error looks like an issue with `open` vs. `xopen` inside the `xml` package. It looks like it's trying to open the remote source URL, which has been edited with prefix `gzip://...`. Maybe there can be an explicit `xopen` before passing the raw data to `etree`, something like: ```python # Before tree = etree.parse(filename) root = tree.getroot() # After with xopen(filename) as f: data_str = f.read() root = etree.fromstring(data_str) ```
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/3739/reactions" }
https://api.github.com/repos/huggingface/datasets/issues/3739/timeline
null
completed
false
https://api.github.com/repos/huggingface/datasets/issues/3738
https://api.github.com/repos/huggingface/datasets
https://api.github.com/repos/huggingface/datasets/issues/3738/labels{/name}
https://api.github.com/repos/huggingface/datasets/issues/3738/comments
https://api.github.com/repos/huggingface/datasets/issues/3738/events
https://github.com/huggingface/datasets/issues/3738
1,140,164,253
I_kwDODunzps5D9Yad
3,738
For data-only datasets, streaming and non-streaming don't behave the same
{ "avatar_url": "https://avatars.githubusercontent.com/u/1676121?v=4", "events_url": "https://api.github.com/users/severo/events{/privacy}", "followers_url": "https://api.github.com/users/severo/followers", "following_url": "https://api.github.com/users/severo/following{/other_user}", "gists_url": "https://api.github.com/users/severo/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/severo", "id": 1676121, "login": "severo", "node_id": "MDQ6VXNlcjE2NzYxMjE=", "organizations_url": "https://api.github.com/users/severo/orgs", "received_events_url": "https://api.github.com/users/severo/received_events", "repos_url": "https://api.github.com/users/severo/repos", "site_admin": false, "starred_url": "https://api.github.com/users/severo/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/severo/subscriptions", "type": "User", "url": "https://api.github.com/users/severo" }
[ { "color": "d73a4a", "default": true, "description": "Something isn't working", "id": 1935892857, "name": "bug", "node_id": "MDU6TGFiZWwxOTM1ODkyODU3", "url": "https://api.github.com/repos/huggingface/datasets/labels/bug" } ]
open
false
null
[]
null
9
2022-02-16T15:20:57Z
2022-02-21T14:24:55Z
null
CONTRIBUTOR
null
null
null
See https://huggingface.co/datasets/huggingface/transformers-metadata: it only contains two JSON files. In streaming mode, the files are concatenated, and thus the rows might be dictionaries with different keys: ```python import datasets as ds iterable_dataset = ds.load_dataset("huggingface/transformers-metadata", split="train", streaming=True); rows = list(iterable_dataset.take(100)) rows[0] # {'model_type': 'albert', 'pytorch': True, 'tensorflow': True, 'flax': True, 'processor': 'AutoTokenizer'} rows[99] # {'model_class': 'BartModel', 'pipeline_tag': 'feature-extraction', 'auto_class': 'AutoModel'} ``` In normal mode, an exception is thrown: ```python import datasets as ds dataset = ds.load_dataset("huggingface/transformers-metadata", split="train"); ``` ``` ValueError: Couldn't cast model_class: string pipeline_tag: string auto_class: string to {'model_type': Value(dtype='string', id=None), 'pytorch': Value(dtype='bool', id=None), 'tensorflow': Value(dtype='bool', id=None), 'flax': Value(dtype='bool', id=None), 'processor': Value(dtype='string', id=None)} because column names don't match ```
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/3738/reactions" }
https://api.github.com/repos/huggingface/datasets/issues/3738/timeline
null
null
false
https://api.github.com/repos/huggingface/datasets/issues/3737
https://api.github.com/repos/huggingface/datasets
https://api.github.com/repos/huggingface/datasets/issues/3737/labels{/name}
https://api.github.com/repos/huggingface/datasets/issues/3737/comments
https://api.github.com/repos/huggingface/datasets/issues/3737/events
https://github.com/huggingface/datasets/pull/3737
1,140,148,050
PR_kwDODunzps4y7uFf
3,737
Make RedCaps streamable
{ "avatar_url": "https://avatars.githubusercontent.com/u/47462742?v=4", "events_url": "https://api.github.com/users/mariosasko/events{/privacy}", "followers_url": "https://api.github.com/users/mariosasko/followers", "following_url": "https://api.github.com/users/mariosasko/following{/other_user}", "gists_url": "https://api.github.com/users/mariosasko/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/mariosasko", "id": 47462742, "login": "mariosasko", "node_id": "MDQ6VXNlcjQ3NDYyNzQy", "organizations_url": "https://api.github.com/users/mariosasko/orgs", "received_events_url": "https://api.github.com/users/mariosasko/received_events", "repos_url": "https://api.github.com/users/mariosasko/repos", "site_admin": false, "starred_url": "https://api.github.com/users/mariosasko/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/mariosasko/subscriptions", "type": "User", "url": "https://api.github.com/users/mariosasko" }
[]
closed
false
null
[]
null
0
2022-02-16T15:12:23Z
2022-02-16T15:28:38Z
2022-02-16T15:28:37Z
CONTRIBUTOR
null
0
{ "diff_url": "https://github.com/huggingface/datasets/pull/3737.diff", "html_url": "https://github.com/huggingface/datasets/pull/3737", "merged_at": "2022-02-16T15:28:37Z", "patch_url": "https://github.com/huggingface/datasets/pull/3737.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/3737" }
Make RedCaps streamable. @lhoestq Using `data/redcaps_v1.0_annotations.zip` as a download URL gives an error locally when running `datasets-cli test` (will investigate this another time)
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/3737/reactions" }
https://api.github.com/repos/huggingface/datasets/issues/3737/timeline
null
null
true
https://api.github.com/repos/huggingface/datasets/issues/3736
https://api.github.com/repos/huggingface/datasets
https://api.github.com/repos/huggingface/datasets/issues/3736/labels{/name}
https://api.github.com/repos/huggingface/datasets/issues/3736/comments
https://api.github.com/repos/huggingface/datasets/issues/3736/events
https://github.com/huggingface/datasets/pull/3736
1,140,134,483
PR_kwDODunzps4y7rMR
3,736
Local paths in common voice
{ "avatar_url": "https://avatars.githubusercontent.com/u/42851186?v=4", "events_url": "https://api.github.com/users/lhoestq/events{/privacy}", "followers_url": "https://api.github.com/users/lhoestq/followers", "following_url": "https://api.github.com/users/lhoestq/following{/other_user}", "gists_url": "https://api.github.com/users/lhoestq/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/lhoestq", "id": 42851186, "login": "lhoestq", "node_id": "MDQ6VXNlcjQyODUxMTg2", "organizations_url": "https://api.github.com/users/lhoestq/orgs", "received_events_url": "https://api.github.com/users/lhoestq/received_events", "repos_url": "https://api.github.com/users/lhoestq/repos", "site_admin": false, "starred_url": "https://api.github.com/users/lhoestq/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/lhoestq/subscriptions", "type": "User", "url": "https://api.github.com/users/lhoestq" }
[]
closed
false
null
[]
null
2
2022-02-16T15:01:29Z
2022-09-21T14:58:38Z
2022-02-22T09:13:43Z
MEMBER
null
0
{ "diff_url": "https://github.com/huggingface/datasets/pull/3736.diff", "html_url": "https://github.com/huggingface/datasets/pull/3736", "merged_at": "2022-02-22T09:13:43Z", "patch_url": "https://github.com/huggingface/datasets/pull/3736.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/3736" }
Continuation of https://github.com/huggingface/datasets/pull/3664: - pass the `streaming` parameter to _split_generator - update @anton-l's code to use this parameter for `common_voice` - add a comment to explain why we use `download_and_extract` in non-streaming and `iter_archive` in streaming Now the `common_voice` dataset has a local path back in `ds["path"]`, and this field is `None` in streaming mode. cc @patrickvonplaten @anton-l @albertvillanova Fix #3663.
{ "+1": 1, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 1, "url": "https://api.github.com/repos/huggingface/datasets/issues/3736/reactions" }
https://api.github.com/repos/huggingface/datasets/issues/3736/timeline
null
null
true
https://api.github.com/repos/huggingface/datasets/issues/3735
https://api.github.com/repos/huggingface/datasets
https://api.github.com/repos/huggingface/datasets/issues/3735/labels{/name}
https://api.github.com/repos/huggingface/datasets/issues/3735/comments
https://api.github.com/repos/huggingface/datasets/issues/3735/events
https://github.com/huggingface/datasets/issues/3735
1,140,087,891
I_kwDODunzps5D9FxT
3,735
Performance of `datasets` at scale
{ "avatar_url": "https://avatars.githubusercontent.com/u/8264887?v=4", "events_url": "https://api.github.com/users/lvwerra/events{/privacy}", "followers_url": "https://api.github.com/users/lvwerra/followers", "following_url": "https://api.github.com/users/lvwerra/following{/other_user}", "gists_url": "https://api.github.com/users/lvwerra/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/lvwerra", "id": 8264887, "login": "lvwerra", "node_id": "MDQ6VXNlcjgyNjQ4ODc=", "organizations_url": "https://api.github.com/users/lvwerra/orgs", "received_events_url": "https://api.github.com/users/lvwerra/received_events", "repos_url": "https://api.github.com/users/lvwerra/repos", "site_admin": false, "starred_url": "https://api.github.com/users/lvwerra/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/lvwerra/subscriptions", "type": "User", "url": "https://api.github.com/users/lvwerra" }
[]
open
false
null
[]
null
5
2022-02-16T14:23:32Z
2022-03-15T09:15:29Z
null
MEMBER
null
null
null
# Performance of `datasets` at 1TB scale ## What is this? During the processing of a large dataset I monitored the performance of the `datasets` library to see if there are any bottlenecks. The insights from this analysis could guide decision making to improve the performance of the library. ## Dataset The dataset is a 1.1TB extract from GitHub with 120M code files and is stored as 5000 `.json.gz` files. The goal of the preprocessing is to remove duplicates and filter files based on their stats. While the calculation of the hashes for deduplication and of the stats for filtering can be parallelized, the filtering itself runs in a single process. After processing, the files are pushed to the hub. ## Machine The experiment was run on an `m1` machine on GCP with 96 CPU cores and 1.3TB RAM. ## Performance breakdown - Loading the data **3.5h** (_30sec_ from cache) - **1h57min** single core loading (not sure what is going on here, corresponds to second progress bar) - **1h10min** multi core json reading - **20min** remaining time before and after the two main processes mentioned above - Process the data **2h** (_20min_ from cache) - **20min** Getting ready for processing - **40min** Hashing and file stats (96 workers) - **58min** Deduplication filtering (single worker) - Save parquet files **5h** - Saving 1000 parquet files (16 workers) - Push to hub **37min** - **34min** git add - **3min** git push (several hours with `Repository.git_push()`) ## Conclusion It appears that loading and saving the data are the main bottlenecks at that scale (**8.5h**), whereas processing (**2h**) and pushing the data to the hub (**0.5h**) are relatively fast. To optimize the performance at this scale it would make sense to consider such an end-to-end example and target the bottlenecks, which seem to be loading from and saving to disk. The processing itself seems to run relatively fast. ## Notes - the map operation on a 1TB dataset with 96 workers requires >1TB RAM - the map operation does not maintain 100% CPU utilization with 96 workers - sometimes when the script crashes all the data files have a corresponding `*.lock` file in the data folder (or multiple, e.g. `*.lock.lock` when it happened several times). This causes the cache **not** to be triggered (which is significant at that scale) - I guess because there are new data files - parallelizing `to_parquet` decreased the saving time from 17h to 5h; however, adding more workers at this point had almost no effect. Not sure if this is: a) a bug in my parallelization logic, b) an I/O limit when loading data from disk to memory, or c) an I/O limit when writing from memory to disk. - Using `Repository.git_push()` was much slower than using the command line `git-lfs` - 10-20MB/s vs. 300MB/s! The `Dataset.push_to_hub()` function is even slower as it only uploads one file at a time at only a few MB/s, whereas `Repository.git_push()` pushes files in parallel (each at a similar speed). cc @lhoestq @julien-c @LysandreJik @SBrandeis
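To make the "parallelizing `to_parquet`" note above concrete, here is a minimal sketch of shard-wise parquet export with a process pool. The shard count, output paths, and the small stand-in dataset are assumptions for illustration, not the setup used in the experiment:

```python
import os
from multiprocessing import Pool

from datasets import load_dataset

NUM_SHARDS = 16  # assumed value, not the 1000 files mentioned above

def export_shard(index: int) -> str:
    # Each worker reopens the (memory-mapped, cached) dataset and writes one contiguous shard.
    ds = load_dataset("lvwerra/red-wine", split="train")  # small public dataset as a stand-in
    shard = ds.shard(num_shards=NUM_SHARDS, index=index, contiguous=True)
    path = f"data/shard-{index:05d}.parquet"
    shard.to_parquet(path)
    return path

if __name__ == "__main__":
    os.makedirs("data", exist_ok=True)
    with Pool(processes=NUM_SHARDS) as pool:
        paths = pool.map(export_shard, range(NUM_SHARDS))
    print(f"wrote {len(paths)} parquet shards")
```

Whether the bottleneck then sits in the parallelization logic or in disk I/O, as the notes above ask, can be narrowed down by watching disk throughput while this runs.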
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 5, "heart": 13, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 18, "url": "https://api.github.com/repos/huggingface/datasets/issues/3735/reactions" }
https://api.github.com/repos/huggingface/datasets/issues/3735/timeline
null
null
false
https://api.github.com/repos/huggingface/datasets/issues/3734
https://api.github.com/repos/huggingface/datasets
https://api.github.com/repos/huggingface/datasets/issues/3734/labels{/name}
https://api.github.com/repos/huggingface/datasets/issues/3734/comments
https://api.github.com/repos/huggingface/datasets/issues/3734/events
https://github.com/huggingface/datasets/pull/3734
1,140,050,336
PR_kwDODunzps4y7ZU2
3,734
Fix bugs in NewsQA dataset
{ "avatar_url": "https://avatars.githubusercontent.com/u/8515462?v=4", "events_url": "https://api.github.com/users/albertvillanova/events{/privacy}", "followers_url": "https://api.github.com/users/albertvillanova/followers", "following_url": "https://api.github.com/users/albertvillanova/following{/other_user}", "gists_url": "https://api.github.com/users/albertvillanova/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/albertvillanova", "id": 8515462, "login": "albertvillanova", "node_id": "MDQ6VXNlcjg1MTU0NjI=", "organizations_url": "https://api.github.com/users/albertvillanova/orgs", "received_events_url": "https://api.github.com/users/albertvillanova/received_events", "repos_url": "https://api.github.com/users/albertvillanova/repos", "site_admin": false, "starred_url": "https://api.github.com/users/albertvillanova/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/albertvillanova/subscriptions", "type": "User", "url": "https://api.github.com/users/albertvillanova" }
[]
closed
false
null
[]
null
0
2022-02-16T13:51:28Z
2022-02-17T07:54:26Z
2022-02-17T07:54:25Z
MEMBER
null
0
{ "diff_url": "https://github.com/huggingface/datasets/pull/3734.diff", "html_url": "https://github.com/huggingface/datasets/pull/3734", "merged_at": "2022-02-17T07:54:25Z", "patch_url": "https://github.com/huggingface/datasets/pull/3734.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/3734" }
Fix #3733.
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/3734/reactions" }
https://api.github.com/repos/huggingface/datasets/issues/3734/timeline
null
null
true
https://api.github.com/repos/huggingface/datasets/issues/3733
https://api.github.com/repos/huggingface/datasets
https://api.github.com/repos/huggingface/datasets/issues/3733/labels{/name}
https://api.github.com/repos/huggingface/datasets/issues/3733/comments
https://api.github.com/repos/huggingface/datasets/issues/3733/events
https://github.com/huggingface/datasets/issues/3733
1,140,011,378
I_kwDODunzps5D8zFy
3,733
Bugs in NewsQA dataset
{ "avatar_url": "https://avatars.githubusercontent.com/u/8515462?v=4", "events_url": "https://api.github.com/users/albertvillanova/events{/privacy}", "followers_url": "https://api.github.com/users/albertvillanova/followers", "following_url": "https://api.github.com/users/albertvillanova/following{/other_user}", "gists_url": "https://api.github.com/users/albertvillanova/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/albertvillanova", "id": 8515462, "login": "albertvillanova", "node_id": "MDQ6VXNlcjg1MTU0NjI=", "organizations_url": "https://api.github.com/users/albertvillanova/orgs", "received_events_url": "https://api.github.com/users/albertvillanova/received_events", "repos_url": "https://api.github.com/users/albertvillanova/repos", "site_admin": false, "starred_url": "https://api.github.com/users/albertvillanova/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/albertvillanova/subscriptions", "type": "User", "url": "https://api.github.com/users/albertvillanova" }
[ { "color": "d73a4a", "default": true, "description": "Something isn't working", "id": 1935892857, "name": "bug", "node_id": "MDU6TGFiZWwxOTM1ODkyODU3", "url": "https://api.github.com/repos/huggingface/datasets/labels/bug" } ]
closed
false
{ "avatar_url": "https://avatars.githubusercontent.com/u/8515462?v=4", "events_url": "https://api.github.com/users/albertvillanova/events{/privacy}", "followers_url": "https://api.github.com/users/albertvillanova/followers", "following_url": "https://api.github.com/users/albertvillanova/following{/other_user}", "gists_url": "https://api.github.com/users/albertvillanova/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/albertvillanova", "id": 8515462, "login": "albertvillanova", "node_id": "MDQ6VXNlcjg1MTU0NjI=", "organizations_url": "https://api.github.com/users/albertvillanova/orgs", "received_events_url": "https://api.github.com/users/albertvillanova/received_events", "repos_url": "https://api.github.com/users/albertvillanova/repos", "site_admin": false, "starred_url": "https://api.github.com/users/albertvillanova/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/albertvillanova/subscriptions", "type": "User", "url": "https://api.github.com/users/albertvillanova" }
[ { "avatar_url": "https://avatars.githubusercontent.com/u/8515462?v=4", "events_url": "https://api.github.com/users/albertvillanova/events{/privacy}", "followers_url": "https://api.github.com/users/albertvillanova/followers", "following_url": "https://api.github.com/users/albertvillanova/following{/other_user}", "gists_url": "https://api.github.com/users/albertvillanova/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/albertvillanova", "id": 8515462, "login": "albertvillanova", "node_id": "MDQ6VXNlcjg1MTU0NjI=", "organizations_url": "https://api.github.com/users/albertvillanova/orgs", "received_events_url": "https://api.github.com/users/albertvillanova/received_events", "repos_url": "https://api.github.com/users/albertvillanova/repos", "site_admin": false, "starred_url": "https://api.github.com/users/albertvillanova/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/albertvillanova/subscriptions", "type": "User", "url": "https://api.github.com/users/albertvillanova" } ]
null
0
2022-02-16T13:17:37Z
2022-02-17T07:54:25Z
2022-02-17T07:54:25Z
MEMBER
null
null
null
## Describe the bug NewsQA dataset has the following bugs: - the field `validated_answers` is an exact copy of the field `answers` but with the addition of `'count': [0]` to each dict - the field `badQuestion` does not appear in `answers` nor `validated_answers` ## Steps to reproduce the bug By inspecting the dataset script we can see that: - the parsing of `validated_answers` is a copy-paste of the one for `answers` - the `badQuestion` field is ignored in the parsing of both `answers` and `validated_answers`
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/3733/reactions" }
https://api.github.com/repos/huggingface/datasets/issues/3733/timeline
null
completed
false
https://api.github.com/repos/huggingface/datasets/issues/3732
https://api.github.com/repos/huggingface/datasets
https://api.github.com/repos/huggingface/datasets/issues/3732/labels{/name}
https://api.github.com/repos/huggingface/datasets/issues/3732/comments
https://api.github.com/repos/huggingface/datasets/issues/3732/events
https://github.com/huggingface/datasets/pull/3732
1,140,004,022
PR_kwDODunzps4y7PTU
3,732
Support streaming in size estimation function in `push_to_hub`
{ "avatar_url": "https://avatars.githubusercontent.com/u/47462742?v=4", "events_url": "https://api.github.com/users/mariosasko/events{/privacy}", "followers_url": "https://api.github.com/users/mariosasko/followers", "following_url": "https://api.github.com/users/mariosasko/following{/other_user}", "gists_url": "https://api.github.com/users/mariosasko/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/mariosasko", "id": 47462742, "login": "mariosasko", "node_id": "MDQ6VXNlcjQ3NDYyNzQy", "organizations_url": "https://api.github.com/users/mariosasko/orgs", "received_events_url": "https://api.github.com/users/mariosasko/received_events", "repos_url": "https://api.github.com/users/mariosasko/repos", "site_admin": false, "starred_url": "https://api.github.com/users/mariosasko/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/mariosasko/subscriptions", "type": "User", "url": "https://api.github.com/users/mariosasko" }
[]
closed
false
null
[]
null
2
2022-02-16T13:10:48Z
2022-02-21T18:18:45Z
2022-02-21T18:18:44Z
CONTRIBUTOR
null
0
{ "diff_url": "https://github.com/huggingface/datasets/pull/3732.diff", "html_url": "https://github.com/huggingface/datasets/pull/3732", "merged_at": "2022-02-21T18:18:44Z", "patch_url": "https://github.com/huggingface/datasets/pull/3732.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/3732" }
This PR adds the streamable version of `os.path.getsize` (`fsspec` can return `None`, so we fall back to `fs.open` to make it more robust) to account for possible streamable paths in the nested `extra_nbytes_visitor` function inside `push_to_hub`.
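As an illustration of the idea described above, a minimal sketch of a streaming-aware size helper: try the filesystem's size metadata first and fall back to opening the stream when it is unavailable. The function name `xgetsize` and the exact fallback are assumptions, not a copy of the merged code:

```python
import os

import fsspec

def xgetsize(path: str) -> int:
    """Return the size in bytes of a local or remote (streamable) path."""
    if os.path.exists(path):
        return os.path.getsize(path)
    fs, _, (resolved,) = fsspec.get_fs_token_paths(path)
    size = fs.size(resolved)
    if size is None:
        # Some filesystems can't report a size up front: read the stream and count bytes.
        with fs.open(resolved, "rb") as f:
            size = sum(len(chunk) for chunk in iter(lambda: f.read(1 << 20), b""))
    return size
```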
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/3732/reactions" }
https://api.github.com/repos/huggingface/datasets/issues/3732/timeline
null
null
true
https://api.github.com/repos/huggingface/datasets/issues/3731
https://api.github.com/repos/huggingface/datasets
https://api.github.com/repos/huggingface/datasets/issues/3731/labels{/name}
https://api.github.com/repos/huggingface/datasets/issues/3731/comments
https://api.github.com/repos/huggingface/datasets/issues/3731/events
https://github.com/huggingface/datasets/pull/3731
1,139,626,362
PR_kwDODunzps4y5-hi
3,731
Fix Multi-News dataset metadata and card
{ "avatar_url": "https://avatars.githubusercontent.com/u/8515462?v=4", "events_url": "https://api.github.com/users/albertvillanova/events{/privacy}", "followers_url": "https://api.github.com/users/albertvillanova/followers", "following_url": "https://api.github.com/users/albertvillanova/following{/other_user}", "gists_url": "https://api.github.com/users/albertvillanova/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/albertvillanova", "id": 8515462, "login": "albertvillanova", "node_id": "MDQ6VXNlcjg1MTU0NjI=", "organizations_url": "https://api.github.com/users/albertvillanova/orgs", "received_events_url": "https://api.github.com/users/albertvillanova/received_events", "repos_url": "https://api.github.com/users/albertvillanova/repos", "site_admin": false, "starred_url": "https://api.github.com/users/albertvillanova/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/albertvillanova/subscriptions", "type": "User", "url": "https://api.github.com/users/albertvillanova" }
[]
closed
false
null
[]
null
0
2022-02-16T07:14:57Z
2022-02-16T08:48:47Z
2022-02-16T08:48:47Z
MEMBER
null
0
{ "diff_url": "https://github.com/huggingface/datasets/pull/3731.diff", "html_url": "https://github.com/huggingface/datasets/pull/3731", "merged_at": "2022-02-16T08:48:46Z", "patch_url": "https://github.com/huggingface/datasets/pull/3731.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/3731" }
Fix #3730.
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/3731/reactions" }
https://api.github.com/repos/huggingface/datasets/issues/3731/timeline
null
null
true
https://api.github.com/repos/huggingface/datasets/issues/3730
https://api.github.com/repos/huggingface/datasets
https://api.github.com/repos/huggingface/datasets/issues/3730/labels{/name}
https://api.github.com/repos/huggingface/datasets/issues/3730/comments
https://api.github.com/repos/huggingface/datasets/issues/3730/events
https://github.com/huggingface/datasets/issues/3730
1,139,545,613
I_kwDODunzps5D7BYN
3,730
Checksum Error when loading multi-news dataset
{ "avatar_url": "https://avatars.githubusercontent.com/u/60560991?v=4", "events_url": "https://api.github.com/users/byw2/events{/privacy}", "followers_url": "https://api.github.com/users/byw2/followers", "following_url": "https://api.github.com/users/byw2/following{/other_user}", "gists_url": "https://api.github.com/users/byw2/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/byw2", "id": 60560991, "login": "byw2", "node_id": "MDQ6VXNlcjYwNTYwOTkx", "organizations_url": "https://api.github.com/users/byw2/orgs", "received_events_url": "https://api.github.com/users/byw2/received_events", "repos_url": "https://api.github.com/users/byw2/repos", "site_admin": false, "starred_url": "https://api.github.com/users/byw2/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/byw2/subscriptions", "type": "User", "url": "https://api.github.com/users/byw2" }
[ { "color": "d73a4a", "default": true, "description": "Something isn't working", "id": 1935892857, "name": "bug", "node_id": "MDU6TGFiZWwxOTM1ODkyODU3", "url": "https://api.github.com/repos/huggingface/datasets/labels/bug" } ]
closed
false
{ "avatar_url": "https://avatars.githubusercontent.com/u/8515462?v=4", "events_url": "https://api.github.com/users/albertvillanova/events{/privacy}", "followers_url": "https://api.github.com/users/albertvillanova/followers", "following_url": "https://api.github.com/users/albertvillanova/following{/other_user}", "gists_url": "https://api.github.com/users/albertvillanova/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/albertvillanova", "id": 8515462, "login": "albertvillanova", "node_id": "MDQ6VXNlcjg1MTU0NjI=", "organizations_url": "https://api.github.com/users/albertvillanova/orgs", "received_events_url": "https://api.github.com/users/albertvillanova/received_events", "repos_url": "https://api.github.com/users/albertvillanova/repos", "site_admin": false, "starred_url": "https://api.github.com/users/albertvillanova/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/albertvillanova/subscriptions", "type": "User", "url": "https://api.github.com/users/albertvillanova" }
[ { "avatar_url": "https://avatars.githubusercontent.com/u/8515462?v=4", "events_url": "https://api.github.com/users/albertvillanova/events{/privacy}", "followers_url": "https://api.github.com/users/albertvillanova/followers", "following_url": "https://api.github.com/users/albertvillanova/following{/other_user}", "gists_url": "https://api.github.com/users/albertvillanova/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/albertvillanova", "id": 8515462, "login": "albertvillanova", "node_id": "MDQ6VXNlcjg1MTU0NjI=", "organizations_url": "https://api.github.com/users/albertvillanova/orgs", "received_events_url": "https://api.github.com/users/albertvillanova/received_events", "repos_url": "https://api.github.com/users/albertvillanova/repos", "site_admin": false, "starred_url": "https://api.github.com/users/albertvillanova/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/albertvillanova/subscriptions", "type": "User", "url": "https://api.github.com/users/albertvillanova" } ]
null
1
2022-02-16T05:11:08Z
2022-02-16T20:05:06Z
2022-02-16T08:48:46Z
NONE
null
null
null
## Describe the bug When using the `load_dataset` function from the datasets module to load the Multi-News dataset, it does not load the dataset but throws a Checksum Error instead. ## Steps to reproduce the bug ```python from datasets import load_dataset dataset = load_dataset("multi_news") ``` ## Expected results Should download and load the Multi-News dataset. ## Actual results Throws the following error and cannot load the data successfully: ``` NonMatchingChecksumError: Checksums didn't match for dataset source files: ['https://drive.google.com/uc?export=download&id=1vRY2wM6rlOZrf9exGTm5pXj5ExlVwJ0C'] ``` Could this issue please be looked at? Thanks!
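While waiting for the metadata fix, a workaround sometimes used for stale-checksum errors is to skip the verification step. This is only a sketch of that idea: it assumes the installed `datasets` version still accepts the `ignore_verifications` argument, and it disables a safety check, so the downloaded file may still be wrong:

```python
from datasets import load_dataset

# Bypasses the recorded checksum comparison; use only if you trust the current download.
dataset = load_dataset("multi_news", ignore_verifications=True)
```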
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/3730/reactions" }
https://api.github.com/repos/huggingface/datasets/issues/3730/timeline
null
completed
false
https://api.github.com/repos/huggingface/datasets/issues/3729
https://api.github.com/repos/huggingface/datasets
https://api.github.com/repos/huggingface/datasets/issues/3729/labels{/name}
https://api.github.com/repos/huggingface/datasets/issues/3729/comments
https://api.github.com/repos/huggingface/datasets/issues/3729/events
https://github.com/huggingface/datasets/issues/3729
1,139,398,442
I_kwDODunzps5D6dcq
3,729
Wrong number of examples when loading a text dataset
{ "avatar_url": "https://avatars.githubusercontent.com/u/58376804?v=4", "events_url": "https://api.github.com/users/kg-nlp/events{/privacy}", "followers_url": "https://api.github.com/users/kg-nlp/followers", "following_url": "https://api.github.com/users/kg-nlp/following{/other_user}", "gists_url": "https://api.github.com/users/kg-nlp/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/kg-nlp", "id": 58376804, "login": "kg-nlp", "node_id": "MDQ6VXNlcjU4Mzc2ODA0", "organizations_url": "https://api.github.com/users/kg-nlp/orgs", "received_events_url": "https://api.github.com/users/kg-nlp/received_events", "repos_url": "https://api.github.com/users/kg-nlp/repos", "site_admin": false, "starred_url": "https://api.github.com/users/kg-nlp/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/kg-nlp/subscriptions", "type": "User", "url": "https://api.github.com/users/kg-nlp" }
[ { "color": "d73a4a", "default": true, "description": "Something isn't working", "id": 1935892857, "name": "bug", "node_id": "MDU6TGFiZWwxOTM1ODkyODU3", "url": "https://api.github.com/repos/huggingface/datasets/labels/bug" } ]
closed
false
{ "avatar_url": "https://avatars.githubusercontent.com/u/8515462?v=4", "events_url": "https://api.github.com/users/albertvillanova/events{/privacy}", "followers_url": "https://api.github.com/users/albertvillanova/followers", "following_url": "https://api.github.com/users/albertvillanova/following{/other_user}", "gists_url": "https://api.github.com/users/albertvillanova/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/albertvillanova", "id": 8515462, "login": "albertvillanova", "node_id": "MDQ6VXNlcjg1MTU0NjI=", "organizations_url": "https://api.github.com/users/albertvillanova/orgs", "received_events_url": "https://api.github.com/users/albertvillanova/received_events", "repos_url": "https://api.github.com/users/albertvillanova/repos", "site_admin": false, "starred_url": "https://api.github.com/users/albertvillanova/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/albertvillanova/subscriptions", "type": "User", "url": "https://api.github.com/users/albertvillanova" }
[ { "avatar_url": "https://avatars.githubusercontent.com/u/8515462?v=4", "events_url": "https://api.github.com/users/albertvillanova/events{/privacy}", "followers_url": "https://api.github.com/users/albertvillanova/followers", "following_url": "https://api.github.com/users/albertvillanova/following{/other_user}", "gists_url": "https://api.github.com/users/albertvillanova/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/albertvillanova", "id": 8515462, "login": "albertvillanova", "node_id": "MDQ6VXNlcjg1MTU0NjI=", "organizations_url": "https://api.github.com/users/albertvillanova/orgs", "received_events_url": "https://api.github.com/users/albertvillanova/received_events", "repos_url": "https://api.github.com/users/albertvillanova/repos", "site_admin": false, "starred_url": "https://api.github.com/users/albertvillanova/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/albertvillanova/subscriptions", "type": "User", "url": "https://api.github.com/users/albertvillanova" } ]
null
2
2022-02-16T01:13:31Z
2022-03-15T16:16:09Z
2022-03-15T16:16:09Z
NONE
null
null
null
## Describe the bug When I use `load_dataset` to read a txt file, I find that the reported number of samples is incorrect. ## Steps to reproduce the bug ``` fr = open('train.txt','r',encoding='utf-8').readlines() print(len(fr)) # 1199637 datasets = load_dataset('text', data_files={'train': ['train.txt']}, streaming=False) print(len(datasets['train'])) # 1199649 ``` I also used a command line operation to verify it ``` $ wc -l train.txt 1199637 train.txt ``` ## Expected results The number of examples in the loaded dataset should match the number of lines in the file. ## Environment info <!-- You can run the command `datasets-cli env` and copy-and-paste its output below. --> - `datasets` version: 1.8.3 - Platform: Windows & Linux - Python version: 3.7 - PyArrow version: 6.0.1
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/3729/reactions" }
https://api.github.com/repos/huggingface/datasets/issues/3729/timeline
null
completed
false
https://api.github.com/repos/huggingface/datasets/issues/3728
https://api.github.com/repos/huggingface/datasets
https://api.github.com/repos/huggingface/datasets/issues/3728/labels{/name}
https://api.github.com/repos/huggingface/datasets/issues/3728/comments
https://api.github.com/repos/huggingface/datasets/issues/3728/events
https://github.com/huggingface/datasets/issues/3728
1,139,303,614
I_kwDODunzps5D6GS-
3,728
VoxPopuli
{ "avatar_url": "https://avatars.githubusercontent.com/u/16107619?v=4", "events_url": "https://api.github.com/users/VictorSanh/events{/privacy}", "followers_url": "https://api.github.com/users/VictorSanh/followers", "following_url": "https://api.github.com/users/VictorSanh/following{/other_user}", "gists_url": "https://api.github.com/users/VictorSanh/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/VictorSanh", "id": 16107619, "login": "VictorSanh", "node_id": "MDQ6VXNlcjE2MTA3NjE5", "organizations_url": "https://api.github.com/users/VictorSanh/orgs", "received_events_url": "https://api.github.com/users/VictorSanh/received_events", "repos_url": "https://api.github.com/users/VictorSanh/repos", "site_admin": false, "starred_url": "https://api.github.com/users/VictorSanh/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/VictorSanh/subscriptions", "type": "User", "url": "https://api.github.com/users/VictorSanh" }
[ { "color": "e99695", "default": false, "description": "Requesting to add a new dataset", "id": 2067376369, "name": "dataset request", "node_id": "MDU6TGFiZWwyMDY3Mzc2MzY5", "url": "https://api.github.com/repos/huggingface/datasets/labels/dataset%20request" } ]
closed
false
null
[]
null
1
2022-02-15T23:04:55Z
2022-02-16T18:49:12Z
2022-02-16T18:49:12Z
MEMBER
null
null
null
## Adding a Dataset - **Name:** VoxPopuli - **Description:** A Large-Scale Multilingual Speech Corpus - **Paper:** https://arxiv.org/pdf/2101.00390.pdf - **Data:** https://github.com/facebookresearch/voxpopuli - **Motivation:** one of the largest (if not the largest) multilingual speech corpora: 400K hours of multilingual unlabeled speech + 17k hours of labeled speech Instructions to add a new dataset can be found [here](https://github.com/huggingface/datasets/blob/master/ADD_NEW_DATASET.md). 👀 @kahne @Molugan
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/3728/reactions" }
https://api.github.com/repos/huggingface/datasets/issues/3728/timeline
null
completed
false
https://api.github.com/repos/huggingface/datasets/issues/3727
https://api.github.com/repos/huggingface/datasets
https://api.github.com/repos/huggingface/datasets/issues/3727/labels{/name}
https://api.github.com/repos/huggingface/datasets/issues/3727/comments
https://api.github.com/repos/huggingface/datasets/issues/3727/events
https://github.com/huggingface/datasets/pull/3727
1,138,979,732
PR_kwDODunzps4y34JN
3,727
Patch all module attributes in its namespace
{ "avatar_url": "https://avatars.githubusercontent.com/u/8515462?v=4", "events_url": "https://api.github.com/users/albertvillanova/events{/privacy}", "followers_url": "https://api.github.com/users/albertvillanova/followers", "following_url": "https://api.github.com/users/albertvillanova/following{/other_user}", "gists_url": "https://api.github.com/users/albertvillanova/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/albertvillanova", "id": 8515462, "login": "albertvillanova", "node_id": "MDQ6VXNlcjg1MTU0NjI=", "organizations_url": "https://api.github.com/users/albertvillanova/orgs", "received_events_url": "https://api.github.com/users/albertvillanova/received_events", "repos_url": "https://api.github.com/users/albertvillanova/repos", "site_admin": false, "starred_url": "https://api.github.com/users/albertvillanova/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/albertvillanova/subscriptions", "type": "User", "url": "https://api.github.com/users/albertvillanova" }
[]
closed
false
null
[]
null
0
2022-02-15T17:12:27Z
2022-02-17T17:06:18Z
2022-02-17T17:06:17Z
MEMBER
null
0
{ "diff_url": "https://github.com/huggingface/datasets/pull/3727.diff", "html_url": "https://github.com/huggingface/datasets/pull/3727", "merged_at": "2022-02-17T17:06:17Z", "patch_url": "https://github.com/huggingface/datasets/pull/3727.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/3727" }
When patching module attributes, only those defined in the module's `__all__` variable were considered by default (only falling back to `__dict__` if `__all__` was None). However, those are only a subset of all the module attributes in its namespace (the `__dict__` variable). This PR fixes the problem for modules that have a non-None `__all__` variable when an attribute present in `__dict__` (but not in `__all__`) is accessed. For example, `pandas` has the attribute `__version__` only present in `__dict__`. - Before version 1.4, pandas `__all__` was None, thus all attributes in `__dict__` were patched - From version 1.4, pandas `__all__` is not None, thus attributes in `__dict__` not present in `__all__` are ignored Fix #3724. CC: @severo @lvwerra
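A small check that makes the `__all__` vs `__dict__` distinction above concrete (run against an installed pandas; the expected outputs follow the description in this PR):

```python
import pandas

print(pandas.__all__ is not None)               # True on pandas >= 1.4, None on older versions
print("__version__" in (pandas.__all__ or []))  # False: __version__ is not exported via __all__
print("__version__" in vars(pandas))            # True: it is present in the module's __dict__
```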
{ "+1": 1, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 1, "url": "https://api.github.com/repos/huggingface/datasets/issues/3727/reactions" }
https://api.github.com/repos/huggingface/datasets/issues/3727/timeline
null
null
true
https://api.github.com/repos/huggingface/datasets/issues/3726
https://api.github.com/repos/huggingface/datasets
https://api.github.com/repos/huggingface/datasets/issues/3726/labels{/name}
https://api.github.com/repos/huggingface/datasets/issues/3726/comments
https://api.github.com/repos/huggingface/datasets/issues/3726/events
https://github.com/huggingface/datasets/pull/3726
1,138,870,362
PR_kwDODunzps4y3iSv
3,726
Use config pandas version in CSV dataset builder
{ "avatar_url": "https://avatars.githubusercontent.com/u/8515462?v=4", "events_url": "https://api.github.com/users/albertvillanova/events{/privacy}", "followers_url": "https://api.github.com/users/albertvillanova/followers", "following_url": "https://api.github.com/users/albertvillanova/following{/other_user}", "gists_url": "https://api.github.com/users/albertvillanova/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/albertvillanova", "id": 8515462, "login": "albertvillanova", "node_id": "MDQ6VXNlcjg1MTU0NjI=", "organizations_url": "https://api.github.com/users/albertvillanova/orgs", "received_events_url": "https://api.github.com/users/albertvillanova/received_events", "repos_url": "https://api.github.com/users/albertvillanova/repos", "site_admin": false, "starred_url": "https://api.github.com/users/albertvillanova/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/albertvillanova/subscriptions", "type": "User", "url": "https://api.github.com/users/albertvillanova" }
[]
closed
false
null
[]
null
0
2022-02-15T15:47:49Z
2022-02-15T16:55:45Z
2022-02-15T16:55:44Z
MEMBER
null
0
{ "diff_url": "https://github.com/huggingface/datasets/pull/3726.diff", "html_url": "https://github.com/huggingface/datasets/pull/3726", "merged_at": "2022-02-15T16:55:44Z", "patch_url": "https://github.com/huggingface/datasets/pull/3726.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/3726" }
Fix #3724.
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/3726/reactions" }
https://api.github.com/repos/huggingface/datasets/issues/3726/timeline
null
null
true
https://api.github.com/repos/huggingface/datasets/issues/3725
https://api.github.com/repos/huggingface/datasets
https://api.github.com/repos/huggingface/datasets/issues/3725/labels{/name}
https://api.github.com/repos/huggingface/datasets/issues/3725/comments
https://api.github.com/repos/huggingface/datasets/issues/3725/events
https://github.com/huggingface/datasets/pull/3725
1,138,835,625
PR_kwDODunzps4y3bOG
3,725
Pin pandas to avoid bug in streaming mode
{ "avatar_url": "https://avatars.githubusercontent.com/u/8515462?v=4", "events_url": "https://api.github.com/users/albertvillanova/events{/privacy}", "followers_url": "https://api.github.com/users/albertvillanova/followers", "following_url": "https://api.github.com/users/albertvillanova/following{/other_user}", "gists_url": "https://api.github.com/users/albertvillanova/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/albertvillanova", "id": 8515462, "login": "albertvillanova", "node_id": "MDQ6VXNlcjg1MTU0NjI=", "organizations_url": "https://api.github.com/users/albertvillanova/orgs", "received_events_url": "https://api.github.com/users/albertvillanova/received_events", "repos_url": "https://api.github.com/users/albertvillanova/repos", "site_admin": false, "starred_url": "https://api.github.com/users/albertvillanova/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/albertvillanova/subscriptions", "type": "User", "url": "https://api.github.com/users/albertvillanova" }
[]
closed
false
null
[]
null
0
2022-02-15T15:21:00Z
2022-02-15T15:52:38Z
2022-02-15T15:52:37Z
MEMBER
null
0
{ "diff_url": "https://github.com/huggingface/datasets/pull/3725.diff", "html_url": "https://github.com/huggingface/datasets/pull/3725", "merged_at": "2022-02-15T15:52:37Z", "patch_url": "https://github.com/huggingface/datasets/pull/3725.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/3725" }
Temporarily pin pandas version to avoid bug in streaming mode (patching no longer works). Related to #3724.
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/3725/reactions" }
https://api.github.com/repos/huggingface/datasets/issues/3725/timeline
null
null
true
https://api.github.com/repos/huggingface/datasets/issues/3724
https://api.github.com/repos/huggingface/datasets
https://api.github.com/repos/huggingface/datasets/issues/3724/labels{/name}
https://api.github.com/repos/huggingface/datasets/issues/3724/comments
https://api.github.com/repos/huggingface/datasets/issues/3724/events
https://github.com/huggingface/datasets/issues/3724
1,138,827,681
I_kwDODunzps5D4SGh
3,724
Bug while streaming CSV dataset with pandas 1.4
{ "avatar_url": "https://avatars.githubusercontent.com/u/8515462?v=4", "events_url": "https://api.github.com/users/albertvillanova/events{/privacy}", "followers_url": "https://api.github.com/users/albertvillanova/followers", "following_url": "https://api.github.com/users/albertvillanova/following{/other_user}", "gists_url": "https://api.github.com/users/albertvillanova/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/albertvillanova", "id": 8515462, "login": "albertvillanova", "node_id": "MDQ6VXNlcjg1MTU0NjI=", "organizations_url": "https://api.github.com/users/albertvillanova/orgs", "received_events_url": "https://api.github.com/users/albertvillanova/received_events", "repos_url": "https://api.github.com/users/albertvillanova/repos", "site_admin": false, "starred_url": "https://api.github.com/users/albertvillanova/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/albertvillanova/subscriptions", "type": "User", "url": "https://api.github.com/users/albertvillanova" }
[ { "color": "d73a4a", "default": true, "description": "Something isn't working", "id": 1935892857, "name": "bug", "node_id": "MDU6TGFiZWwxOTM1ODkyODU3", "url": "https://api.github.com/repos/huggingface/datasets/labels/bug" } ]
closed
false
{ "avatar_url": "https://avatars.githubusercontent.com/u/8515462?v=4", "events_url": "https://api.github.com/users/albertvillanova/events{/privacy}", "followers_url": "https://api.github.com/users/albertvillanova/followers", "following_url": "https://api.github.com/users/albertvillanova/following{/other_user}", "gists_url": "https://api.github.com/users/albertvillanova/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/albertvillanova", "id": 8515462, "login": "albertvillanova", "node_id": "MDQ6VXNlcjg1MTU0NjI=", "organizations_url": "https://api.github.com/users/albertvillanova/orgs", "received_events_url": "https://api.github.com/users/albertvillanova/received_events", "repos_url": "https://api.github.com/users/albertvillanova/repos", "site_admin": false, "starred_url": "https://api.github.com/users/albertvillanova/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/albertvillanova/subscriptions", "type": "User", "url": "https://api.github.com/users/albertvillanova" }
[ { "avatar_url": "https://avatars.githubusercontent.com/u/8515462?v=4", "events_url": "https://api.github.com/users/albertvillanova/events{/privacy}", "followers_url": "https://api.github.com/users/albertvillanova/followers", "following_url": "https://api.github.com/users/albertvillanova/following{/other_user}", "gists_url": "https://api.github.com/users/albertvillanova/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/albertvillanova", "id": 8515462, "login": "albertvillanova", "node_id": "MDQ6VXNlcjg1MTU0NjI=", "organizations_url": "https://api.github.com/users/albertvillanova/orgs", "received_events_url": "https://api.github.com/users/albertvillanova/received_events", "repos_url": "https://api.github.com/users/albertvillanova/repos", "site_admin": false, "starred_url": "https://api.github.com/users/albertvillanova/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/albertvillanova/subscriptions", "type": "User", "url": "https://api.github.com/users/albertvillanova" } ]
null
0
2022-02-15T15:16:19Z
2022-02-15T16:55:44Z
2022-02-15T16:55:44Z
MEMBER
null
null
null
## Describe the bug If we upgrade to pandas `1.4`, the patching of the pandas module is no longer working ``` AttributeError: '_PatchedModuleObj' object has no attribute '__version__' ``` ## Steps to reproduce the bug ``` pip install pandas==1.4 ``` ```python from datasets import load_dataset ds = load_dataset("lvwerra/red-wine", split="train", streaming=True) item = next(iter(ds)) item ```
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/3724/reactions" }
https://api.github.com/repos/huggingface/datasets/issues/3724/timeline
null
completed
false
https://api.github.com/repos/huggingface/datasets/issues/3723
https://api.github.com/repos/huggingface/datasets
https://api.github.com/repos/huggingface/datasets/issues/3723/labels{/name}
https://api.github.com/repos/huggingface/datasets/issues/3723/comments
https://api.github.com/repos/huggingface/datasets/issues/3723/events
https://github.com/huggingface/datasets/pull/3723
1,138,789,493
PR_kwDODunzps4y3RuI
3,723
Fix flatten of complex feature types
{ "avatar_url": "https://avatars.githubusercontent.com/u/47462742?v=4", "events_url": "https://api.github.com/users/mariosasko/events{/privacy}", "followers_url": "https://api.github.com/users/mariosasko/followers", "following_url": "https://api.github.com/users/mariosasko/following{/other_user}", "gists_url": "https://api.github.com/users/mariosasko/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/mariosasko", "id": 47462742, "login": "mariosasko", "node_id": "MDQ6VXNlcjQ3NDYyNzQy", "organizations_url": "https://api.github.com/users/mariosasko/orgs", "received_events_url": "https://api.github.com/users/mariosasko/received_events", "repos_url": "https://api.github.com/users/mariosasko/repos", "site_admin": false, "starred_url": "https://api.github.com/users/mariosasko/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/mariosasko/subscriptions", "type": "User", "url": "https://api.github.com/users/mariosasko" }
[]
closed
false
null
[]
null
2
2022-02-15T14:45:33Z
2022-03-18T17:32:26Z
2022-03-18T17:28:14Z
CONTRIBUTOR
null
0
{ "diff_url": "https://github.com/huggingface/datasets/pull/3723.diff", "html_url": "https://github.com/huggingface/datasets/pull/3723", "merged_at": "2022-03-18T17:28:13Z", "patch_url": "https://github.com/huggingface/datasets/pull/3723.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/3723" }
Fix `flatten` for the following feature types: Image/Audio, Translation, and TranslationVariableLanguages. Inspired by `cast`/`table_cast`, I've introduced a `table_flatten` function to handle the Image/Audio types. CC: @SBrandeis Fix #3686.
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 2, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 2, "url": "https://api.github.com/repos/huggingface/datasets/issues/3723/reactions" }
https://api.github.com/repos/huggingface/datasets/issues/3723/timeline
null
null
true
https://api.github.com/repos/huggingface/datasets/issues/3722
https://api.github.com/repos/huggingface/datasets
https://api.github.com/repos/huggingface/datasets/issues/3722/labels{/name}
https://api.github.com/repos/huggingface/datasets/issues/3722/comments
https://api.github.com/repos/huggingface/datasets/issues/3722/events
https://github.com/huggingface/datasets/pull/3722
1,138,770,211
PR_kwDODunzps4y3NrP
3,722
added electricity load diagram dataset
{ "avatar_url": "https://avatars.githubusercontent.com/u/8100?v=4", "events_url": "https://api.github.com/users/kashif/events{/privacy}", "followers_url": "https://api.github.com/users/kashif/followers", "following_url": "https://api.github.com/users/kashif/following{/other_user}", "gists_url": "https://api.github.com/users/kashif/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/kashif", "id": 8100, "login": "kashif", "node_id": "MDQ6VXNlcjgxMDA=", "organizations_url": "https://api.github.com/users/kashif/orgs", "received_events_url": "https://api.github.com/users/kashif/received_events", "repos_url": "https://api.github.com/users/kashif/repos", "site_admin": false, "starred_url": "https://api.github.com/users/kashif/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/kashif/subscriptions", "type": "User", "url": "https://api.github.com/users/kashif" }
[]
closed
false
null
[]
null
0
2022-02-15T14:29:29Z
2022-02-16T18:53:21Z
2022-02-16T18:48:07Z
CONTRIBUTOR
null
0
{ "diff_url": "https://github.com/huggingface/datasets/pull/3722.diff", "html_url": "https://github.com/huggingface/datasets/pull/3722", "merged_at": "2022-02-16T18:48:07Z", "patch_url": "https://github.com/huggingface/datasets/pull/3722.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/3722" }
Initial Electricity Load Diagram time series dataset.
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/3722/reactions" }
https://api.github.com/repos/huggingface/datasets/issues/3722/timeline
null
null
true
https://api.github.com/repos/huggingface/datasets/issues/3721
https://api.github.com/repos/huggingface/datasets
https://api.github.com/repos/huggingface/datasets/issues/3721/labels{/name}
https://api.github.com/repos/huggingface/datasets/issues/3721/comments
https://api.github.com/repos/huggingface/datasets/issues/3721/events
https://github.com/huggingface/datasets/pull/3721
1,137,617,108
PR_kwDODunzps4yzXCd
3,721
Multi-GPU support for `FaissIndex`
{ "avatar_url": "https://avatars.githubusercontent.com/u/32859905?v=4", "events_url": "https://api.github.com/users/rentruewang/events{/privacy}", "followers_url": "https://api.github.com/users/rentruewang/followers", "following_url": "https://api.github.com/users/rentruewang/following{/other_user}", "gists_url": "https://api.github.com/users/rentruewang/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/rentruewang", "id": 32859905, "login": "rentruewang", "node_id": "MDQ6VXNlcjMyODU5OTA1", "organizations_url": "https://api.github.com/users/rentruewang/orgs", "received_events_url": "https://api.github.com/users/rentruewang/received_events", "repos_url": "https://api.github.com/users/rentruewang/repos", "site_admin": false, "starred_url": "https://api.github.com/users/rentruewang/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/rentruewang/subscriptions", "type": "User", "url": "https://api.github.com/users/rentruewang" }
[]
closed
false
null
[]
null
5
2022-02-14T17:26:51Z
2022-03-07T16:28:57Z
2022-03-07T16:28:56Z
CONTRIBUTOR
null
0
{ "diff_url": "https://github.com/huggingface/datasets/pull/3721.diff", "html_url": "https://github.com/huggingface/datasets/pull/3721", "merged_at": "2022-03-07T16:28:56Z", "patch_url": "https://github.com/huggingface/datasets/pull/3721.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/3721" }
Per #3716, the current implementation does not take into account that `faiss` can run on multiple GPUs. In this commit, I provided multi-GPU support for `FaissIndex` by modifying the device management in `IndexableMixin.add_faiss_index` and `FaissIndex.load`. Now users are able to pass in 1. a positive integer (as usual) to use 1 GPU 2. the negative integer `-1` to use all GPUs 3. a list of integers, e.g. `[0, 1]`, to run only on those GPUs 4. Of course, passing in nothing still runs on CPU. This closes: #3716
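A minimal sketch of the kind of device normalization the list above implies; the helper name and the specific `faiss` wrapper functions are assumptions about how this could be done (and require a `faiss-gpu` build), not the merged implementation:

```python
from typing import List, Optional, Union

import faiss

def index_to_device(index: faiss.Index, device: Optional[Union[int, List[int]]]) -> faiss.Index:
    if device is None:
        return index  # 4. nothing passed in: stay on CPU
    if isinstance(device, int):
        if device < 0:
            return faiss.index_cpu_to_all_gpus(index)  # 2. -1: use all GPUs
        res = faiss.StandardGpuResources()
        return faiss.index_cpu_to_gpu(res, device, index)  # 1. single GPU id
    # 3. list of GPU ids, e.g. [0, 1]
    return faiss.index_cpu_to_gpus_list(index, gpus=list(device))
```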
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/3721/reactions" }
https://api.github.com/repos/huggingface/datasets/issues/3721/timeline
null
null
true
https://api.github.com/repos/huggingface/datasets/issues/3720
https://api.github.com/repos/huggingface/datasets
https://api.github.com/repos/huggingface/datasets/issues/3720/labels{/name}
https://api.github.com/repos/huggingface/datasets/issues/3720/comments
https://api.github.com/repos/huggingface/datasets/issues/3720/events
https://github.com/huggingface/datasets/issues/3720
1,137,537,080
I_kwDODunzps5DzXA4
3,720
Builder Configuration Update Required on Common Voice Dataset
{ "avatar_url": "https://avatars.githubusercontent.com/u/12482065?v=4", "events_url": "https://api.github.com/users/aasem/events{/privacy}", "followers_url": "https://api.github.com/users/aasem/followers", "following_url": "https://api.github.com/users/aasem/following{/other_user}", "gists_url": "https://api.github.com/users/aasem/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/aasem", "id": 12482065, "login": "aasem", "node_id": "MDQ6VXNlcjEyNDgyMDY1", "organizations_url": "https://api.github.com/users/aasem/orgs", "received_events_url": "https://api.github.com/users/aasem/received_events", "repos_url": "https://api.github.com/users/aasem/repos", "site_admin": false, "starred_url": "https://api.github.com/users/aasem/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/aasem/subscriptions", "type": "User", "url": "https://api.github.com/users/aasem" }
[ { "color": "d73a4a", "default": true, "description": "Something isn't working", "id": 1935892857, "name": "bug", "node_id": "MDU6TGFiZWwxOTM1ODkyODU3", "url": "https://api.github.com/repos/huggingface/datasets/labels/bug" } ]
open
false
null
[]
null
7
2022-02-14T16:21:41Z
2022-02-15T14:31:27Z
null
NONE
null
null
null
Missing language in Common Voice dataset **Link:** https://huggingface.co/datasets/common_voice I tried to load the Urdu dataset using `load_dataset("common_voice", "ur", split="train+validation")` but couldn't because the builder configuration was not found. I checked the source file here for the supported languages: https://github.com/huggingface/datasets/blob/master/datasets/common_voice/common_voice.py and Urdu isn't included there. I assume a quick update will fix the issue, as Urdu speech is now available in the Common Voice dataset. Am I the one who added this dataset? No
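A quick way to check which language configurations the current loading script exposes, assuming the installed `datasets` version provides `get_dataset_config_names`:

```python
from datasets import get_dataset_config_names

configs = get_dataset_config_names("common_voice")
print(len(configs), "language configurations available")
print("ur" in configs)  # False until the Urdu configuration is added to the script
```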
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/3720/reactions" }
https://api.github.com/repos/huggingface/datasets/issues/3720/timeline
null
null
false
https://api.github.com/repos/huggingface/datasets/issues/3719
https://api.github.com/repos/huggingface/datasets
https://api.github.com/repos/huggingface/datasets/issues/3719/labels{/name}
https://api.github.com/repos/huggingface/datasets/issues/3719/comments
https://api.github.com/repos/huggingface/datasets/issues/3719/events
https://github.com/huggingface/datasets/pull/3719
1,137,237,622
PR_kwDODunzps4yyFv7
3,719
Check if indices values in `Dataset.select` are within bounds
{ "avatar_url": "https://avatars.githubusercontent.com/u/47462742?v=4", "events_url": "https://api.github.com/users/mariosasko/events{/privacy}", "followers_url": "https://api.github.com/users/mariosasko/followers", "following_url": "https://api.github.com/users/mariosasko/following{/other_user}", "gists_url": "https://api.github.com/users/mariosasko/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/mariosasko", "id": 47462742, "login": "mariosasko", "node_id": "MDQ6VXNlcjQ3NDYyNzQy", "organizations_url": "https://api.github.com/users/mariosasko/orgs", "received_events_url": "https://api.github.com/users/mariosasko/received_events", "repos_url": "https://api.github.com/users/mariosasko/repos", "site_admin": false, "starred_url": "https://api.github.com/users/mariosasko/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/mariosasko/subscriptions", "type": "User", "url": "https://api.github.com/users/mariosasko" }
[]
closed
false
null
[]
null
0
2022-02-14T12:31:41Z
2022-02-14T19:19:22Z
2022-02-14T19:19:22Z
CONTRIBUTOR
null
0
{ "diff_url": "https://github.com/huggingface/datasets/pull/3719.diff", "html_url": "https://github.com/huggingface/datasets/pull/3719", "merged_at": "2022-02-14T19:19:21Z", "patch_url": "https://github.com/huggingface/datasets/pull/3719.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/3719" }
Fix #3707 Instead of reusing `_check_valid_index_key` from `datasets.formatting`, I defined a new function to provide a more meaningful error message.
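A minimal sketch of the kind of bounds check described above; the function name and error message are assumptions for illustration, not the code that was merged:

```python
from typing import Iterable

def check_indices_in_bounds(indices: Iterable[int], size: int) -> None:
    """Raise IndexError if any index falls outside [-size, size)."""
    for i in indices:
        if not -size <= i < size:
            raise IndexError(f"Index {i} is out of range for a dataset of size {size}.")

check_indices_in_bounds([0, 2, -1], size=3)   # ok
# check_indices_in_bounds([3], size=3)        # would raise a meaningful IndexError
```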
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 1, "total_count": 1, "url": "https://api.github.com/repos/huggingface/datasets/issues/3719/reactions" }
https://api.github.com/repos/huggingface/datasets/issues/3719/timeline
null
null
true
https://api.github.com/repos/huggingface/datasets/issues/3718
https://api.github.com/repos/huggingface/datasets
https://api.github.com/repos/huggingface/datasets/issues/3718/labels{/name}
https://api.github.com/repos/huggingface/datasets/issues/3718/comments
https://api.github.com/repos/huggingface/datasets/issues/3718/events
https://github.com/huggingface/datasets/pull/3718
1,137,196,388
PR_kwDODunzps4yx8r2
3,718
Fix Evidence Infer Treatment dataset
{ "avatar_url": "https://avatars.githubusercontent.com/u/8515462?v=4", "events_url": "https://api.github.com/users/albertvillanova/events{/privacy}", "followers_url": "https://api.github.com/users/albertvillanova/followers", "following_url": "https://api.github.com/users/albertvillanova/following{/other_user}", "gists_url": "https://api.github.com/users/albertvillanova/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/albertvillanova", "id": 8515462, "login": "albertvillanova", "node_id": "MDQ6VXNlcjg1MTU0NjI=", "organizations_url": "https://api.github.com/users/albertvillanova/orgs", "received_events_url": "https://api.github.com/users/albertvillanova/received_events", "repos_url": "https://api.github.com/users/albertvillanova/repos", "site_admin": false, "starred_url": "https://api.github.com/users/albertvillanova/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/albertvillanova/subscriptions", "type": "User", "url": "https://api.github.com/users/albertvillanova" }
[]
closed
false
null
[]
null
0
2022-02-14T11:58:07Z
2022-02-14T13:21:45Z
2022-02-14T13:21:44Z
MEMBER
null
0
{ "diff_url": "https://github.com/huggingface/datasets/pull/3718.diff", "html_url": "https://github.com/huggingface/datasets/pull/3718", "merged_at": "2022-02-14T13:21:43Z", "patch_url": "https://github.com/huggingface/datasets/pull/3718.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/3718" }
This PR: - fixes a bug in the script by removing an unnamed column containing the row index: fixes the KeyError - fixes the metadata JSON by adding both configurations (1.1 and 2.0): fixes the ExpectedMoreDownloadedFiles error - updates the dataset card Fix #3515.
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/3718/reactions" }
https://api.github.com/repos/huggingface/datasets/issues/3718/timeline
null
null
true
https://api.github.com/repos/huggingface/datasets/issues/3717
https://api.github.com/repos/huggingface/datasets
https://api.github.com/repos/huggingface/datasets/issues/3717/labels{/name}
https://api.github.com/repos/huggingface/datasets/issues/3717/comments
https://api.github.com/repos/huggingface/datasets/issues/3717/events
https://github.com/huggingface/datasets/issues/3717
1,137,183,015
I_kwDODunzps5DyAkn
3,717
wrong condition in `Features ClassLabel encode_example`
{ "avatar_url": "https://avatars.githubusercontent.com/u/56633664?v=4", "events_url": "https://api.github.com/users/Tudyx/events{/privacy}", "followers_url": "https://api.github.com/users/Tudyx/followers", "following_url": "https://api.github.com/users/Tudyx/following{/other_user}", "gists_url": "https://api.github.com/users/Tudyx/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/Tudyx", "id": 56633664, "login": "Tudyx", "node_id": "MDQ6VXNlcjU2NjMzNjY0", "organizations_url": "https://api.github.com/users/Tudyx/orgs", "received_events_url": "https://api.github.com/users/Tudyx/received_events", "repos_url": "https://api.github.com/users/Tudyx/repos", "site_admin": false, "starred_url": "https://api.github.com/users/Tudyx/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/Tudyx/subscriptions", "type": "User", "url": "https://api.github.com/users/Tudyx" }
[ { "color": "d73a4a", "default": true, "description": "Something isn't working", "id": 1935892857, "name": "bug", "node_id": "MDU6TGFiZWwxOTM1ODkyODU3", "url": "https://api.github.com/repos/huggingface/datasets/labels/bug" } ]
closed
false
null
[]
null
1
2022-02-14T11:44:35Z
2022-02-14T15:09:36Z
2022-02-14T15:07:43Z
NONE
null
null
null
## Describe the bug The `encode_example` function in *features.py* seems to have a wrong condition. ```python if not -1 <= example_data < self.num_classes: raise ValueError(f"Class label {example_data:d} greater than configured num_classes {self.num_classes}") ``` ## Expected results The `not -1 <=` part changes the result of the condition. For instance, if `example_data` equals 4 and `self.num_classes` equals 4 too, `example_data < self.num_classes` will give `False` as expected. But if I add the `not -1 <=` condition, `not -1 <= example_data < self.num_classes` will give `True` and raise an exception. ## Environment info - `datasets` version: 1.18.3 - Python version: 3.8.10 - PyArrow version: 7.00
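For reference, a small snippet showing how Python evaluates the chained comparison quoted above; this is plain Python semantics, not code taken from *features.py*:

```python
example_data, num_classes = 4, 4

# `not` binds more loosely than the comparison chain, so the condition reads as
# `not ((-1 <= example_data) and (example_data < num_classes))`.
print(example_data < num_classes)            # False
print(-1 <= example_data < num_classes)      # False
print(not -1 <= example_data < num_classes)  # True -> the ValueError branch is taken
```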
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/3717/reactions" }
https://api.github.com/repos/huggingface/datasets/issues/3717/timeline
null
completed
false
https://api.github.com/repos/huggingface/datasets/issues/3716
https://api.github.com/repos/huggingface/datasets
https://api.github.com/repos/huggingface/datasets/issues/3716/labels{/name}
https://api.github.com/repos/huggingface/datasets/issues/3716/comments
https://api.github.com/repos/huggingface/datasets/issues/3716/events
https://github.com/huggingface/datasets/issues/3716
1,136,831,092
I_kwDODunzps5Dwqp0
3,716
`FaissIndex` to support multiple GPU and `custom_index`
{ "avatar_url": "https://avatars.githubusercontent.com/u/32859905?v=4", "events_url": "https://api.github.com/users/rentruewang/events{/privacy}", "followers_url": "https://api.github.com/users/rentruewang/followers", "following_url": "https://api.github.com/users/rentruewang/following{/other_user}", "gists_url": "https://api.github.com/users/rentruewang/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/rentruewang", "id": 32859905, "login": "rentruewang", "node_id": "MDQ6VXNlcjMyODU5OTA1", "organizations_url": "https://api.github.com/users/rentruewang/orgs", "received_events_url": "https://api.github.com/users/rentruewang/received_events", "repos_url": "https://api.github.com/users/rentruewang/repos", "site_admin": false, "starred_url": "https://api.github.com/users/rentruewang/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/rentruewang/subscriptions", "type": "User", "url": "https://api.github.com/users/rentruewang" }
[ { "color": "a2eeef", "default": true, "description": "New feature or request", "id": 1935892871, "name": "enhancement", "node_id": "MDU6TGFiZWwxOTM1ODkyODcx", "url": "https://api.github.com/repos/huggingface/datasets/labels/enhancement" } ]
closed
false
null
[]
null
2
2022-02-14T06:21:43Z
2022-03-07T16:28:56Z
2022-03-07T16:28:56Z
CONTRIBUTOR
null
null
null
**Is your feature request related to a problem? Please describe.** Currently, because `device` is of the type `int | None`, to leverage `faiss-gpu`'s multi-gpu support, you need to create a `custom_index`. However, if using a `custom_index` created by e.g. `faiss.index_cpu_to_all_gpus`, then `FaissIndex.save` does not work properly because it checks the device id (which is an int, so no multiple GPUs). **Describe the solution you'd like** I would like `FaissIndex` to support multiple GPUs, by passing in a list to `add_faiss_index`. **Describe alternatives you've considered** Alternatively, I would like it to at least provide a warning because it wasn't the behavior that I expected. **Additional context** Relevant source code here: https://github.com/huggingface/datasets/blob/6ed6ac9448311930557810383d2cfd4fe6aae269/src/datasets/search.py#L340-L349 Device management needs changing to support multiple GPUs, probably via `isinstance` checks. I can provide a PR if you like :) Thanks for reading!
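For illustration, a hedged sketch of the `custom_index` workaround described above (assuming `faiss-gpu` is installed with at least one visible GPU; the column name, dimensionality, and random vectors are made up for the example):

```python
# Sketch only: spreads a flat inner-product index over all visible GPUs and hands it
# to add_faiss_index via custom_index. FaissIndex.save may still not handle this case,
# which is exactly the limitation reported in this issue.
import numpy as np
import faiss  # requires faiss-gpu
from datasets import Dataset

dim = 8  # embedding dimensionality (arbitrary for the sketch)
ds = Dataset.from_dict(
    {
        "text": [f"doc {i}" for i in range(100)],
        "embeddings": [np.random.rand(dim).astype("float32") for _ in range(100)],
    }
)

cpu_index = faiss.IndexFlatIP(dim)
gpu_index = faiss.index_cpu_to_all_gpus(cpu_index)  # multi-GPU index

ds.add_faiss_index(column="embeddings", custom_index=gpu_index)
scores, examples = ds.get_nearest_examples("embeddings", np.random.rand(dim).astype("float32"), k=5)
print(examples["text"])
```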
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/3716/reactions" }
https://api.github.com/repos/huggingface/datasets/issues/3716/timeline
null
completed
false
https://api.github.com/repos/huggingface/datasets/issues/3715
https://api.github.com/repos/huggingface/datasets
https://api.github.com/repos/huggingface/datasets/issues/3715/labels{/name}
https://api.github.com/repos/huggingface/datasets/issues/3715/comments
https://api.github.com/repos/huggingface/datasets/issues/3715/events
https://github.com/huggingface/datasets/pull/3715
1,136,107,879
PR_kwDODunzps4yuKJj
3,715
Fix bugs in msr_sqa dataset
{ "avatar_url": "https://avatars.githubusercontent.com/u/47296835?v=4", "events_url": "https://api.github.com/users/Timothyxxx/events{/privacy}", "followers_url": "https://api.github.com/users/Timothyxxx/followers", "following_url": "https://api.github.com/users/Timothyxxx/following{/other_user}", "gists_url": "https://api.github.com/users/Timothyxxx/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/Timothyxxx", "id": 47296835, "login": "Timothyxxx", "node_id": "MDQ6VXNlcjQ3Mjk2ODM1", "organizations_url": "https://api.github.com/users/Timothyxxx/orgs", "received_events_url": "https://api.github.com/users/Timothyxxx/received_events", "repos_url": "https://api.github.com/users/Timothyxxx/repos", "site_admin": false, "starred_url": "https://api.github.com/users/Timothyxxx/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/Timothyxxx/subscriptions", "type": "User", "url": "https://api.github.com/users/Timothyxxx" }
[ { "color": "0e8a16", "default": false, "description": "Contribution to a dataset script", "id": 4564477500, "name": "dataset contribution", "node_id": "LA_kwDODunzps8AAAABEBBmPA", "url": "https://api.github.com/repos/huggingface/datasets/labels/dataset%20contribution" } ]
closed
false
null
[]
null
5
2022-02-13T16:37:30Z
2022-10-03T09:10:02Z
2022-10-03T09:08:06Z
CONTRIBUTOR
null
0
{ "diff_url": "https://github.com/huggingface/datasets/pull/3715.diff", "html_url": "https://github.com/huggingface/datasets/pull/3715", "merged_at": "2022-10-03T09:08:06Z", "patch_url": "https://github.com/huggingface/datasets/pull/3715.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/3715" }
The previous version has several problems: 1) errors in table loading: splitting each row on a single comma instead of using pandas is wrong; 2) duplicated ids in the _generate_examples function; 3) missing information about the history questions, which makes the dataset hard to use. I fixed these with reference to https://github.com/HKUNLP/UnifiedSKG, and we tested that it now works as expected.
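To make fix 1) concrete, a hedged sketch of the difference (illustrative only, not the actual msr_sqa script; the file handling and return shape are assumptions):

```python
import pandas as pd

def load_table_data(table_file: str):
    # Robust version: pandas respects CSV quoting, so cells like "1,000" stay intact.
    table = pd.read_csv(table_file)
    header = list(table.columns)
    rows = table.astype(str).values.tolist()
    return header, rows

def load_table_data_naive(table_file: str):
    # The buggy pattern being replaced: splitting each line on a bare comma.
    with open(table_file, encoding="utf-8") as f:
        lines = [line.rstrip("\n").split(",") for line in f]
    return lines[0], lines[1:]
```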
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/3715/reactions" }
https://api.github.com/repos/huggingface/datasets/issues/3715/timeline
null
null
true
https://api.github.com/repos/huggingface/datasets/issues/3714
https://api.github.com/repos/huggingface/datasets
https://api.github.com/repos/huggingface/datasets/issues/3714/labels{/name}
https://api.github.com/repos/huggingface/datasets/issues/3714/comments
https://api.github.com/repos/huggingface/datasets/issues/3714/events
https://github.com/huggingface/datasets/issues/3714
1,136,105,530
I_kwDODunzps5Dt5g6
3,714
tatoeba_mt: File not found error and key error
{ "avatar_url": "https://avatars.githubusercontent.com/u/614718?v=4", "events_url": "https://api.github.com/users/jorgtied/events{/privacy}", "followers_url": "https://api.github.com/users/jorgtied/followers", "following_url": "https://api.github.com/users/jorgtied/following{/other_user}", "gists_url": "https://api.github.com/users/jorgtied/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/jorgtied", "id": 614718, "login": "jorgtied", "node_id": "MDQ6VXNlcjYxNDcxOA==", "organizations_url": "https://api.github.com/users/jorgtied/orgs", "received_events_url": "https://api.github.com/users/jorgtied/received_events", "repos_url": "https://api.github.com/users/jorgtied/repos", "site_admin": false, "starred_url": "https://api.github.com/users/jorgtied/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/jorgtied/subscriptions", "type": "User", "url": "https://api.github.com/users/jorgtied" }
[ { "color": "E5583E", "default": false, "description": "Related to the dataset viewer on huggingface.co", "id": 3470211881, "name": "dataset-viewer", "node_id": "LA_kwDODunzps7O1zsp", "url": "https://api.github.com/repos/huggingface/datasets/labels/dataset-viewer" } ]
closed
false
null
[]
null
1
2022-02-13T16:35:45Z
2022-02-13T20:44:04Z
2022-02-13T20:44:04Z
NONE
null
null
null
## Dataset viewer issue for 'tatoeba_mt' **Link:** https://huggingface.co/datasets/Helsinki-NLP/tatoeba_mt My data loader script does not seem to work. The files are part of the local repository but cannot be found. An example where it should work is the subset for "afr-eng". Another problem is that I do not have validation data for all subsets and I don't know how to properly check whether validation exists in the configuration before I try to download it. An example is the subset for "afr-deu". Am I the one who added this dataset? Yes
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/3714/reactions" }
https://api.github.com/repos/huggingface/datasets/issues/3714/timeline
null
completed
false
https://api.github.com/repos/huggingface/datasets/issues/3713
https://api.github.com/repos/huggingface/datasets
https://api.github.com/repos/huggingface/datasets/issues/3713/labels{/name}
https://api.github.com/repos/huggingface/datasets/issues/3713/comments
https://api.github.com/repos/huggingface/datasets/issues/3713/events
https://github.com/huggingface/datasets/pull/3713
1,135,692,572
PR_kwDODunzps4yso6D
3,713
Rm sphinx doc
{ "avatar_url": "https://avatars.githubusercontent.com/u/11827707?v=4", "events_url": "https://api.github.com/users/mishig25/events{/privacy}", "followers_url": "https://api.github.com/users/mishig25/followers", "following_url": "https://api.github.com/users/mishig25/following{/other_user}", "gists_url": "https://api.github.com/users/mishig25/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/mishig25", "id": 11827707, "login": "mishig25", "node_id": "MDQ6VXNlcjExODI3NzA3", "organizations_url": "https://api.github.com/users/mishig25/orgs", "received_events_url": "https://api.github.com/users/mishig25/received_events", "repos_url": "https://api.github.com/users/mishig25/repos", "site_admin": false, "starred_url": "https://api.github.com/users/mishig25/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/mishig25/subscriptions", "type": "User", "url": "https://api.github.com/users/mishig25" }
[]
closed
false
null
[]
null
2
2022-02-13T11:26:31Z
2022-02-17T10:18:46Z
2022-02-17T10:12:09Z
CONTRIBUTOR
null
0
{ "diff_url": "https://github.com/huggingface/datasets/pull/3713.diff", "html_url": "https://github.com/huggingface/datasets/pull/3713", "merged_at": null, "patch_url": "https://github.com/huggingface/datasets/pull/3713.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/3713" }
Checklist - [x] Update circle ci yaml - [x] Delete sphinx static & python files in docs dir - [x] Update readme in docs dir - [ ] Update docs config in setup.py
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/3713/reactions" }
https://api.github.com/repos/huggingface/datasets/issues/3713/timeline
null
null
true
https://api.github.com/repos/huggingface/datasets/issues/3712
https://api.github.com/repos/huggingface/datasets
https://api.github.com/repos/huggingface/datasets/issues/3712/labels{/name}
https://api.github.com/repos/huggingface/datasets/issues/3712/comments
https://api.github.com/repos/huggingface/datasets/issues/3712/events
https://github.com/huggingface/datasets/pull/3712
1,134,252,505
PR_kwDODunzps4ynVYy
3,712
Fix the error of msr_sqa dataset
{ "avatar_url": "https://avatars.githubusercontent.com/u/47296835?v=4", "events_url": "https://api.github.com/users/Timothyxxx/events{/privacy}", "followers_url": "https://api.github.com/users/Timothyxxx/followers", "following_url": "https://api.github.com/users/Timothyxxx/following{/other_user}", "gists_url": "https://api.github.com/users/Timothyxxx/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/Timothyxxx", "id": 47296835, "login": "Timothyxxx", "node_id": "MDQ6VXNlcjQ3Mjk2ODM1", "organizations_url": "https://api.github.com/users/Timothyxxx/orgs", "received_events_url": "https://api.github.com/users/Timothyxxx/received_events", "repos_url": "https://api.github.com/users/Timothyxxx/repos", "site_admin": false, "starred_url": "https://api.github.com/users/Timothyxxx/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/Timothyxxx/subscriptions", "type": "User", "url": "https://api.github.com/users/Timothyxxx" }
[]
closed
false
null
[]
null
0
2022-02-12T16:27:54Z
2022-02-13T11:21:05Z
2022-02-13T11:21:05Z
CONTRIBUTOR
null
0
{ "diff_url": "https://github.com/huggingface/datasets/pull/3712.diff", "html_url": "https://github.com/huggingface/datasets/pull/3712", "merged_at": null, "patch_url": "https://github.com/huggingface/datasets/pull/3712.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/3712" }
Fix the error in the _load_table_data function of the msr_sqa dataset: it is wrong to split each row on a comma.
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/3712/reactions" }
https://api.github.com/repos/huggingface/datasets/issues/3712/timeline
null
null
true
https://api.github.com/repos/huggingface/datasets/issues/3711
https://api.github.com/repos/huggingface/datasets
https://api.github.com/repos/huggingface/datasets/issues/3711/labels{/name}
https://api.github.com/repos/huggingface/datasets/issues/3711/comments
https://api.github.com/repos/huggingface/datasets/issues/3711/events
https://github.com/huggingface/datasets/pull/3711
1,134,050,545
PR_kwDODunzps4ymmlK
3,711
Fix the error of _load_table_data function in msr_sqa dataset
{ "avatar_url": "https://avatars.githubusercontent.com/u/47296835?v=4", "events_url": "https://api.github.com/users/Timothyxxx/events{/privacy}", "followers_url": "https://api.github.com/users/Timothyxxx/followers", "following_url": "https://api.github.com/users/Timothyxxx/following{/other_user}", "gists_url": "https://api.github.com/users/Timothyxxx/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/Timothyxxx", "id": 47296835, "login": "Timothyxxx", "node_id": "MDQ6VXNlcjQ3Mjk2ODM1", "organizations_url": "https://api.github.com/users/Timothyxxx/orgs", "received_events_url": "https://api.github.com/users/Timothyxxx/received_events", "repos_url": "https://api.github.com/users/Timothyxxx/repos", "site_admin": false, "starred_url": "https://api.github.com/users/Timothyxxx/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/Timothyxxx/subscriptions", "type": "User", "url": "https://api.github.com/users/Timothyxxx" }
[]
closed
false
null
[]
null
0
2022-02-12T13:20:53Z
2022-02-12T13:30:43Z
2022-02-12T13:30:43Z
CONTRIBUTOR
null
0
{ "diff_url": "https://github.com/huggingface/datasets/pull/3711.diff", "html_url": "https://github.com/huggingface/datasets/pull/3711", "merged_at": null, "patch_url": "https://github.com/huggingface/datasets/pull/3711.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/3711" }
The _load_table_data function in the previous version is wrong: it is incorrect to split each row on a comma.
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/3711/reactions" }
https://api.github.com/repos/huggingface/datasets/issues/3711/timeline
null
null
true
https://api.github.com/repos/huggingface/datasets/issues/3710
https://api.github.com/repos/huggingface/datasets
https://api.github.com/repos/huggingface/datasets/issues/3710/labels{/name}
https://api.github.com/repos/huggingface/datasets/issues/3710/comments
https://api.github.com/repos/huggingface/datasets/issues/3710/events
https://github.com/huggingface/datasets/pull/3710
1,133,955,393
PR_kwDODunzps4ymQMQ
3,710
Fix CI code quality issue
{ "avatar_url": "https://avatars.githubusercontent.com/u/8515462?v=4", "events_url": "https://api.github.com/users/albertvillanova/events{/privacy}", "followers_url": "https://api.github.com/users/albertvillanova/followers", "following_url": "https://api.github.com/users/albertvillanova/following{/other_user}", "gists_url": "https://api.github.com/users/albertvillanova/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/albertvillanova", "id": 8515462, "login": "albertvillanova", "node_id": "MDQ6VXNlcjg1MTU0NjI=", "organizations_url": "https://api.github.com/users/albertvillanova/orgs", "received_events_url": "https://api.github.com/users/albertvillanova/received_events", "repos_url": "https://api.github.com/users/albertvillanova/repos", "site_admin": false, "starred_url": "https://api.github.com/users/albertvillanova/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/albertvillanova/subscriptions", "type": "User", "url": "https://api.github.com/users/albertvillanova" }
[]
closed
false
null
[]
null
0
2022-02-12T12:05:39Z
2022-02-12T12:58:05Z
2022-02-12T12:58:04Z
MEMBER
null
0
{ "diff_url": "https://github.com/huggingface/datasets/pull/3710.diff", "html_url": "https://github.com/huggingface/datasets/pull/3710", "merged_at": "2022-02-12T12:58:04Z", "patch_url": "https://github.com/huggingface/datasets/pull/3710.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/3710" }
Fix CI code quality issue introduced by #3695.
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/3710/reactions" }
https://api.github.com/repos/huggingface/datasets/issues/3710/timeline
null
null
true
https://api.github.com/repos/huggingface/datasets/issues/3709
https://api.github.com/repos/huggingface/datasets
https://api.github.com/repos/huggingface/datasets/issues/3709/labels{/name}
https://api.github.com/repos/huggingface/datasets/issues/3709/comments
https://api.github.com/repos/huggingface/datasets/issues/3709/events
https://github.com/huggingface/datasets/pull/3709
1,132,997,904
PR_kwDODunzps4yi0J4
3,709
Set base path to hub url for canonical datasets
{ "avatar_url": "https://avatars.githubusercontent.com/u/42851186?v=4", "events_url": "https://api.github.com/users/lhoestq/events{/privacy}", "followers_url": "https://api.github.com/users/lhoestq/followers", "following_url": "https://api.github.com/users/lhoestq/following{/other_user}", "gists_url": "https://api.github.com/users/lhoestq/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/lhoestq", "id": 42851186, "login": "lhoestq", "node_id": "MDQ6VXNlcjQyODUxMTg2", "organizations_url": "https://api.github.com/users/lhoestq/orgs", "received_events_url": "https://api.github.com/users/lhoestq/received_events", "repos_url": "https://api.github.com/users/lhoestq/repos", "site_admin": false, "starred_url": "https://api.github.com/users/lhoestq/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/lhoestq/subscriptions", "type": "User", "url": "https://api.github.com/users/lhoestq" }
[]
closed
false
null
[]
null
1
2022-02-11T19:23:20Z
2022-02-16T14:02:28Z
2022-02-16T14:02:27Z
MEMBER
null
0
{ "diff_url": "https://github.com/huggingface/datasets/pull/3709.diff", "html_url": "https://github.com/huggingface/datasets/pull/3709", "merged_at": "2022-02-16T14:02:27Z", "patch_url": "https://github.com/huggingface/datasets/pull/3709.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/3709" }
This should allow canonical datasets to use relative paths to download data files from the Hub. cc @polinaeterna, this will be useful if we have audio datasets that are canonical and for which you'd like to host data files.
{ "+1": 1, "-1": 0, "confused": 0, "eyes": 0, "heart": 2, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 3, "url": "https://api.github.com/repos/huggingface/datasets/issues/3709/reactions" }
https://api.github.com/repos/huggingface/datasets/issues/3709/timeline
null
null
true
https://api.github.com/repos/huggingface/datasets/issues/3708
https://api.github.com/repos/huggingface/datasets
https://api.github.com/repos/huggingface/datasets/issues/3708/labels{/name}
https://api.github.com/repos/huggingface/datasets/issues/3708/comments
https://api.github.com/repos/huggingface/datasets/issues/3708/events
https://github.com/huggingface/datasets/issues/3708
1,132,968,402
I_kwDODunzps5Dh7nS
3,708
Loading JSON gets stuck with many workers/threads
{ "avatar_url": "https://avatars.githubusercontent.com/u/8264887?v=4", "events_url": "https://api.github.com/users/lvwerra/events{/privacy}", "followers_url": "https://api.github.com/users/lvwerra/followers", "following_url": "https://api.github.com/users/lvwerra/following{/other_user}", "gists_url": "https://api.github.com/users/lvwerra/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/lvwerra", "id": 8264887, "login": "lvwerra", "node_id": "MDQ6VXNlcjgyNjQ4ODc=", "organizations_url": "https://api.github.com/users/lvwerra/orgs", "received_events_url": "https://api.github.com/users/lvwerra/received_events", "repos_url": "https://api.github.com/users/lvwerra/repos", "site_admin": false, "starred_url": "https://api.github.com/users/lvwerra/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/lvwerra/subscriptions", "type": "User", "url": "https://api.github.com/users/lvwerra" }
[ { "color": "d73a4a", "default": true, "description": "Something isn't working", "id": 1935892857, "name": "bug", "node_id": "MDU6TGFiZWwxOTM1ODkyODU3", "url": "https://api.github.com/repos/huggingface/datasets/labels/bug" } ]
open
false
null
[]
null
8
2022-02-11T18:50:48Z
2023-06-16T11:24:12Z
null
MEMBER
null
null
null
## Describe the bug Loading a JSON dataset with `load_dataset` can get stuck when running on a machine with many CPUs. This is especially an issue when loading a large dataset on a large machine. ## Steps to reproduce the bug I originally created the following script to reproduce the issue: ```python from datasets import load_dataset from multiprocessing import Process from tqdm import tqdm import datasets from transformers import set_seed def run_tasks_in_parallel(tasks, ds_list): for _ in tqdm(range(1000)): print('new batch') running_tasks = [Process(target=task, args=(ds, i)) for i, (task, ds) in enumerate(zip(tasks, ds_list))] for running_task in running_tasks: running_task.start() for running_task in running_tasks: running_task.join() def get_dataset(): dataset_name = 'transformersbook/codeparrot' ds = load_dataset(dataset_name+'-train', split="train", streaming=True) ds = ds.shuffle(buffer_size=1000, seed=1) return iter(ds) def get_next_element(ds, process_id, N=10000): for _ in range(N): _ = next(ds)['content'] print(f'process {process_id} done') return set_seed(1) datasets.utils.logging.set_verbosity_debug() n_processes = 8 tasks = [get_next_element for _ in range(n_processes)] args = [get_dataset() for _ in range(n_processes)] run_tasks_in_parallel(tasks, args) ``` Today I noticed that it can happen when running it on a single process on a machine with many cores without streaming. So just `load_dataset("transformersbook/codeparrot-train")` alone might cause the issue after waiting long enough or trying many times. It's a slightly random process which makes it especially hard to track down. When I encountered it today it had already processed 17GB of data (the size of the cache folder when it got stuck) before getting stuck. Here's my current understanding of the error. As far as I can tell it happens in the following block: https://github.com/huggingface/datasets/blob/be701e9e89ab38022612c7263edc015bc7feaff9/src/datasets/packaged_modules/json/json.py#L119-L139 When the try on line 121 fails and the `block_size` is increased it can happen that it can't read the JSON again and gets stuck indefinitely. A hint that points in that direction is that increasing the `chunksize` argument decreases the chance of getting stuck and vice versa. Maybe it is an issue with a lock on the file that is not properly released. ## Expected results Read a JSON before the end of the universe. ## Actual results Read a JSON not before the end of the universe. ## Environment info <!-- You can run the command `datasets-cli env` and copy-and-paste its output below. --> - `datasets` version: 1.18.3 - Platform: Linux-4.19.0-18-cloud-amd64-x86_64-with-glibc2.28 - Python version: 3.9.10 - PyArrow version: 7.0.0 @lhoestq we discussed this a while ago. @albertvillanova we discussed this today :)
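As a stopgap related to the `chunksize` observation above, the JSON builder's block size can be raised through `load_dataset` (a sketch with a placeholder path; this only appears to reduce the likelihood of the hang, it is not a fix):

```python
from datasets import load_dataset

# chunksize is the JSON reader's block size in bytes (default 10 MiB in this version).
ds = load_dataset(
    "json",
    data_files="data/train.jsonl",  # placeholder: newline-delimited JSON file
    chunksize=40 << 20,             # 40 MiB blocks
    split="train",
)
```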
{ "+1": 3, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 3, "url": "https://api.github.com/repos/huggingface/datasets/issues/3708/reactions" }
https://api.github.com/repos/huggingface/datasets/issues/3708/timeline
null
null
false
https://api.github.com/repos/huggingface/datasets/issues/3707
https://api.github.com/repos/huggingface/datasets
https://api.github.com/repos/huggingface/datasets/issues/3707/labels{/name}
https://api.github.com/repos/huggingface/datasets/issues/3707/comments
https://api.github.com/repos/huggingface/datasets/issues/3707/events
https://github.com/huggingface/datasets/issues/3707
1,132,741,903
I_kwDODunzps5DhEUP
3,707
`.select`: unexpected behavior with `indices`
{ "avatar_url": "https://avatars.githubusercontent.com/u/36087158?v=4", "events_url": "https://api.github.com/users/gabegma/events{/privacy}", "followers_url": "https://api.github.com/users/gabegma/followers", "following_url": "https://api.github.com/users/gabegma/following{/other_user}", "gists_url": "https://api.github.com/users/gabegma/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/gabegma", "id": 36087158, "login": "gabegma", "node_id": "MDQ6VXNlcjM2MDg3MTU4", "organizations_url": "https://api.github.com/users/gabegma/orgs", "received_events_url": "https://api.github.com/users/gabegma/received_events", "repos_url": "https://api.github.com/users/gabegma/repos", "site_admin": false, "starred_url": "https://api.github.com/users/gabegma/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/gabegma/subscriptions", "type": "User", "url": "https://api.github.com/users/gabegma" }
[ { "color": "d73a4a", "default": true, "description": "Something isn't working", "id": 1935892857, "name": "bug", "node_id": "MDU6TGFiZWwxOTM1ODkyODU3", "url": "https://api.github.com/repos/huggingface/datasets/labels/bug" } ]
closed
false
{ "avatar_url": "https://avatars.githubusercontent.com/u/47462742?v=4", "events_url": "https://api.github.com/users/mariosasko/events{/privacy}", "followers_url": "https://api.github.com/users/mariosasko/followers", "following_url": "https://api.github.com/users/mariosasko/following{/other_user}", "gists_url": "https://api.github.com/users/mariosasko/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/mariosasko", "id": 47462742, "login": "mariosasko", "node_id": "MDQ6VXNlcjQ3NDYyNzQy", "organizations_url": "https://api.github.com/users/mariosasko/orgs", "received_events_url": "https://api.github.com/users/mariosasko/received_events", "repos_url": "https://api.github.com/users/mariosasko/repos", "site_admin": false, "starred_url": "https://api.github.com/users/mariosasko/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/mariosasko/subscriptions", "type": "User", "url": "https://api.github.com/users/mariosasko" }
[ { "avatar_url": "https://avatars.githubusercontent.com/u/47462742?v=4", "events_url": "https://api.github.com/users/mariosasko/events{/privacy}", "followers_url": "https://api.github.com/users/mariosasko/followers", "following_url": "https://api.github.com/users/mariosasko/following{/other_user}", "gists_url": "https://api.github.com/users/mariosasko/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/mariosasko", "id": 47462742, "login": "mariosasko", "node_id": "MDQ6VXNlcjQ3NDYyNzQy", "organizations_url": "https://api.github.com/users/mariosasko/orgs", "received_events_url": "https://api.github.com/users/mariosasko/received_events", "repos_url": "https://api.github.com/users/mariosasko/repos", "site_admin": false, "starred_url": "https://api.github.com/users/mariosasko/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/mariosasko/subscriptions", "type": "User", "url": "https://api.github.com/users/mariosasko" } ]
null
2
2022-02-11T15:20:01Z
2022-02-14T19:19:21Z
2022-02-14T19:19:21Z
NONE
null
null
null
## Describe the bug The `.select` method will not throw when passed `indices` larger than the dataset length; `indices` will be wrapped instead. This behavior is not documented anywhere, and is not intuitive. ## Steps to reproduce the bug ```python from datasets import Dataset ds = Dataset.from_dict({"text": ["d", "e", "f"], "label": [4, 5, 6]}) res1 = ds.select([1, 2, 3])['text'] res2 = ds.select([1000])['text'] ``` ## Expected results Both results should throw an `Error`. ## Actual results `res1` will give `['e', 'f', 'd']` `res2` will give `['e']` ## Environment info Bug found in this environment: - `datasets` version: 1.16.1 - Platform: macOS-10.16-x86_64-i386-64bit - Python version: 3.8.7 - PyArrow version: 6.0.1 It was also replicated on `master`.
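A hedged sketch of the explicit bounds check one can add in user code for versions where `.select` silently wraps (the helper name is made up):

```python
from datasets import Dataset

ds = Dataset.from_dict({"text": ["d", "e", "f"], "label": [4, 5, 6]})

def safe_select(dataset: Dataset, indices):
    # Reject anything outside [0, len(dataset)) instead of letting it wrap around.
    bad = [i for i in indices if not 0 <= i < len(dataset)]
    if bad:
        raise IndexError(f"Indices {bad} out of range for dataset of length {len(dataset)}")
    return dataset.select(indices)

print(safe_select(ds, [1, 2])["text"])  # ['e', 'f']
# safe_select(ds, [1000])  # raises IndexError instead of returning ['e']
```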
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/3707/reactions" }
https://api.github.com/repos/huggingface/datasets/issues/3707/timeline
null
completed
false
https://api.github.com/repos/huggingface/datasets/issues/3706
https://api.github.com/repos/huggingface/datasets
https://api.github.com/repos/huggingface/datasets/issues/3706/labels{/name}
https://api.github.com/repos/huggingface/datasets/issues/3706/comments
https://api.github.com/repos/huggingface/datasets/issues/3706/events
https://github.com/huggingface/datasets/issues/3706
1,132,218,874
I_kwDODunzps5DfEn6
3,706
Unable to load dataset 'big_patent'
{ "avatar_url": "https://avatars.githubusercontent.com/u/26432753?v=4", "events_url": "https://api.github.com/users/ankitk2109/events{/privacy}", "followers_url": "https://api.github.com/users/ankitk2109/followers", "following_url": "https://api.github.com/users/ankitk2109/following{/other_user}", "gists_url": "https://api.github.com/users/ankitk2109/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/ankitk2109", "id": 26432753, "login": "ankitk2109", "node_id": "MDQ6VXNlcjI2NDMyNzUz", "organizations_url": "https://api.github.com/users/ankitk2109/orgs", "received_events_url": "https://api.github.com/users/ankitk2109/received_events", "repos_url": "https://api.github.com/users/ankitk2109/repos", "site_admin": false, "starred_url": "https://api.github.com/users/ankitk2109/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/ankitk2109/subscriptions", "type": "User", "url": "https://api.github.com/users/ankitk2109" }
[ { "color": "d73a4a", "default": true, "description": "Something isn't working", "id": 1935892857, "name": "bug", "node_id": "MDU6TGFiZWwxOTM1ODkyODU3", "url": "https://api.github.com/repos/huggingface/datasets/labels/bug" } ]
closed
false
null
[]
null
5
2022-02-11T09:48:34Z
2022-02-14T15:26:03Z
2022-02-14T15:26:03Z
NONE
null
null
null
## Describe the bug Unable to load the "big_patent" dataset ## Steps to reproduce the bug ```python load_dataset('big_patent', 'd', 'validation') ``` ## Expected results Download big_patent's validation split from the 'd' subset ## Actual results Getting an error saying: {FileNotFoundError}Local file ..\huggingface\datasets\downloads\6159313604f4f2c01e7d1cac52139343b6c07f73f6de348d09be6213478455c5\bigPatentData\train.tar.gz doesn't exist ## Environment info <!-- You can run the command `datasets-cli env` and copy-and-paste its output below. --> - `datasets` version: 1.18.3 - Platform: Windows - Python version: 3.8 - PyArrow version: 7.0.0
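As a hedged aside: in `load_dataset` the third positional argument is not `split`, so the snippet above likely does not request the validation split at all; the keyword form below is the documented way (this does not by itself explain the `FileNotFoundError`):

```python
from datasets import load_dataset

# Request only the validation split of the "d" subset explicitly.
ds = load_dataset("big_patent", "d", split="validation")
```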
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/3706/reactions" }
https://api.github.com/repos/huggingface/datasets/issues/3706/timeline
null
completed
false
https://api.github.com/repos/huggingface/datasets/issues/3705
https://api.github.com/repos/huggingface/datasets
https://api.github.com/repos/huggingface/datasets/issues/3705/labels{/name}
https://api.github.com/repos/huggingface/datasets/issues/3705/comments
https://api.github.com/repos/huggingface/datasets/issues/3705/events
https://github.com/huggingface/datasets/pull/3705
1,132,053,226
PR_kwDODunzps4yfhyj
3,705
Raise informative error when loading a save_to_disk dataset
{ "avatar_url": "https://avatars.githubusercontent.com/u/8515462?v=4", "events_url": "https://api.github.com/users/albertvillanova/events{/privacy}", "followers_url": "https://api.github.com/users/albertvillanova/followers", "following_url": "https://api.github.com/users/albertvillanova/following{/other_user}", "gists_url": "https://api.github.com/users/albertvillanova/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/albertvillanova", "id": 8515462, "login": "albertvillanova", "node_id": "MDQ6VXNlcjg1MTU0NjI=", "organizations_url": "https://api.github.com/users/albertvillanova/orgs", "received_events_url": "https://api.github.com/users/albertvillanova/received_events", "repos_url": "https://api.github.com/users/albertvillanova/repos", "site_admin": false, "starred_url": "https://api.github.com/users/albertvillanova/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/albertvillanova/subscriptions", "type": "User", "url": "https://api.github.com/users/albertvillanova" }
[]
closed
false
null
[]
null
0
2022-02-11T08:21:03Z
2022-02-11T22:56:40Z
2022-02-11T22:56:39Z
MEMBER
null
0
{ "diff_url": "https://github.com/huggingface/datasets/pull/3705.diff", "html_url": "https://github.com/huggingface/datasets/pull/3705", "merged_at": "2022-02-11T22:56:39Z", "patch_url": "https://github.com/huggingface/datasets/pull/3705.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/3705" }
People recurrently report errors when trying to load a dataset (using `load_dataset`) that was previously saved using `save_to_disk`. This PR raises an informative error message telling them they should use `load_from_disk` instead. Close #3700.
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/3705/reactions" }
https://api.github.com/repos/huggingface/datasets/issues/3705/timeline
null
null
true
https://api.github.com/repos/huggingface/datasets/issues/3704
https://api.github.com/repos/huggingface/datasets
https://api.github.com/repos/huggingface/datasets/issues/3704/labels{/name}
https://api.github.com/repos/huggingface/datasets/issues/3704/comments
https://api.github.com/repos/huggingface/datasets/issues/3704/events
https://github.com/huggingface/datasets/issues/3704
1,132,042,631
I_kwDODunzps5DeZmH
3,704
OSCAR-2109 datasets are misaligned and truncated
{ "avatar_url": "https://avatars.githubusercontent.com/u/5794899?v=4", "events_url": "https://api.github.com/users/adrianeboyd/events{/privacy}", "followers_url": "https://api.github.com/users/adrianeboyd/followers", "following_url": "https://api.github.com/users/adrianeboyd/following{/other_user}", "gists_url": "https://api.github.com/users/adrianeboyd/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/adrianeboyd", "id": 5794899, "login": "adrianeboyd", "node_id": "MDQ6VXNlcjU3OTQ4OTk=", "organizations_url": "https://api.github.com/users/adrianeboyd/orgs", "received_events_url": "https://api.github.com/users/adrianeboyd/received_events", "repos_url": "https://api.github.com/users/adrianeboyd/repos", "site_admin": false, "starred_url": "https://api.github.com/users/adrianeboyd/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/adrianeboyd/subscriptions", "type": "User", "url": "https://api.github.com/users/adrianeboyd" }
[ { "color": "d73a4a", "default": true, "description": "Something isn't working", "id": 1935892857, "name": "bug", "node_id": "MDU6TGFiZWwxOTM1ODkyODU3", "url": "https://api.github.com/repos/huggingface/datasets/labels/bug" } ]
closed
false
null
[]
null
10
2022-02-11T08:14:59Z
2022-03-17T18:01:04Z
2022-03-16T16:21:28Z
NONE
null
null
null
## Describe the bug The `oscar-corpus/OSCAR-2109` data appears to be misaligned and truncated by the dataset builder for subsets that contain more than one part and for cases where the texts contain non-unix newlines. ## Steps to reproduce the bug A few examples, although I'm not sure how deterministic the particular (mis)alignment is in various configurations: ```python from datasets import load_dataset dataset = load_dataset("oscar-corpus/OSCAR-2109", "deduplicated_fi", split="train", use_auth_token=True) entry = dataset[0] # entry["text"] is from fi_part_3.txt.gz # entry["meta"] is from fi_meta_part_2.jsonl.gz dataset = load_dataset("oscar-corpus/OSCAR-2109", "deduplicated_no", split="train", use_auth_token=True) entry = dataset[900000] # entry["text"] is from no_part_3.txt.gz and contains a blank line # entry["meta"] is from no_meta_part_1.jsonl.gz dataset = load_dataset("oscar-corpus/OSCAR-2109", "deduplicated_mk", split="train", streaming=True, use_auth_token=True) # 9088 texts in the dataset are empty ``` For `deduplicated_fi`, all exported raw texts from the dataset are 17GB rather than 20GB as reported in the data splits overview table. The token count with `wc -w` for the raw texts is 2,067,556,874 rather than the expected 2,357,264,196 from the data splits table. For `deduplicated_no` all exported raw texts contain 624,040,887 rather than the expected 776,354,517 tokens. For `deduplicated_mk` it is 122,236,936 rather than 134,544,934 tokens. I'm not expecting the `wc -w` counts to line up exactly with the data splits table, but for comparison the `wc -w` count for `deduplicated_mk` on the raw texts is 134,545,424. ## Issues * The meta / text files are not paired correctly when loading, so the extracted texts do not have the right offsets, the metadata is not associated with the correct text, and the text files may not be processed to the end or may be processed beyond the end (empty texts). * The line count offset is not reset per file so the texts aren't aligned to the right offsets in any parts beyond the first part, leading to truncation when in effect blank lines are not skipped. * Non-unix newline characters are treated as newlines when reading the text files while the metadata only counts unix newlines for its line offsets, leading to further misalignments between the metadata and the extracted texts, and which also results in truncation. ## Expected results All texts from the OSCAR release are extracted according to the metadata and aligned with the correct metadata. 
## Fixes Not necessarily the exact fixes/checks you may want to use (I didn't test all languages or do any cross-platform testing, I'm not sure all the details are compatible with streaming), however to highlight the issues: ```diff diff --git a/OSCAR-2109.py b/OSCAR-2109.py index bbac1076..5eee8de7 100644 --- a/OSCAR-2109.py +++ b/OSCAR-2109.py @@ -20,6 +20,7 @@ import collections import gzip import json +import os import datasets @@ -387,9 +388,20 @@ class Oscar2109(datasets.GeneratorBasedBuilder): with open(checksum_file, encoding="utf-8") as f: data_filenames = [line.split()[1] for line in f if line] data_urls = [self.config.base_data_path + data_filename for data_filename in data_filenames] - text_files = dl_manager.download([url for url in data_urls if url.endswith(".txt.gz")]) - metadata_files = dl_manager.download([url for url in data_urls if url.endswith(".jsonl.gz")]) + # sort filenames so corresponding parts are aligned + text_files = sorted(dl_manager.download([url for url in data_urls if url.endswith(".txt.gz")])) + metadata_files = sorted(dl_manager.download([url for url in data_urls if url.endswith(".jsonl.gz")])) + assert len(text_files) == len(metadata_files) metadata_and_text_files = list(zip(metadata_files, text_files)) + for meta_path, text_path in metadata_and_text_files: + # check that meta/text part numbers are the same + if "part" in os.path.basename(text_path): + assert ( + os.path.basename(text_path).replace(".txt.gz", "").split("_")[-1] + == os.path.basename(meta_path).replace(".jsonl.gz", "").split("_")[-1] + ) + else: + assert len(metadata_and_text_files) == 1 return [ datasets.SplitGenerator(name=datasets.Split.TRAIN, gen_kwargs={"metadata_and_text_files": metadata_and_text_files}), ] @@ -397,10 +409,14 @@ class Oscar2109(datasets.GeneratorBasedBuilder): def _generate_examples(self, metadata_and_text_files): """This function returns the examples in the raw (text) form by iterating on all the files.""" id_ = 0 - offset = 0 for meta_path, text_path in metadata_and_text_files: + # line offsets are per text file + offset = 0 logger.info("generating examples from = %s", text_path) - with gzip.open(open(text_path, "rb"), "rt", encoding="utf-8") as text_f: + # some texts contain non-Unix newlines that should not be + # interpreted as line breaks for the line counts in the metadata + # with readline() + with gzip.open(open(text_path, "rb"), "rt", encoding="utf-8", newline="\n") as text_f: with gzip.open(open(meta_path, "rb"), "rt", encoding="utf-8") as meta_f: for line in meta_f: # read meta @@ -411,7 +427,12 @@ class Oscar2109(datasets.GeneratorBasedBuilder): offset += 1 text_f.readline() # read text - text = "".join([text_f.readline() for _ in range(meta["nb_sentences"])]).rstrip() + text_lines = [text_f.readline() for _ in range(meta["nb_sentences"])] + # all lines contain text (no blank lines or EOF) + assert all(text_lines) + assert "\n" not in text_lines offset += meta["nb_sentences"] + # only strip the trailing newline + text = "".join(text_lines).rstrip("\n") yield id_, {"id": id_, "text": text, "meta": meta} id_ += 1 ``` I've tested this with a number of smaller deduplicated languages with 1-20 parts and the resulting datasets looked correct in terms of word count and size when compared to the data splits table and raw texts, and the text/metadata alignments were correct in all my spot checks. However, there are many many languages I didn't test and I'm not sure that there aren't any texts containing blank lines in the corpus, for instance. 
For the cases I tested, the assertions related to blank lines and EOF made it easier to verify that the text and metadata were aligned as intended, since there would be little chance of spurious alignments of variable-length texts across so much data.
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/3704/reactions" }
https://api.github.com/repos/huggingface/datasets/issues/3704/timeline
null
completed
false
https://api.github.com/repos/huggingface/datasets/issues/3703
https://api.github.com/repos/huggingface/datasets
https://api.github.com/repos/huggingface/datasets/issues/3703/labels{/name}
https://api.github.com/repos/huggingface/datasets/issues/3703/comments
https://api.github.com/repos/huggingface/datasets/issues/3703/events
https://github.com/huggingface/datasets/issues/3703
1,131,882,772
I_kwDODunzps5DdykU
3,703
ImportError: To be able to use this metric, you need to install the following dependencies['seqeval'] using 'pip install seqeval' for instance'
{ "avatar_url": "https://avatars.githubusercontent.com/u/28425091?v=4", "events_url": "https://api.github.com/users/zhangyifei1/events{/privacy}", "followers_url": "https://api.github.com/users/zhangyifei1/followers", "following_url": "https://api.github.com/users/zhangyifei1/following{/other_user}", "gists_url": "https://api.github.com/users/zhangyifei1/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/zhangyifei1", "id": 28425091, "login": "zhangyifei1", "node_id": "MDQ6VXNlcjI4NDI1MDkx", "organizations_url": "https://api.github.com/users/zhangyifei1/orgs", "received_events_url": "https://api.github.com/users/zhangyifei1/received_events", "repos_url": "https://api.github.com/users/zhangyifei1/repos", "site_admin": false, "starred_url": "https://api.github.com/users/zhangyifei1/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/zhangyifei1/subscriptions", "type": "User", "url": "https://api.github.com/users/zhangyifei1" }
[]
closed
false
null
[]
null
9
2022-02-11T06:38:42Z
2023-07-11T09:31:59Z
2023-07-11T09:31:59Z
NONE
null
null
null
Hi: I want to use the seqeval metric, but when I call `load_metric('seqeval')` directly, it reports that the network connection fails. So I downloaded seqeval.py to load it locally. Loading code: metric = load_metric(path='mymetric/seqeval/seqeval.py') But it raises: Traceback (most recent call last): File "/home/ubuntu/Python3.6_project/zyf_project/transformers/examples/pytorch/token-classification/run_ner.py", line 604, in <module> main() File "/home/ubuntu/Python3.6_project/zyf_project/transformers/examples/pytorch/token-classification/run_ner.py", line 481, in main metric = load_metric(path='mymetric/seqeval/seqeval.py') File "/home/ubuntu/Python3.6_project/zyf_project/transformers_venv_0209/lib/python3.7/site-packages/datasets/load.py", line 610, in load_metric dataset=False, File "/home/ubuntu/Python3.6_project/zyf_project/transformers_venv_0209/lib/python3.7/site-packages/datasets/load.py", line 450, in prepare_module f"To be able to use this {module_type}, you need to install the following dependencies" ImportError: To be able to use this metric, you need to install the following dependencies['seqeval'] using 'pip install seqeval' for instance' **What should I do? Please help me, thank you**
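For completeness, a sketch of what the error message asks for: the metric script imports the `seqeval` package at runtime, so it has to be installed even when the script is loaded from a local path (the path mirrors the report; the example labels are made up):

```python
# First: pip install seqeval
from datasets import load_metric

metric = load_metric("mymetric/seqeval/seqeval.py")  # local copy of the metric script
results = metric.compute(
    predictions=[["O", "B-PER", "I-PER"]],
    references=[["O", "B-PER", "I-PER"]],
)
print(results["overall_f1"])
```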
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/3703/reactions" }
https://api.github.com/repos/huggingface/datasets/issues/3703/timeline
null
completed
false
https://api.github.com/repos/huggingface/datasets/issues/3702
https://api.github.com/repos/huggingface/datasets
https://api.github.com/repos/huggingface/datasets/issues/3702/labels{/name}
https://api.github.com/repos/huggingface/datasets/issues/3702/comments
https://api.github.com/repos/huggingface/datasets/issues/3702/events
https://github.com/huggingface/datasets/pull/3702
1,130,666,707
PR_kwDODunzps4yahKc
3,702
Update data URL of lm1b dataset
{ "avatar_url": "https://avatars.githubusercontent.com/u/7105134?v=4", "events_url": "https://api.github.com/users/yazdanbakhsh/events{/privacy}", "followers_url": "https://api.github.com/users/yazdanbakhsh/followers", "following_url": "https://api.github.com/users/yazdanbakhsh/following{/other_user}", "gists_url": "https://api.github.com/users/yazdanbakhsh/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/yazdanbakhsh", "id": 7105134, "login": "yazdanbakhsh", "node_id": "MDQ6VXNlcjcxMDUxMzQ=", "organizations_url": "https://api.github.com/users/yazdanbakhsh/orgs", "received_events_url": "https://api.github.com/users/yazdanbakhsh/received_events", "repos_url": "https://api.github.com/users/yazdanbakhsh/repos", "site_admin": false, "starred_url": "https://api.github.com/users/yazdanbakhsh/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/yazdanbakhsh/subscriptions", "type": "User", "url": "https://api.github.com/users/yazdanbakhsh" }
[ { "color": "0e8a16", "default": false, "description": "Contribution to a dataset script", "id": 4564477500, "name": "dataset contribution", "node_id": "LA_kwDODunzps8AAAABEBBmPA", "url": "https://api.github.com/repos/huggingface/datasets/labels/dataset%20contribution" } ]
closed
false
null
[]
null
2
2022-02-10T18:46:30Z
2022-09-23T11:52:39Z
2022-09-23T11:52:39Z
NONE
null
0
{ "diff_url": "https://github.com/huggingface/datasets/pull/3702.diff", "html_url": "https://github.com/huggingface/datasets/pull/3702", "merged_at": null, "patch_url": "https://github.com/huggingface/datasets/pull/3702.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/3702" }
The HTTP address doesn't work anymore.
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/3702/reactions" }
https://api.github.com/repos/huggingface/datasets/issues/3702/timeline
null
null
true
https://api.github.com/repos/huggingface/datasets/issues/3701
https://api.github.com/repos/huggingface/datasets
https://api.github.com/repos/huggingface/datasets/issues/3701/labels{/name}
https://api.github.com/repos/huggingface/datasets/issues/3701/comments
https://api.github.com/repos/huggingface/datasets/issues/3701/events
https://github.com/huggingface/datasets/pull/3701
1,130,498,738
PR_kwDODunzps4yZ8Dw
3,701
Pin ElasticSearch
{ "avatar_url": "https://avatars.githubusercontent.com/u/42851186?v=4", "events_url": "https://api.github.com/users/lhoestq/events{/privacy}", "followers_url": "https://api.github.com/users/lhoestq/followers", "following_url": "https://api.github.com/users/lhoestq/following{/other_user}", "gists_url": "https://api.github.com/users/lhoestq/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/lhoestq", "id": 42851186, "login": "lhoestq", "node_id": "MDQ6VXNlcjQyODUxMTg2", "organizations_url": "https://api.github.com/users/lhoestq/orgs", "received_events_url": "https://api.github.com/users/lhoestq/received_events", "repos_url": "https://api.github.com/users/lhoestq/repos", "site_admin": false, "starred_url": "https://api.github.com/users/lhoestq/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/lhoestq/subscriptions", "type": "User", "url": "https://api.github.com/users/lhoestq" }
[]
closed
false
null
[]
null
0
2022-02-10T17:15:26Z
2022-02-10T17:31:13Z
2022-02-10T17:31:12Z
MEMBER
null
0
{ "diff_url": "https://github.com/huggingface/datasets/pull/3701.diff", "html_url": "https://github.com/huggingface/datasets/pull/3701", "merged_at": "2022-02-10T17:31:12Z", "patch_url": "https://github.com/huggingface/datasets/pull/3701.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/3701" }
Until we manage to support ES 8.0, I'm setting the version to `<8.0.0`. Currently we're getting this error on 8.0: ```python ValueError: Either 'hosts' or 'cloud_id' must be specified ``` when instantiating an `Elasticsearch()` object.
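For context, a sketch of the constructor change behind the pin (assuming a local Elasticsearch instance on the default port; the pin itself would be `pip install "elasticsearch<8.0.0"`):

```python
from elasticsearch import Elasticsearch

# Elasticsearch()  # accepted by the 7.x client; on 8.x this raises
#                  # ValueError: Either 'hosts' or 'cloud_id' must be specified
es = Elasticsearch(hosts=["http://localhost:9200"])  # explicit hosts, works on 8.x
```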
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/3701/reactions" }
https://api.github.com/repos/huggingface/datasets/issues/3701/timeline
null
null
true
https://api.github.com/repos/huggingface/datasets/issues/3700
https://api.github.com/repos/huggingface/datasets
https://api.github.com/repos/huggingface/datasets/issues/3700/labels{/name}
https://api.github.com/repos/huggingface/datasets/issues/3700/comments
https://api.github.com/repos/huggingface/datasets/issues/3700/events
https://github.com/huggingface/datasets/issues/3700
1,130,252,496
I_kwDODunzps5DXkjQ
3,700
Unable to load a dataset
{ "avatar_url": "https://avatars.githubusercontent.com/u/97964230?v=4", "events_url": "https://api.github.com/users/PaulchauvinAI/events{/privacy}", "followers_url": "https://api.github.com/users/PaulchauvinAI/followers", "following_url": "https://api.github.com/users/PaulchauvinAI/following{/other_user}", "gists_url": "https://api.github.com/users/PaulchauvinAI/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/PaulchauvinAI", "id": 97964230, "login": "PaulchauvinAI", "node_id": "U_kgDOBdbQxg", "organizations_url": "https://api.github.com/users/PaulchauvinAI/orgs", "received_events_url": "https://api.github.com/users/PaulchauvinAI/received_events", "repos_url": "https://api.github.com/users/PaulchauvinAI/repos", "site_admin": false, "starred_url": "https://api.github.com/users/PaulchauvinAI/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/PaulchauvinAI/subscriptions", "type": "User", "url": "https://api.github.com/users/PaulchauvinAI" }
[ { "color": "d73a4a", "default": true, "description": "Something isn't working", "id": 1935892857, "name": "bug", "node_id": "MDU6TGFiZWwxOTM1ODkyODU3", "url": "https://api.github.com/repos/huggingface/datasets/labels/bug" } ]
closed
false
null
[]
null
2
2022-02-10T15:05:53Z
2022-02-11T22:56:39Z
2022-02-11T22:56:39Z
NONE
null
null
null
## Describe the bug Unable to load a dataset from Huggingface that I have just saved. ## Steps to reproduce the bug On Google colab `! pip install datasets ` `from datasets import load_dataset` `my_path = "wiki_dataset"` `dataset = load_dataset('wikipedia', "20200501.fr")` `dataset.save_to_disk(my_path)` `dataset = load_dataset(my_path)` ## Expected results Loading the dataset ## Actual results ValueError: Couldn't cast _data_files: list<item: struct<filename: string>> child 0, item: struct<filename: string> child 0, filename: string _fingerprint: string _format_columns: null _format_kwargs: struct<> _format_type: null _indexes: struct<> _output_all_columns: bool _split: string to {'builder_name': Value(dtype='string', id=None), 'citation': Value(dtype='string', id=None), 'config_name': Value(dtype='string', id=None), 'dataset_size': Value(dtype='int64', id=None), 'description': Value(dtype='string', id=None), 'download_checksums': {}, 'download_size': Value(dtype='int64', id=None), 'features': {'title': {'dtype': Value(dtype='string', id=None), 'id': Value(dtype='null', id=None), '_type': Value(dtype='string', id=None)}, 'text': {'dtype': Value(dtype='string', id=None), 'id': Value(dtype='null', id=None), '_type': Value(dtype='string', id=None)}}, 'homepage': Value(dtype='string', id=None), 'license': Value(dtype='string', id=None), 'post_processed': Value(dtype='null', id=None), 'post_processing_size': Value(dtype='null', id=None), 'size_in_bytes': Value(dtype='int64', id=None), 'splits': {'train': {'name': Value(dtype='string', id=None), 'num_bytes': Value(dtype='int64', id=None), 'num_examples': Value(dtype='int64', id=None), 'dataset_name': Value(dtype='string', id=None)}}, 'supervised_keys': Value(dtype='null', id=None), 'task_templates': Value(dtype='null', id=None), 'version': {'version_str': Value(dtype='string', id=None), 'description': Value(dtype='string', id=None), 'major': Value(dtype='int64', id=None), 'minor': Value(dtype='int64', id=None), 'patch': Value(dtype='int64', id=None)}} because column names don't match ## Environment info - `datasets` version: 1.18.3 - Platform: Linux-5.4.144+-x86_64-with-Ubuntu-18.04-bionic - Python version: 3.7.12 - PyArrow version: 6.0.1
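A sketch of the intended round trip, for reference (a small toy dataset stands in for the Wikipedia dump; datasets written with `save_to_disk` are reloaded with `load_from_disk`, not `load_dataset`):

```python
from datasets import Dataset, load_from_disk

my_path = "wiki_dataset"
Dataset.from_dict({"text": ["a", "b", "c"]}).save_to_disk(my_path)

dataset = load_from_disk(my_path)  # works
# load_dataset(my_path)            # raises the "Couldn't cast ..." error shown above
```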
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/3700/reactions" }
https://api.github.com/repos/huggingface/datasets/issues/3700/timeline
null
completed
false
https://api.github.com/repos/huggingface/datasets/issues/3699
https://api.github.com/repos/huggingface/datasets
https://api.github.com/repos/huggingface/datasets/issues/3699/labels{/name}
https://api.github.com/repos/huggingface/datasets/issues/3699/comments
https://api.github.com/repos/huggingface/datasets/issues/3699/events
https://github.com/huggingface/datasets/pull/3699
1,130,200,593
PR_kwDODunzps4yY49I
3,699
Add dev-only config to Natural Questions dataset
{ "avatar_url": "https://avatars.githubusercontent.com/u/8515462?v=4", "events_url": "https://api.github.com/users/albertvillanova/events{/privacy}", "followers_url": "https://api.github.com/users/albertvillanova/followers", "following_url": "https://api.github.com/users/albertvillanova/following{/other_user}", "gists_url": "https://api.github.com/users/albertvillanova/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/albertvillanova", "id": 8515462, "login": "albertvillanova", "node_id": "MDQ6VXNlcjg1MTU0NjI=", "organizations_url": "https://api.github.com/users/albertvillanova/orgs", "received_events_url": "https://api.github.com/users/albertvillanova/received_events", "repos_url": "https://api.github.com/users/albertvillanova/repos", "site_admin": false, "starred_url": "https://api.github.com/users/albertvillanova/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/albertvillanova/subscriptions", "type": "User", "url": "https://api.github.com/users/albertvillanova" }
[]
closed
false
null
[]
null
2
2022-02-10T14:42:24Z
2022-02-11T09:50:22Z
2022-02-11T09:50:21Z
MEMBER
null
0
{ "diff_url": "https://github.com/huggingface/datasets/pull/3699.diff", "html_url": "https://github.com/huggingface/datasets/pull/3699", "merged_at": "2022-02-11T09:50:21Z", "patch_url": "https://github.com/huggingface/datasets/pull/3699.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/3699" }
As suggested by @lhoestq and @thomwolf, a new config has been added to the Natural Questions dataset, so that only the dev split can be downloaded. Fix #413.
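A hedged usage sketch (the configuration name "dev" is assumed from the PR description; check the dataset card for the exact name, and note the download is still sizable):

```python
from datasets import load_dataset

nq_dev = load_dataset("natural_questions", "dev", split="validation")
```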
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/3699/reactions" }
https://api.github.com/repos/huggingface/datasets/issues/3699/timeline
null
null
true
https://api.github.com/repos/huggingface/datasets/issues/3698
https://api.github.com/repos/huggingface/datasets
https://api.github.com/repos/huggingface/datasets/issues/3698/labels{/name}
https://api.github.com/repos/huggingface/datasets/issues/3698/comments
https://api.github.com/repos/huggingface/datasets/issues/3698/events
https://github.com/huggingface/datasets/pull/3698
1,129,864,282
PR_kwDODunzps4yXtyQ
3,698
Add finetune-data CodeFill
{ "avatar_url": "https://avatars.githubusercontent.com/u/49989029?v=4", "events_url": "https://api.github.com/users/rgismondi/events{/privacy}", "followers_url": "https://api.github.com/users/rgismondi/followers", "following_url": "https://api.github.com/users/rgismondi/following{/other_user}", "gists_url": "https://api.github.com/users/rgismondi/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/rgismondi", "id": 49989029, "login": "rgismondi", "node_id": "MDQ6VXNlcjQ5OTg5MDI5", "organizations_url": "https://api.github.com/users/rgismondi/orgs", "received_events_url": "https://api.github.com/users/rgismondi/received_events", "repos_url": "https://api.github.com/users/rgismondi/repos", "site_admin": false, "starred_url": "https://api.github.com/users/rgismondi/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/rgismondi/subscriptions", "type": "User", "url": "https://api.github.com/users/rgismondi" }
[ { "color": "0e8a16", "default": false, "description": "Contribution to a dataset script", "id": 4564477500, "name": "dataset contribution", "node_id": "LA_kwDODunzps8AAAABEBBmPA", "url": "https://api.github.com/repos/huggingface/datasets/labels/dataset%20contribution" } ]
closed
false
null
[]
null
1
2022-02-10T11:12:51Z
2022-10-03T09:36:18Z
2022-10-03T09:36:18Z
NONE
null
0
{ "diff_url": "https://github.com/huggingface/datasets/pull/3698.diff", "html_url": "https://github.com/huggingface/datasets/pull/3698", "merged_at": null, "patch_url": "https://github.com/huggingface/datasets/pull/3698.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/3698" }
null
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/3698/reactions" }
https://api.github.com/repos/huggingface/datasets/issues/3698/timeline
null
null
true
https://api.github.com/repos/huggingface/datasets/issues/3697
https://api.github.com/repos/huggingface/datasets
https://api.github.com/repos/huggingface/datasets/issues/3697/labels{/name}
https://api.github.com/repos/huggingface/datasets/issues/3697/comments
https://api.github.com/repos/huggingface/datasets/issues/3697/events
https://github.com/huggingface/datasets/pull/3697
1,129,795,724
PR_kwDODunzps4yXeXo
3,697
Add code-fill datasets for pretraining/finetuning/evaluating
{ "avatar_url": "https://avatars.githubusercontent.com/u/49989029?v=4", "events_url": "https://api.github.com/users/rgismondi/events{/privacy}", "followers_url": "https://api.github.com/users/rgismondi/followers", "following_url": "https://api.github.com/users/rgismondi/following{/other_user}", "gists_url": "https://api.github.com/users/rgismondi/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/rgismondi", "id": 49989029, "login": "rgismondi", "node_id": "MDQ6VXNlcjQ5OTg5MDI5", "organizations_url": "https://api.github.com/users/rgismondi/orgs", "received_events_url": "https://api.github.com/users/rgismondi/received_events", "repos_url": "https://api.github.com/users/rgismondi/repos", "site_admin": false, "starred_url": "https://api.github.com/users/rgismondi/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/rgismondi/subscriptions", "type": "User", "url": "https://api.github.com/users/rgismondi" }
[]
closed
false
null
[]
null
1
2022-02-10T10:31:48Z
2022-07-06T15:19:58Z
2022-07-06T15:19:58Z
NONE
null
0
{ "diff_url": "https://github.com/huggingface/datasets/pull/3697.diff", "html_url": "https://github.com/huggingface/datasets/pull/3697", "merged_at": null, "patch_url": "https://github.com/huggingface/datasets/pull/3697.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/3697" }
null
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/3697/reactions" }
https://api.github.com/repos/huggingface/datasets/issues/3697/timeline
null
null
true
https://api.github.com/repos/huggingface/datasets/issues/3696
https://api.github.com/repos/huggingface/datasets
https://api.github.com/repos/huggingface/datasets/issues/3696/labels{/name}
https://api.github.com/repos/huggingface/datasets/issues/3696/comments
https://api.github.com/repos/huggingface/datasets/issues/3696/events
https://github.com/huggingface/datasets/pull/3696
1,129,764,534
PR_kwDODunzps4yXXgH
3,696
Force unique keys in newsqa dataset
{ "avatar_url": "https://avatars.githubusercontent.com/u/8515462?v=4", "events_url": "https://api.github.com/users/albertvillanova/events{/privacy}", "followers_url": "https://api.github.com/users/albertvillanova/followers", "following_url": "https://api.github.com/users/albertvillanova/following{/other_user}", "gists_url": "https://api.github.com/users/albertvillanova/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/albertvillanova", "id": 8515462, "login": "albertvillanova", "node_id": "MDQ6VXNlcjg1MTU0NjI=", "organizations_url": "https://api.github.com/users/albertvillanova/orgs", "received_events_url": "https://api.github.com/users/albertvillanova/received_events", "repos_url": "https://api.github.com/users/albertvillanova/repos", "site_admin": false, "starred_url": "https://api.github.com/users/albertvillanova/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/albertvillanova/subscriptions", "type": "User", "url": "https://api.github.com/users/albertvillanova" }
[]
closed
false
null
[]
null
0
2022-02-10T10:09:19Z
2022-02-14T08:37:20Z
2022-02-14T08:37:19Z
MEMBER
null
0
{ "diff_url": "https://github.com/huggingface/datasets/pull/3696.diff", "html_url": "https://github.com/huggingface/datasets/pull/3696", "merged_at": "2022-02-14T08:37:19Z", "patch_url": "https://github.com/huggingface/datasets/pull/3696.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/3696" }
Currently, it may raise `DuplicatedKeysError`. Fix #3630.
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/3696/reactions" }
https://api.github.com/repos/huggingface/datasets/issues/3696/timeline
null
null
true
https://api.github.com/repos/huggingface/datasets/issues/3695
https://api.github.com/repos/huggingface/datasets
https://api.github.com/repos/huggingface/datasets/issues/3695/labels{/name}
https://api.github.com/repos/huggingface/datasets/issues/3695/comments
https://api.github.com/repos/huggingface/datasets/issues/3695/events
https://github.com/huggingface/datasets/pull/3695
1,129,730,148
PR_kwDODunzps4yXP44
3,695
Fix ClassLabel to/from dict when passed names_file
{ "avatar_url": "https://avatars.githubusercontent.com/u/8515462?v=4", "events_url": "https://api.github.com/users/albertvillanova/events{/privacy}", "followers_url": "https://api.github.com/users/albertvillanova/followers", "following_url": "https://api.github.com/users/albertvillanova/following{/other_user}", "gists_url": "https://api.github.com/users/albertvillanova/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/albertvillanova", "id": 8515462, "login": "albertvillanova", "node_id": "MDQ6VXNlcjg1MTU0NjI=", "organizations_url": "https://api.github.com/users/albertvillanova/orgs", "received_events_url": "https://api.github.com/users/albertvillanova/received_events", "repos_url": "https://api.github.com/users/albertvillanova/repos", "site_admin": false, "starred_url": "https://api.github.com/users/albertvillanova/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/albertvillanova/subscriptions", "type": "User", "url": "https://api.github.com/users/albertvillanova" }
[]
closed
false
null
[]
null
0
2022-02-10T09:47:10Z
2022-02-11T23:02:32Z
2022-02-11T23:02:31Z
MEMBER
null
0
{ "diff_url": "https://github.com/huggingface/datasets/pull/3695.diff", "html_url": "https://github.com/huggingface/datasets/pull/3695", "merged_at": "2022-02-11T23:02:31Z", "patch_url": "https://github.com/huggingface/datasets/pull/3695.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/3695" }
Currently, `names_file` is a field of the data class `ClassLabel`, thus appearing when transforming it to dict (when saving infos). Afterwards, when trying to read it from infos, it conflicts with the other field `names`. This PR removes `names_file` as a field of the data class `ClassLabel`: it is only used at instantiation to generate the labels. Fix #3631.
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/3695/reactions" }
https://api.github.com/repos/huggingface/datasets/issues/3695/timeline
null
null
true
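A minimal sketch of the round-trip behaviour that PR #3695 above targets, assuming a plain text file with one label name per line. `dataclasses.asdict` stands in here for the dict conversion done when saving infos; it is used only for illustration, not as the library's actual serialization path.

```python
import dataclasses
import os
import tempfile

from datasets import ClassLabel

with tempfile.TemporaryDirectory() as tmpdir:
    names_path = os.path.join(tmpdir, "labels.txt")
    with open(names_path, "w") as f:
        f.write("negative\npositive\n")

    # names_file is only used at instantiation to build the label names.
    label = ClassLabel(names_file=names_path)
    print(label.names)               # ['negative', 'positive']
    # After the fix, the serialized form no longer carries a conflicting names_file entry.
    print(dataclasses.asdict(label))
```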
https://api.github.com/repos/huggingface/datasets/issues/3693
https://api.github.com/repos/huggingface/datasets
https://api.github.com/repos/huggingface/datasets/issues/3693/labels{/name}
https://api.github.com/repos/huggingface/datasets/issues/3693/comments
https://api.github.com/repos/huggingface/datasets/issues/3693/events
https://github.com/huggingface/datasets/pull/3693
1,128,554,365
PR_kwDODunzps4yTTcQ
3,693
Standardize to `Example::`
{ "avatar_url": "https://avatars.githubusercontent.com/u/11827707?v=4", "events_url": "https://api.github.com/users/mishig25/events{/privacy}", "followers_url": "https://api.github.com/users/mishig25/followers", "following_url": "https://api.github.com/users/mishig25/following{/other_user}", "gists_url": "https://api.github.com/users/mishig25/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/mishig25", "id": 11827707, "login": "mishig25", "node_id": "MDQ6VXNlcjExODI3NzA3", "organizations_url": "https://api.github.com/users/mishig25/orgs", "received_events_url": "https://api.github.com/users/mishig25/received_events", "repos_url": "https://api.github.com/users/mishig25/repos", "site_admin": false, "starred_url": "https://api.github.com/users/mishig25/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/mishig25/subscriptions", "type": "User", "url": "https://api.github.com/users/mishig25" }
[]
closed
false
null
[]
null
1
2022-02-09T13:37:13Z
2022-02-17T10:20:55Z
2022-02-17T10:20:52Z
CONTRIBUTOR
null
0
{ "diff_url": "https://github.com/huggingface/datasets/pull/3693.diff", "html_url": "https://github.com/huggingface/datasets/pull/3693", "merged_at": null, "patch_url": "https://github.com/huggingface/datasets/pull/3693.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/3693" }
null
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/3693/reactions" }
https://api.github.com/repos/huggingface/datasets/issues/3693/timeline
null
null
true
https://api.github.com/repos/huggingface/datasets/issues/3692
https://api.github.com/repos/huggingface/datasets
https://api.github.com/repos/huggingface/datasets/issues/3692/labels{/name}
https://api.github.com/repos/huggingface/datasets/issues/3692/comments
https://api.github.com/repos/huggingface/datasets/issues/3692/events
https://github.com/huggingface/datasets/pull/3692
1,128,320,004
PR_kwDODunzps4yShiu
3,692
Update data URL in pubmed dataset
{ "avatar_url": "https://avatars.githubusercontent.com/u/8515462?v=4", "events_url": "https://api.github.com/users/albertvillanova/events{/privacy}", "followers_url": "https://api.github.com/users/albertvillanova/followers", "following_url": "https://api.github.com/users/albertvillanova/following{/other_user}", "gists_url": "https://api.github.com/users/albertvillanova/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/albertvillanova", "id": 8515462, "login": "albertvillanova", "node_id": "MDQ6VXNlcjg1MTU0NjI=", "organizations_url": "https://api.github.com/users/albertvillanova/orgs", "received_events_url": "https://api.github.com/users/albertvillanova/received_events", "repos_url": "https://api.github.com/users/albertvillanova/repos", "site_admin": false, "starred_url": "https://api.github.com/users/albertvillanova/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/albertvillanova/subscriptions", "type": "User", "url": "https://api.github.com/users/albertvillanova" }
[]
closed
false
null
[]
null
2
2022-02-09T10:06:21Z
2022-02-14T14:15:42Z
2022-02-14T14:15:41Z
MEMBER
null
0
{ "diff_url": "https://github.com/huggingface/datasets/pull/3692.diff", "html_url": "https://github.com/huggingface/datasets/pull/3692", "merged_at": "2022-02-14T14:15:41Z", "patch_url": "https://github.com/huggingface/datasets/pull/3692.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/3692" }
Fix #3655.
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/3692/reactions" }
https://api.github.com/repos/huggingface/datasets/issues/3692/timeline
null
null
true
https://api.github.com/repos/huggingface/datasets/issues/3691
https://api.github.com/repos/huggingface/datasets
https://api.github.com/repos/huggingface/datasets/issues/3691/labels{/name}
https://api.github.com/repos/huggingface/datasets/issues/3691/comments
https://api.github.com/repos/huggingface/datasets/issues/3691/events
https://github.com/huggingface/datasets/pull/3691
1,127,629,306
PR_kwDODunzps4yQThV
3,691
Upgrade black to version ~=22.0
{ "avatar_url": "https://avatars.githubusercontent.com/u/30755778?v=4", "events_url": "https://api.github.com/users/LysandreJik/events{/privacy}", "followers_url": "https://api.github.com/users/LysandreJik/followers", "following_url": "https://api.github.com/users/LysandreJik/following{/other_user}", "gists_url": "https://api.github.com/users/LysandreJik/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/LysandreJik", "id": 30755778, "login": "LysandreJik", "node_id": "MDQ6VXNlcjMwNzU1Nzc4", "organizations_url": "https://api.github.com/users/LysandreJik/orgs", "received_events_url": "https://api.github.com/users/LysandreJik/received_events", "repos_url": "https://api.github.com/users/LysandreJik/repos", "site_admin": false, "starred_url": "https://api.github.com/users/LysandreJik/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/LysandreJik/subscriptions", "type": "User", "url": "https://api.github.com/users/LysandreJik" }
[]
closed
false
null
[]
null
0
2022-02-08T18:45:19Z
2022-02-08T19:56:40Z
2022-02-08T19:56:39Z
MEMBER
null
0
{ "diff_url": "https://github.com/huggingface/datasets/pull/3691.diff", "html_url": "https://github.com/huggingface/datasets/pull/3691", "merged_at": "2022-02-08T19:56:39Z", "patch_url": "https://github.com/huggingface/datasets/pull/3691.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/3691" }
Upgrades the `datasets` library quality tool `black` to use the first stable release of `black`, version 22.0.
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/3691/reactions" }
https://api.github.com/repos/huggingface/datasets/issues/3691/timeline
null
null
true
https://api.github.com/repos/huggingface/datasets/issues/3690
https://api.github.com/repos/huggingface/datasets
https://api.github.com/repos/huggingface/datasets/issues/3690/labels{/name}
https://api.github.com/repos/huggingface/datasets/issues/3690/comments
https://api.github.com/repos/huggingface/datasets/issues/3690/events
https://github.com/huggingface/datasets/pull/3690
1,127,493,538
PR_kwDODunzps4yP2p5
3,690
Update docs to new frontend/UI
{ "avatar_url": "https://avatars.githubusercontent.com/u/11827707?v=4", "events_url": "https://api.github.com/users/mishig25/events{/privacy}", "followers_url": "https://api.github.com/users/mishig25/followers", "following_url": "https://api.github.com/users/mishig25/following{/other_user}", "gists_url": "https://api.github.com/users/mishig25/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/mishig25", "id": 11827707, "login": "mishig25", "node_id": "MDQ6VXNlcjExODI3NzA3", "organizations_url": "https://api.github.com/users/mishig25/orgs", "received_events_url": "https://api.github.com/users/mishig25/received_events", "repos_url": "https://api.github.com/users/mishig25/repos", "site_admin": false, "starred_url": "https://api.github.com/users/mishig25/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/mishig25/subscriptions", "type": "User", "url": "https://api.github.com/users/mishig25" }
[]
closed
false
null
[]
null
17
2022-02-08T16:38:09Z
2022-03-03T20:04:21Z
2022-03-03T20:04:20Z
CONTRIBUTOR
null
0
{ "diff_url": "https://github.com/huggingface/datasets/pull/3690.diff", "html_url": "https://github.com/huggingface/datasets/pull/3690", "merged_at": "2022-03-03T20:04:20Z", "patch_url": "https://github.com/huggingface/datasets/pull/3690.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/3690" }
### TLDR: Update `datasets` `docs` to the new syntax (markdown and mdx files) & frontend (as how it looks on [hf.co/transformers](https://huggingface.co/docs/transformers/index)) | Light mode | Dark mode | |-----------------------------------------------------------------------------------------------------|--------------------------------------------------------------------------------| | <img width="400" alt="Screenshot 2022-02-17 at 14 15 34" src="https://user-images.githubusercontent.com/11827707/154489358-e2fb3708-8d72-4fb6-93f0-51d4880321c0.png"> | <img width="400" alt="Screenshot 2022-02-17 at 14 16 27" src="https://user-images.githubusercontent.com/11827707/154489596-c5a1311b-181c-4341-adb3-d60a7d3abe85.png"> | ## Checklist - [x] update datasets docs to new syntax (should call `doc-builder convert`) (this PR) - [x] discuss `@property` methods frontend https://github.com/huggingface/doc-builder/pull/87 - [x] discuss `inject_arrow_table_documentation` (this PR) https://github.com/huggingface/datasets/pull/3690#discussion_r801847860 - [x] update datasets docs path on moon-landing https://github.com/huggingface/moon-landing/pull/2089 - [x] convert pyarrow docstring from Numpydoc style to groups style https://github.com/huggingface/doc-builder/pull/89(https://stackoverflow.com/a/24385103/6558628) - [x] handle `Raises` section on frontend and doc-builder https://github.com/huggingface/doc-builder/pull/86 - [x] check imgs path (this PR) (nothing to update here) - [x] doc exaples block has to follow format `Examples::` https://github.com/huggingface/datasets/pull/3693 - [x] fix [this docstring](https://github.com/huggingface/datasets/blob/6ed6ac9448311930557810383d2cfd4fe6aae269/src/datasets/arrow_dataset.py#L3339) (causing svelte compilation error) - [x] Delete sphinx related files - [x] Delete sphinx CI - [x] Update docs config in setup.py - [x] add `versions.yml` in doc-build https://github.com/huggingface/doc-build/pull/1 - [x] add `versions.yml` in doc-build-dev https://github.com/huggingface/doc-build-dev/pull/1 - [x] https://github.com/huggingface/moon-landing/pull/2089 - [x] format docstrings for example `datasets.DatasetBuilder.download_and_prepare` args format look wrong - [x] create new github actions. (can probably be in a separate PR) (see the transformers equivalents below) 1. [build_dev_documentation.yml](https://github.com/huggingface/transformers/blob/master/.github/workflows/build_dev_documentation.yml) 2. [build_documentation.yml](https://github.com/huggingface/transformers/blob/master/.github/workflows/build_documentation.yml) 3. [delete_dev_documentation.yml](https://github.com/huggingface/transformers/blob/master/.github/workflows/delete_dev_documentation.yml) ## Note to reviewers The number of changed files is a lot (100+) because I've converted all `.rst` files to `.mdx` files & they are compiling fine on the svelte side (also, moved all the imgs to to [doc-imgs repo](https://huggingface.co/datasets/huggingface/documentation-images/tree/main/datasets)). Moreover, you should just review them on preprod and see if the rendering look fine. _Therefore, I'd suggest to focus on the changed_ **`.py`** and **CI files** (github workflows, etc. you can use [this filter here](https://github.com/huggingface/datasets/pull/3690/files?file-filters%5B%5D=.py&file-filters%5B%5D=.yml&show-deleted-files=true&show-viewed-files=true)) during the review & ignore `.mdx` files. (if there's a bug in `.mdx` files, we can always handle it in a separate PR afterwards).
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 4, "total_count": 4, "url": "https://api.github.com/repos/huggingface/datasets/issues/3690/reactions" }
https://api.github.com/repos/huggingface/datasets/issues/3690/timeline
null
null
true
https://api.github.com/repos/huggingface/datasets/issues/3689
https://api.github.com/repos/huggingface/datasets
https://api.github.com/repos/huggingface/datasets/issues/3689/labels{/name}
https://api.github.com/repos/huggingface/datasets/issues/3689/comments
https://api.github.com/repos/huggingface/datasets/issues/3689/events
https://github.com/huggingface/datasets/pull/3689
1,127,422,478
PR_kwDODunzps4yPnp7
3,689
Fix streaming for servers not supporting HTTP range requests
{ "avatar_url": "https://avatars.githubusercontent.com/u/8515462?v=4", "events_url": "https://api.github.com/users/albertvillanova/events{/privacy}", "followers_url": "https://api.github.com/users/albertvillanova/followers", "following_url": "https://api.github.com/users/albertvillanova/following{/other_user}", "gists_url": "https://api.github.com/users/albertvillanova/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/albertvillanova", "id": 8515462, "login": "albertvillanova", "node_id": "MDQ6VXNlcjg1MTU0NjI=", "organizations_url": "https://api.github.com/users/albertvillanova/orgs", "received_events_url": "https://api.github.com/users/albertvillanova/received_events", "repos_url": "https://api.github.com/users/albertvillanova/repos", "site_admin": false, "starred_url": "https://api.github.com/users/albertvillanova/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/albertvillanova/subscriptions", "type": "User", "url": "https://api.github.com/users/albertvillanova" }
[]
closed
false
null
[]
null
10
2022-02-08T15:41:05Z
2022-02-10T16:51:25Z
2022-02-10T16:51:25Z
MEMBER
null
0
{ "diff_url": "https://github.com/huggingface/datasets/pull/3689.diff", "html_url": "https://github.com/huggingface/datasets/pull/3689", "merged_at": "2022-02-10T16:51:24Z", "patch_url": "https://github.com/huggingface/datasets/pull/3689.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/3689" }
Some servers do not support HTTP range requests, whereas this is required to stream some file formats (like ZIP). ~~This PR implements a workaround for those cases, by downloading the files locally in a temporary directory (cleaned up by the OS once the process is finished).~~ This PR raises a custom error explaining that streaming is not possible because the data host server does not support HTTP range requests. Fix #3677.
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/3689/reactions" }
https://api.github.com/repos/huggingface/datasets/issues/3689/timeline
null
null
true
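PR #3689 above hinges on knowing whether the data host honours HTTP range requests. The snippet below is a generic, best-effort client-side check written with `requests`; it is a sketch for illustration, not the implementation used inside `datasets`, and the URL is a placeholder.

```python
import requests

def supports_range_requests(url: str) -> bool:
    """Ask the server for the first byte and see whether it honours the Range header."""
    response = requests.get(url, headers={"Range": "bytes=0-0"}, stream=True, timeout=10)
    # 206 Partial Content (or an explicit Accept-Ranges: bytes header) signals support.
    return response.status_code == 206 or response.headers.get("Accept-Ranges") == "bytes"

# Hypothetical URL, for illustration only.
print(supports_range_requests("https://example.com/archive.zip"))
```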
https://api.github.com/repos/huggingface/datasets/issues/3688
https://api.github.com/repos/huggingface/datasets
https://api.github.com/repos/huggingface/datasets/issues/3688/labels{/name}
https://api.github.com/repos/huggingface/datasets/issues/3688/comments
https://api.github.com/repos/huggingface/datasets/issues/3688/events
https://github.com/huggingface/datasets/issues/3688
1,127,218,321
I_kwDODunzps5DL_yR
3,688
Pyarrow version error
{ "avatar_url": "https://avatars.githubusercontent.com/u/49993443?v=4", "events_url": "https://api.github.com/users/Zaker237/events{/privacy}", "followers_url": "https://api.github.com/users/Zaker237/followers", "following_url": "https://api.github.com/users/Zaker237/following{/other_user}", "gists_url": "https://api.github.com/users/Zaker237/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/Zaker237", "id": 49993443, "login": "Zaker237", "node_id": "MDQ6VXNlcjQ5OTkzNDQz", "organizations_url": "https://api.github.com/users/Zaker237/orgs", "received_events_url": "https://api.github.com/users/Zaker237/received_events", "repos_url": "https://api.github.com/users/Zaker237/repos", "site_admin": false, "starred_url": "https://api.github.com/users/Zaker237/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/Zaker237/subscriptions", "type": "User", "url": "https://api.github.com/users/Zaker237" }
[ { "color": "d73a4a", "default": true, "description": "Something isn't working", "id": 1935892857, "name": "bug", "node_id": "MDU6TGFiZWwxOTM1ODkyODU3", "url": "https://api.github.com/repos/huggingface/datasets/labels/bug" } ]
closed
false
{ "avatar_url": "https://avatars.githubusercontent.com/u/8515462?v=4", "events_url": "https://api.github.com/users/albertvillanova/events{/privacy}", "followers_url": "https://api.github.com/users/albertvillanova/followers", "following_url": "https://api.github.com/users/albertvillanova/following{/other_user}", "gists_url": "https://api.github.com/users/albertvillanova/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/albertvillanova", "id": 8515462, "login": "albertvillanova", "node_id": "MDQ6VXNlcjg1MTU0NjI=", "organizations_url": "https://api.github.com/users/albertvillanova/orgs", "received_events_url": "https://api.github.com/users/albertvillanova/received_events", "repos_url": "https://api.github.com/users/albertvillanova/repos", "site_admin": false, "starred_url": "https://api.github.com/users/albertvillanova/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/albertvillanova/subscriptions", "type": "User", "url": "https://api.github.com/users/albertvillanova" }
[ { "avatar_url": "https://avatars.githubusercontent.com/u/8515462?v=4", "events_url": "https://api.github.com/users/albertvillanova/events{/privacy}", "followers_url": "https://api.github.com/users/albertvillanova/followers", "following_url": "https://api.github.com/users/albertvillanova/following{/other_user}", "gists_url": "https://api.github.com/users/albertvillanova/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/albertvillanova", "id": 8515462, "login": "albertvillanova", "node_id": "MDQ6VXNlcjg1MTU0NjI=", "organizations_url": "https://api.github.com/users/albertvillanova/orgs", "received_events_url": "https://api.github.com/users/albertvillanova/received_events", "repos_url": "https://api.github.com/users/albertvillanova/repos", "site_admin": false, "starred_url": "https://api.github.com/users/albertvillanova/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/albertvillanova/subscriptions", "type": "User", "url": "https://api.github.com/users/albertvillanova" } ]
null
3
2022-02-08T12:53:59Z
2022-02-09T06:35:33Z
2022-02-09T06:35:32Z
NONE
null
null
null
## Describe the bug I installed datasets (version 1.17.0, 1.18.0, 1.18.3) but I'm currently not able to import it because of pyarrow. When I try to import it, I get the following error: `To use datasets, the module pyarrow>=3.0.0 is required, and the current version of pyarrow doesn't match this condition`. I tried all versions of pyarrow except `4.0.0` but still get the same error. ## Steps to reproduce the bug ```python import datasets ``` ## Expected results A clear and concise description of the expected results. ## Actual results AttributeError Traceback (most recent call last) <ipython-input-19-652e886d387f> in <module> ----> 1 import datasets ~\AppData\Local\Continuum\anaconda3\lib\site-packages\datasets\__init__.py in <module> 26 27 ---> 28 if _version.parse(pyarrow.__version__).major < 3: 29 raise ImportWarning( 30 "To use `datasets`, the module `pyarrow>=3.0.0` is required, and the current version of `pyarrow` doesn't match this condition.\n" AttributeError: 'Version' object has no attribute 'major' ## Environment info Traceback (most recent call last): File "c:\users\alex\appdata\local\continuum\anaconda3\lib\runpy.py", line 193, in _run_module_as_main "__main__", mod_spec) File "c:\users\alex\appdata\local\continuum\anaconda3\lib\runpy.py", line 85, in _run_code exec(code, run_globals) File "C:\Users\Alex\AppData\Local\Continuum\anaconda3\Scripts\datasets-cli.exe\__main__.py", line 5, in <module> File "c:\users\alex\appdata\local\continuum\anaconda3\lib\site-packages\datasets\__init__.py", line 28, in <module> if _version.parse(pyarrow.__version__).major < 3: AttributeError: 'Version' object has no attribute 'major' - `datasets` version: - Platform: Linux (Ubuntu) and Windows: conda on both - Python version: 3.7 - PyArrow version: 7.0.0
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/3688/reactions" }
https://api.github.com/repos/huggingface/datasets/issues/3688/timeline
null
completed
false
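The traceback in the issue above fails because the parsed version object has no `.major` attribute. A hedged workaround sketch is to compare full `packaging` version objects instead, assuming `packaging` is available in the environment; this is an illustration, not the library's official fix.

```python
import pyarrow
from packaging import version

# Compare full version objects instead of relying on the .major attribute,
# which is what raises AttributeError in the issue above.
if version.parse(pyarrow.__version__) < version.parse("3.0.0"):
    raise ImportWarning(
        "To use `datasets`, the module `pyarrow>=3.0.0` is required, "
        "and the current version of `pyarrow` doesn't match this condition."
    )
print(f"pyarrow {pyarrow.__version__} is recent enough.")
```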
https://api.github.com/repos/huggingface/datasets/issues/3687
https://api.github.com/repos/huggingface/datasets
https://api.github.com/repos/huggingface/datasets/issues/3687/labels{/name}
https://api.github.com/repos/huggingface/datasets/issues/3687/comments
https://api.github.com/repos/huggingface/datasets/issues/3687/events
https://github.com/huggingface/datasets/issues/3687
1,127,154,766
I_kwDODunzps5DLwRO
3,687
Can't get the text data when calling to_tf_dataset
{ "avatar_url": "https://avatars.githubusercontent.com/u/82086367?v=4", "events_url": "https://api.github.com/users/phrasenmaeher/events{/privacy}", "followers_url": "https://api.github.com/users/phrasenmaeher/followers", "following_url": "https://api.github.com/users/phrasenmaeher/following{/other_user}", "gists_url": "https://api.github.com/users/phrasenmaeher/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/phrasenmaeher", "id": 82086367, "login": "phrasenmaeher", "node_id": "MDQ6VXNlcjgyMDg2MzY3", "organizations_url": "https://api.github.com/users/phrasenmaeher/orgs", "received_events_url": "https://api.github.com/users/phrasenmaeher/received_events", "repos_url": "https://api.github.com/users/phrasenmaeher/repos", "site_admin": false, "starred_url": "https://api.github.com/users/phrasenmaeher/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/phrasenmaeher/subscriptions", "type": "User", "url": "https://api.github.com/users/phrasenmaeher" }
[]
closed
false
{ "avatar_url": "https://avatars.githubusercontent.com/u/12866554?v=4", "events_url": "https://api.github.com/users/Rocketknight1/events{/privacy}", "followers_url": "https://api.github.com/users/Rocketknight1/followers", "following_url": "https://api.github.com/users/Rocketknight1/following{/other_user}", "gists_url": "https://api.github.com/users/Rocketknight1/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/Rocketknight1", "id": 12866554, "login": "Rocketknight1", "node_id": "MDQ6VXNlcjEyODY2NTU0", "organizations_url": "https://api.github.com/users/Rocketknight1/orgs", "received_events_url": "https://api.github.com/users/Rocketknight1/received_events", "repos_url": "https://api.github.com/users/Rocketknight1/repos", "site_admin": false, "starred_url": "https://api.github.com/users/Rocketknight1/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/Rocketknight1/subscriptions", "type": "User", "url": "https://api.github.com/users/Rocketknight1" }
[ { "avatar_url": "https://avatars.githubusercontent.com/u/12866554?v=4", "events_url": "https://api.github.com/users/Rocketknight1/events{/privacy}", "followers_url": "https://api.github.com/users/Rocketknight1/followers", "following_url": "https://api.github.com/users/Rocketknight1/following{/other_user}", "gists_url": "https://api.github.com/users/Rocketknight1/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/Rocketknight1", "id": 12866554, "login": "Rocketknight1", "node_id": "MDQ6VXNlcjEyODY2NTU0", "organizations_url": "https://api.github.com/users/Rocketknight1/orgs", "received_events_url": "https://api.github.com/users/Rocketknight1/received_events", "repos_url": "https://api.github.com/users/Rocketknight1/repos", "site_admin": false, "starred_url": "https://api.github.com/users/Rocketknight1/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/Rocketknight1/subscriptions", "type": "User", "url": "https://api.github.com/users/Rocketknight1" } ]
null
6
2022-02-08T11:52:10Z
2023-01-19T14:55:18Z
2023-01-19T14:55:18Z
NONE
null
null
null
I am working with the SST2 dataset, and am using TensorFlow 2.5. I'd like to convert it to a `tf.data.Dataset` by calling the `to_tf_dataset` method. The following snippet is what I am using to achieve this: ``` from datasets import load_dataset from transformers import DefaultDataCollator data_collator = DefaultDataCollator(return_tensors="tf") dataset = load_dataset("sst") train_dataset = dataset["train"].to_tf_dataset(columns=['sentence'], label_cols="label", shuffle=True, batch_size=8, collate_fn=data_collator) ``` However, this only gets me the labels; the text--the most important part--is missing: ``` for s in train_dataset.take(1): print(s) #prints something like: ({}, <tf.Tensor: shape=(8,), ...>) ``` As you can see, it only returns the label part, not the data, as indicated by the empty dictionary, `{}`. So far, I've played with various settings of the method arguments, but to no avail; I do not want to perform any text processing at this time. On my quest to achieve what I want (a `tf.data.Dataset`), I've consulted these resources: [https://www.philschmid.de/huggingface-transformers-keras-tf](https://www.philschmid.de/huggingface-transformers-keras-tf) [https://huggingface.co/docs/datasets/use_dataset.html?highlight=tensorflow](https://huggingface.co/docs/datasets/use_dataset.html?highlight=tensorflow) I was surprised not to find more extensive examples on how to transform a Huggingface dataset to one compatible with TensorFlow. If you could point me to where I am going wrong, please do so. Thanks in advance for your support. --- Edit: In the [docs](https://huggingface.co/docs/datasets/package_reference/main_classes.html#datasets.Dataset.to_tf_dataset), I found the following description: _In general, only columns that the model can use as input should be included here (numeric data only)._ Does this imply that no textual, i.e., `string` data can be loaded?
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/3687/reactions" }
https://api.github.com/repos/huggingface/datasets/issues/3687/timeline
null
completed
false
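As the docs quoted in the issue above suggest, `to_tf_dataset` only forwards numeric columns, so raw string columns are dropped. One hedged workaround, sketched below, is to tokenize the text before the conversion; the checkpoint name is an arbitrary choice for illustration, not a recommendation.

```python
from datasets import load_dataset
from transformers import AutoTokenizer, DataCollatorWithPadding

# Tokenize first so that the columns handed to to_tf_dataset are numeric.
tokenizer = AutoTokenizer.from_pretrained("distilbert-base-uncased")
dataset = load_dataset("sst", split="train")
dataset = dataset.map(lambda batch: tokenizer(batch["sentence"], truncation=True), batched=True)

data_collator = DataCollatorWithPadding(tokenizer=tokenizer, return_tensors="tf")
train_dataset = dataset.to_tf_dataset(
    columns=["input_ids", "attention_mask"],
    label_cols="label",
    shuffle=True,
    batch_size=8,
    collate_fn=data_collator,
)

for batch in train_dataset.take(1):
    print(batch)  # now contains input_ids/attention_mask tensors alongside the labels
```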
https://api.github.com/repos/huggingface/datasets/issues/3686
https://api.github.com/repos/huggingface/datasets
https://api.github.com/repos/huggingface/datasets/issues/3686/labels{/name}
https://api.github.com/repos/huggingface/datasets/issues/3686/comments
https://api.github.com/repos/huggingface/datasets/issues/3686/events
https://github.com/huggingface/datasets/issues/3686
1,127,137,290
I_kwDODunzps5DLsAK
3,686
`Translation` features cannot be `flatten`ed
{ "avatar_url": "https://avatars.githubusercontent.com/u/33657802?v=4", "events_url": "https://api.github.com/users/SBrandeis/events{/privacy}", "followers_url": "https://api.github.com/users/SBrandeis/followers", "following_url": "https://api.github.com/users/SBrandeis/following{/other_user}", "gists_url": "https://api.github.com/users/SBrandeis/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/SBrandeis", "id": 33657802, "login": "SBrandeis", "node_id": "MDQ6VXNlcjMzNjU3ODAy", "organizations_url": "https://api.github.com/users/SBrandeis/orgs", "received_events_url": "https://api.github.com/users/SBrandeis/received_events", "repos_url": "https://api.github.com/users/SBrandeis/repos", "site_admin": false, "starred_url": "https://api.github.com/users/SBrandeis/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/SBrandeis/subscriptions", "type": "User", "url": "https://api.github.com/users/SBrandeis" }
[ { "color": "d73a4a", "default": true, "description": "Something isn't working", "id": 1935892857, "name": "bug", "node_id": "MDU6TGFiZWwxOTM1ODkyODU3", "url": "https://api.github.com/repos/huggingface/datasets/labels/bug" } ]
closed
false
{ "avatar_url": "https://avatars.githubusercontent.com/u/47462742?v=4", "events_url": "https://api.github.com/users/mariosasko/events{/privacy}", "followers_url": "https://api.github.com/users/mariosasko/followers", "following_url": "https://api.github.com/users/mariosasko/following{/other_user}", "gists_url": "https://api.github.com/users/mariosasko/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/mariosasko", "id": 47462742, "login": "mariosasko", "node_id": "MDQ6VXNlcjQ3NDYyNzQy", "organizations_url": "https://api.github.com/users/mariosasko/orgs", "received_events_url": "https://api.github.com/users/mariosasko/received_events", "repos_url": "https://api.github.com/users/mariosasko/repos", "site_admin": false, "starred_url": "https://api.github.com/users/mariosasko/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/mariosasko/subscriptions", "type": "User", "url": "https://api.github.com/users/mariosasko" }
[ { "avatar_url": "https://avatars.githubusercontent.com/u/47462742?v=4", "events_url": "https://api.github.com/users/mariosasko/events{/privacy}", "followers_url": "https://api.github.com/users/mariosasko/followers", "following_url": "https://api.github.com/users/mariosasko/following{/other_user}", "gists_url": "https://api.github.com/users/mariosasko/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/mariosasko", "id": 47462742, "login": "mariosasko", "node_id": "MDQ6VXNlcjQ3NDYyNzQy", "organizations_url": "https://api.github.com/users/mariosasko/orgs", "received_events_url": "https://api.github.com/users/mariosasko/received_events", "repos_url": "https://api.github.com/users/mariosasko/repos", "site_admin": false, "starred_url": "https://api.github.com/users/mariosasko/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/mariosasko/subscriptions", "type": "User", "url": "https://api.github.com/users/mariosasko" } ]
null
1
2022-02-08T11:33:48Z
2022-03-18T17:28:13Z
2022-03-18T17:28:13Z
CONTRIBUTOR
null
null
null
## Describe the bug (`Dataset.flatten`)[https://github.com/huggingface/datasets/blob/master/src/datasets/arrow_dataset.py#L1265] fails for columns with feature (`Translation`)[https://github.com/huggingface/datasets/blob/3edbeb0ec6519b79f1119adc251a1a6b379a2c12/src/datasets/features/translation.py#L8] ## Steps to reproduce the bug ```python from datasets import load_dataset dataset = load_dataset("europa_ecdc_tm", "en2fr", split="train[:10]") print(dataset.features) # {'translation': Translation(languages=['en', 'fr'], id=None)} print(dataset[0]) # {'translation': {'en': 'Vaccination against hepatitis C is not yet available.', 'fr': 'Aucune vaccination contre l’hépatite C n’est encore disponible.'}} dataset.flatten() ``` ## Expected results `dataset.flatten` should flatten the `Translation` column as if it were a dict of `Value("string")` ```python dataset[0] # {'translation.en': 'Vaccination against hepatitis C is not yet available.', 'translation.fr': 'Aucune vaccination contre l’hépatite C n’est encore disponible.' } dataset.features # {'translation.en': Value("string"), 'translation.fr': Value("string")} ``` ## Actual results ```python In [31]: dset.flatten() --------------------------------------------------------------------------- KeyError Traceback (most recent call last) <ipython-input-31-bb88eb5276ee> in <module> ----> 1 dset.flatten() [...]\site-packages\datasets\fingerprint.py in wrapper(*args, **kwargs) 411 # Call actual function 412 --> 413 out = func(self, *args, **kwargs) 414 415 # Update fingerprint of in-place transforms + update in-place history of transforms [...]\site-packages\datasets\arrow_dataset.py in flatten(self, new_fingerprint, max_depth) 1294 break 1295 dataset.info.features = self.features.flatten(max_depth=max_depth) -> 1296 dataset._data = update_metadata_with_features(dataset._data, dataset.features) 1297 logger.info(f'Flattened dataset from depth {depth} to depth {1 if depth + 1 < max_depth else "unknown"}.') 1298 dataset._fingerprint = new_fingerprint [...]\site-packages\datasets\arrow_dataset.py in update_metadata_with_features(table, features) 534 def update_metadata_with_features(table: Table, features: Features): 535 """To be used in dataset transforms that modify the features of the dataset, in order to update the features stored in the metadata of its schema.""" --> 536 features = Features({col_name: features[col_name] for col_name in table.column_names}) 537 if table.schema.metadata is None or b"huggingface" not in table.schema.metadata: 538 pa_metadata = ArrowWriter._build_metadata(DatasetInfo(features=features)) [...]\site-packages\datasets\arrow_dataset.py in <dictcomp>(.0) 534 def update_metadata_with_features(table: Table, features: Features): 535 """To be used in dataset transforms that modify the features of the dataset, in order to update the features stored in the metadata of its schema.""" --> 536 features = Features({col_name: features[col_name] for col_name in table.column_names}) 537 if table.schema.metadata is None or b"huggingface" not in table.schema.metadata: 538 pa_metadata = ArrowWriter._build_metadata(DatasetInfo(features=features)) KeyError: 'translation.en' ``` ## Environment info - `datasets` version: 1.18.3 - Platform: Windows-10-10.0.19041-SP0 - Python version: 3.7.10 - PyArrow version: 3.0.0
{ "+1": 1, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 1, "url": "https://api.github.com/repos/huggingface/datasets/issues/3686/reactions" }
https://api.github.com/repos/huggingface/datasets/issues/3686/timeline
null
completed
false
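Until `Dataset.flatten` handles `Translation` columns (the bug reported above), a hedged workaround is to flatten the column manually with `map`. This sketch reuses the `en2fr` configuration from the report; it approximates, but is not, the behaviour of the eventual library fix.

```python
from datasets import load_dataset

dataset = load_dataset("europa_ecdc_tm", "en2fr", split="train[:10]")

def split_translation(example):
    # Pull each language out of the Translation dict into its own string column.
    return {
        "translation.en": example["translation"]["en"],
        "translation.fr": example["translation"]["fr"],
    }

flat = dataset.map(split_translation, remove_columns=["translation"])
print(flat[0])
print(flat.features)
```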
https://api.github.com/repos/huggingface/datasets/issues/3685
https://api.github.com/repos/huggingface/datasets
https://api.github.com/repos/huggingface/datasets/issues/3685/labels{/name}
https://api.github.com/repos/huggingface/datasets/issues/3685/comments
https://api.github.com/repos/huggingface/datasets/issues/3685/events
https://github.com/huggingface/datasets/pull/3685
1,126,240,444
PR_kwDODunzps4yLw3m
3,685
Add support for `Audio` and `Image` feature in `push_to_hub`
{ "avatar_url": "https://avatars.githubusercontent.com/u/47462742?v=4", "events_url": "https://api.github.com/users/mariosasko/events{/privacy}", "followers_url": "https://api.github.com/users/mariosasko/followers", "following_url": "https://api.github.com/users/mariosasko/following{/other_user}", "gists_url": "https://api.github.com/users/mariosasko/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/mariosasko", "id": 47462742, "login": "mariosasko", "node_id": "MDQ6VXNlcjQ3NDYyNzQy", "organizations_url": "https://api.github.com/users/mariosasko/orgs", "received_events_url": "https://api.github.com/users/mariosasko/received_events", "repos_url": "https://api.github.com/users/mariosasko/repos", "site_admin": false, "starred_url": "https://api.github.com/users/mariosasko/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/mariosasko/subscriptions", "type": "User", "url": "https://api.github.com/users/mariosasko" }
[]
closed
false
null
[]
null
3
2022-02-07T16:47:16Z
2022-02-14T18:14:57Z
2022-02-14T18:04:58Z
CONTRIBUTOR
null
0
{ "diff_url": "https://github.com/huggingface/datasets/pull/3685.diff", "html_url": "https://github.com/huggingface/datasets/pull/3685", "merged_at": "2022-02-14T18:04:58Z", "patch_url": "https://github.com/huggingface/datasets/pull/3685.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/3685" }
Add support for the `Audio` and the `Image` feature in `push_to_hub`. The idea is to remove local path information and store file content under "bytes" in the Arrow table before the push. My initial approach (https://github.com/huggingface/datasets/commit/34c652afeff9686b6b8bf4e703c84d2205d670aa) was to use a map transform similar to [`decode_nested_example`](https://github.com/huggingface/datasets/blob/5e0f6068741464f833ff1802e24ecc2064aaea9f/src/datasets/features/features.py#L1023-L1056) while having decoding turned off, but I wasn't satisfied with the code quality, so I ended up using the `temporary_assignment` decorator to override `cast_storage`, which allows me to directly modify the underlying storage (the final op is similar to `Dataset.cast`) and results in much simpler code. Additionally, I added the `allow_cast` flag that can disable this behavior in situations where it's not needed (e.g. the dataset is already in the correct format for the Hub, etc.). EDIT: `allow_cast` was renamed to `embed_external_files`.
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/3685/reactions" }
https://api.github.com/repos/huggingface/datasets/issues/3685/timeline
null
null
true
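A hedged usage sketch for the flag introduced in the PR above. The file paths and repository name are placeholders, and the exact default value of the flag should be checked against the installed `datasets` version; the intent is that `embed_external_files=True` stores the raw file bytes in the uploaded data instead of local paths.

```python
from datasets import Audio, Dataset

# Placeholder paths and repo id, for illustration only.
dataset = Dataset.from_dict({"audio": ["path/to/clip_0.wav", "path/to/clip_1.wav"]})
dataset = dataset.cast_column("audio", Audio(sampling_rate=16_000))

# Local path info is dropped and the raw bytes are embedded before the push.
dataset.push_to_hub("username/my-audio-dataset", embed_external_files=True)
```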
https://api.github.com/repos/huggingface/datasets/issues/3684
https://api.github.com/repos/huggingface/datasets
https://api.github.com/repos/huggingface/datasets/issues/3684/labels{/name}
https://api.github.com/repos/huggingface/datasets/issues/3684/comments
https://api.github.com/repos/huggingface/datasets/issues/3684/events
https://github.com/huggingface/datasets/pull/3684
1,125,133,664
PR_kwDODunzps4yIOer
3,684
[fix]: iwslt2017 download urls
{ "avatar_url": "https://avatars.githubusercontent.com/u/48395294?v=4", "events_url": "https://api.github.com/users/msarmi9/events{/privacy}", "followers_url": "https://api.github.com/users/msarmi9/followers", "following_url": "https://api.github.com/users/msarmi9/following{/other_user}", "gists_url": "https://api.github.com/users/msarmi9/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/msarmi9", "id": 48395294, "login": "msarmi9", "node_id": "MDQ6VXNlcjQ4Mzk1Mjk0", "organizations_url": "https://api.github.com/users/msarmi9/orgs", "received_events_url": "https://api.github.com/users/msarmi9/received_events", "repos_url": "https://api.github.com/users/msarmi9/repos", "site_admin": false, "starred_url": "https://api.github.com/users/msarmi9/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/msarmi9/subscriptions", "type": "User", "url": "https://api.github.com/users/msarmi9" }
[ { "color": "0e8a16", "default": false, "description": "Contribution to a dataset script", "id": 4564477500, "name": "dataset contribution", "node_id": "LA_kwDODunzps8AAAABEBBmPA", "url": "https://api.github.com/repos/huggingface/datasets/labels/dataset%20contribution" } ]
closed
false
null
[]
null
7
2022-02-06T07:56:55Z
2022-09-22T16:20:19Z
2022-09-22T16:20:18Z
NONE
null
0
{ "diff_url": "https://github.com/huggingface/datasets/pull/3684.diff", "html_url": "https://github.com/huggingface/datasets/pull/3684", "merged_at": null, "patch_url": "https://github.com/huggingface/datasets/pull/3684.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/3684" }
Fixes #2076.
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/3684/reactions" }
https://api.github.com/repos/huggingface/datasets/issues/3684/timeline
null
null
true
https://api.github.com/repos/huggingface/datasets/issues/3683
https://api.github.com/repos/huggingface/datasets
https://api.github.com/repos/huggingface/datasets/issues/3683/labels{/name}
https://api.github.com/repos/huggingface/datasets/issues/3683/comments
https://api.github.com/repos/huggingface/datasets/issues/3683/events
https://github.com/huggingface/datasets/pull/3683
1,124,458,371
PR_kwDODunzps4yGKoj
3,683
added told-br (brazilian hate speech) dataset
{ "avatar_url": "https://avatars.githubusercontent.com/u/26556320?v=4", "events_url": "https://api.github.com/users/JAugusto97/events{/privacy}", "followers_url": "https://api.github.com/users/JAugusto97/followers", "following_url": "https://api.github.com/users/JAugusto97/following{/other_user}", "gists_url": "https://api.github.com/users/JAugusto97/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/JAugusto97", "id": 26556320, "login": "JAugusto97", "node_id": "MDQ6VXNlcjI2NTU2MzIw", "organizations_url": "https://api.github.com/users/JAugusto97/orgs", "received_events_url": "https://api.github.com/users/JAugusto97/received_events", "repos_url": "https://api.github.com/users/JAugusto97/repos", "site_admin": false, "starred_url": "https://api.github.com/users/JAugusto97/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/JAugusto97/subscriptions", "type": "User", "url": "https://api.github.com/users/JAugusto97" }
[]
closed
false
null
[]
null
2
2022-02-04T17:44:32Z
2022-02-07T21:14:52Z
2022-02-07T21:14:52Z
CONTRIBUTOR
null
0
{ "diff_url": "https://github.com/huggingface/datasets/pull/3683.diff", "html_url": "https://github.com/huggingface/datasets/pull/3683", "merged_at": "2022-02-07T21:14:52Z", "patch_url": "https://github.com/huggingface/datasets/pull/3683.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/3683" }
Hey, Adding ToLD-Br. Feel free to ask for modifications. Thanks!!
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/3683/reactions" }
https://api.github.com/repos/huggingface/datasets/issues/3683/timeline
null
null
true
https://api.github.com/repos/huggingface/datasets/issues/3682
https://api.github.com/repos/huggingface/datasets
https://api.github.com/repos/huggingface/datasets/issues/3682/labels{/name}
https://api.github.com/repos/huggingface/datasets/issues/3682/comments
https://api.github.com/repos/huggingface/datasets/issues/3682/events
https://github.com/huggingface/datasets/pull/3682
1,124,434,330
PR_kwDODunzps4yGFml
3,682
adding told-br for toxic/abusive hatespeech detection
{ "avatar_url": "https://avatars.githubusercontent.com/u/26556320?v=4", "events_url": "https://api.github.com/users/JAugusto97/events{/privacy}", "followers_url": "https://api.github.com/users/JAugusto97/followers", "following_url": "https://api.github.com/users/JAugusto97/following{/other_user}", "gists_url": "https://api.github.com/users/JAugusto97/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/JAugusto97", "id": 26556320, "login": "JAugusto97", "node_id": "MDQ6VXNlcjI2NTU2MzIw", "organizations_url": "https://api.github.com/users/JAugusto97/orgs", "received_events_url": "https://api.github.com/users/JAugusto97/received_events", "repos_url": "https://api.github.com/users/JAugusto97/repos", "site_admin": false, "starred_url": "https://api.github.com/users/JAugusto97/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/JAugusto97/subscriptions", "type": "User", "url": "https://api.github.com/users/JAugusto97" }
[]
closed
false
null
[]
null
2
2022-02-04T17:18:29Z
2022-02-07T03:23:24Z
2022-02-04T17:36:40Z
CONTRIBUTOR
null
0
{ "diff_url": "https://github.com/huggingface/datasets/pull/3682.diff", "html_url": "https://github.com/huggingface/datasets/pull/3682", "merged_at": null, "patch_url": "https://github.com/huggingface/datasets/pull/3682.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/3682" }
Hey, I'm adding our dataset from our paper published at AACL 2020. Feel free to ask for modifications. Thanks!
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/3682/reactions" }
https://api.github.com/repos/huggingface/datasets/issues/3682/timeline
null
null
true
https://api.github.com/repos/huggingface/datasets/issues/3681
https://api.github.com/repos/huggingface/datasets
https://api.github.com/repos/huggingface/datasets/issues/3681/labels{/name}
https://api.github.com/repos/huggingface/datasets/issues/3681/comments
https://api.github.com/repos/huggingface/datasets/issues/3681/events
https://github.com/huggingface/datasets/pull/3681
1,124,237,458
PR_kwDODunzps4yFcpM
3,681
Fix TestCommand to move dataset_infos instead of copying
{ "avatar_url": "https://avatars.githubusercontent.com/u/8515462?v=4", "events_url": "https://api.github.com/users/albertvillanova/events{/privacy}", "followers_url": "https://api.github.com/users/albertvillanova/followers", "following_url": "https://api.github.com/users/albertvillanova/following{/other_user}", "gists_url": "https://api.github.com/users/albertvillanova/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/albertvillanova", "id": 8515462, "login": "albertvillanova", "node_id": "MDQ6VXNlcjg1MTU0NjI=", "organizations_url": "https://api.github.com/users/albertvillanova/orgs", "received_events_url": "https://api.github.com/users/albertvillanova/received_events", "repos_url": "https://api.github.com/users/albertvillanova/repos", "site_admin": false, "starred_url": "https://api.github.com/users/albertvillanova/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/albertvillanova/subscriptions", "type": "User", "url": "https://api.github.com/users/albertvillanova" }
[]
closed
false
null
[]
null
6
2022-02-04T14:01:52Z
2023-09-24T10:00:11Z
2023-09-24T09:59:55Z
MEMBER
null
0
{ "diff_url": "https://github.com/huggingface/datasets/pull/3681.diff", "html_url": "https://github.com/huggingface/datasets/pull/3681", "merged_at": null, "patch_url": "https://github.com/huggingface/datasets/pull/3681.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/3681" }
Why do we copy instead of moving the file? CC: @lhoestq @lvwerra
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/3681/reactions" }
https://api.github.com/repos/huggingface/datasets/issues/3681/timeline
null
null
true
https://api.github.com/repos/huggingface/datasets/issues/3680
https://api.github.com/repos/huggingface/datasets
https://api.github.com/repos/huggingface/datasets/issues/3680/labels{/name}
https://api.github.com/repos/huggingface/datasets/issues/3680/comments
https://api.github.com/repos/huggingface/datasets/issues/3680/events
https://github.com/huggingface/datasets/pull/3680
1,124,213,416
PR_kwDODunzps4yFXm8
3,680
Fix TestCommand to copy dataset_infos to local dir with only data files
{ "avatar_url": "https://avatars.githubusercontent.com/u/8515462?v=4", "events_url": "https://api.github.com/users/albertvillanova/events{/privacy}", "followers_url": "https://api.github.com/users/albertvillanova/followers", "following_url": "https://api.github.com/users/albertvillanova/following{/other_user}", "gists_url": "https://api.github.com/users/albertvillanova/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/albertvillanova", "id": 8515462, "login": "albertvillanova", "node_id": "MDQ6VXNlcjg1MTU0NjI=", "organizations_url": "https://api.github.com/users/albertvillanova/orgs", "received_events_url": "https://api.github.com/users/albertvillanova/received_events", "repos_url": "https://api.github.com/users/albertvillanova/repos", "site_admin": false, "starred_url": "https://api.github.com/users/albertvillanova/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/albertvillanova/subscriptions", "type": "User", "url": "https://api.github.com/users/albertvillanova" }
[]
closed
false
null
[]
null
0
2022-02-04T13:36:46Z
2022-02-08T10:32:55Z
2022-02-08T10:32:55Z
MEMBER
null
0
{ "diff_url": "https://github.com/huggingface/datasets/pull/3680.diff", "html_url": "https://github.com/huggingface/datasets/pull/3680", "merged_at": "2022-02-08T10:32:55Z", "patch_url": "https://github.com/huggingface/datasets/pull/3680.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/3680" }
Currently this case is missed. CC: @lvwerra
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/3680/reactions" }
https://api.github.com/repos/huggingface/datasets/issues/3680/timeline
null
null
true
https://api.github.com/repos/huggingface/datasets/issues/3679
https://api.github.com/repos/huggingface/datasets
https://api.github.com/repos/huggingface/datasets/issues/3679/labels{/name}
https://api.github.com/repos/huggingface/datasets/issues/3679/comments
https://api.github.com/repos/huggingface/datasets/issues/3679/events
https://github.com/huggingface/datasets/issues/3679
1,124,062,133
I_kwDODunzps5C_9O1
3,679
Download datasets from a private hub
{ "avatar_url": "https://avatars.githubusercontent.com/u/3436143?v=4", "events_url": "https://api.github.com/users/juliensimon/events{/privacy}", "followers_url": "https://api.github.com/users/juliensimon/followers", "following_url": "https://api.github.com/users/juliensimon/following{/other_user}", "gists_url": "https://api.github.com/users/juliensimon/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/juliensimon", "id": 3436143, "login": "juliensimon", "node_id": "MDQ6VXNlcjM0MzYxNDM=", "organizations_url": "https://api.github.com/users/juliensimon/orgs", "received_events_url": "https://api.github.com/users/juliensimon/received_events", "repos_url": "https://api.github.com/users/juliensimon/repos", "site_admin": false, "starred_url": "https://api.github.com/users/juliensimon/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/juliensimon/subscriptions", "type": "User", "url": "https://api.github.com/users/juliensimon" }
[ { "color": "a2eeef", "default": true, "description": "New feature or request", "id": 1935892871, "name": "enhancement", "node_id": "MDU6TGFiZWwxOTM1ODkyODcx", "url": "https://api.github.com/repos/huggingface/datasets/labels/enhancement" }, { "color": "A929D8", "default": false, "description": "", "id": 3814924348, "name": "private-hub", "node_id": "LA_kwDODunzps7jYyA8", "url": "https://api.github.com/repos/huggingface/datasets/labels/private-hub" } ]
closed
false
null
[]
null
3
2022-02-04T10:49:06Z
2022-02-22T11:08:07Z
2022-02-22T11:08:07Z
NONE
null
null
null
In the context of a private hub deployment, customers would like to use load_dataset() to load datasets from their hub, not from the public hub. This doesn't seem to be configurable at the moment and it would be nice to add this feature. The obvious workaround is to clone the repo first and then load it from local storage, but this adds an extra step. It'd be great to have the same experience regardless of where the hub is hosted. The same issue exists with the transformers library and the CLI. I'm going to create issues there as well, and I'll reference them below.
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/3679/reactions" }
https://api.github.com/repos/huggingface/datasets/issues/3679/timeline
null
completed
false
https://api.github.com/repos/huggingface/datasets/issues/3678
https://api.github.com/repos/huggingface/datasets
https://api.github.com/repos/huggingface/datasets/issues/3678/labels{/name}
https://api.github.com/repos/huggingface/datasets/issues/3678/comments
https://api.github.com/repos/huggingface/datasets/issues/3678/events
https://github.com/huggingface/datasets/pull/3678
1,123,402,426
PR_kwDODunzps4yCt91
3,678
Add code example in wikipedia card
{ "avatar_url": "https://avatars.githubusercontent.com/u/42851186?v=4", "events_url": "https://api.github.com/users/lhoestq/events{/privacy}", "followers_url": "https://api.github.com/users/lhoestq/followers", "following_url": "https://api.github.com/users/lhoestq/following{/other_user}", "gists_url": "https://api.github.com/users/lhoestq/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/lhoestq", "id": 42851186, "login": "lhoestq", "node_id": "MDQ6VXNlcjQyODUxMTg2", "organizations_url": "https://api.github.com/users/lhoestq/orgs", "received_events_url": "https://api.github.com/users/lhoestq/received_events", "repos_url": "https://api.github.com/users/lhoestq/repos", "site_admin": false, "starred_url": "https://api.github.com/users/lhoestq/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/lhoestq/subscriptions", "type": "User", "url": "https://api.github.com/users/lhoestq" }
[]
closed
false
null
[]
null
0
2022-02-03T18:09:02Z
2022-02-21T09:14:56Z
2022-02-04T13:21:39Z
MEMBER
null
0
{ "diff_url": "https://github.com/huggingface/datasets/pull/3678.diff", "html_url": "https://github.com/huggingface/datasets/pull/3678", "merged_at": "2022-02-04T13:21:39Z", "patch_url": "https://github.com/huggingface/datasets/pull/3678.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/3678" }
Close #3292.
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/3678/reactions" }
https://api.github.com/repos/huggingface/datasets/issues/3678/timeline
null
null
true
https://api.github.com/repos/huggingface/datasets/issues/3677
https://api.github.com/repos/huggingface/datasets
https://api.github.com/repos/huggingface/datasets/issues/3677/labels{/name}
https://api.github.com/repos/huggingface/datasets/issues/3677/comments
https://api.github.com/repos/huggingface/datasets/issues/3677/events
https://github.com/huggingface/datasets/issues/3677
1,123,192,866
I_kwDODunzps5C8pAi
3,677
Discovery cannot be streamed anymore
{ "avatar_url": "https://avatars.githubusercontent.com/u/1676121?v=4", "events_url": "https://api.github.com/users/severo/events{/privacy}", "followers_url": "https://api.github.com/users/severo/followers", "following_url": "https://api.github.com/users/severo/following{/other_user}", "gists_url": "https://api.github.com/users/severo/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/severo", "id": 1676121, "login": "severo", "node_id": "MDQ6VXNlcjE2NzYxMjE=", "organizations_url": "https://api.github.com/users/severo/orgs", "received_events_url": "https://api.github.com/users/severo/received_events", "repos_url": "https://api.github.com/users/severo/repos", "site_admin": false, "starred_url": "https://api.github.com/users/severo/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/severo/subscriptions", "type": "User", "url": "https://api.github.com/users/severo" }
[ { "color": "d73a4a", "default": true, "description": "Something isn't working", "id": 1935892857, "name": "bug", "node_id": "MDU6TGFiZWwxOTM1ODkyODU3", "url": "https://api.github.com/repos/huggingface/datasets/labels/bug" } ]
closed
false
{ "avatar_url": "https://avatars.githubusercontent.com/u/8515462?v=4", "events_url": "https://api.github.com/users/albertvillanova/events{/privacy}", "followers_url": "https://api.github.com/users/albertvillanova/followers", "following_url": "https://api.github.com/users/albertvillanova/following{/other_user}", "gists_url": "https://api.github.com/users/albertvillanova/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/albertvillanova", "id": 8515462, "login": "albertvillanova", "node_id": "MDQ6VXNlcjg1MTU0NjI=", "organizations_url": "https://api.github.com/users/albertvillanova/orgs", "received_events_url": "https://api.github.com/users/albertvillanova/received_events", "repos_url": "https://api.github.com/users/albertvillanova/repos", "site_admin": false, "starred_url": "https://api.github.com/users/albertvillanova/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/albertvillanova/subscriptions", "type": "User", "url": "https://api.github.com/users/albertvillanova" }
[ { "avatar_url": "https://avatars.githubusercontent.com/u/8515462?v=4", "events_url": "https://api.github.com/users/albertvillanova/events{/privacy}", "followers_url": "https://api.github.com/users/albertvillanova/followers", "following_url": "https://api.github.com/users/albertvillanova/following{/other_user}", "gists_url": "https://api.github.com/users/albertvillanova/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/albertvillanova", "id": 8515462, "login": "albertvillanova", "node_id": "MDQ6VXNlcjg1MTU0NjI=", "organizations_url": "https://api.github.com/users/albertvillanova/orgs", "received_events_url": "https://api.github.com/users/albertvillanova/received_events", "repos_url": "https://api.github.com/users/albertvillanova/repos", "site_admin": false, "starred_url": "https://api.github.com/users/albertvillanova/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/albertvillanova/subscriptions", "type": "User", "url": "https://api.github.com/users/albertvillanova" } ]
null
2
2022-02-03T15:02:03Z
2022-02-10T16:51:24Z
2022-02-10T16:51:24Z
CONTRIBUTOR
null
null
null
## Describe the bug A clear and concise description of what the bug is. ## Steps to reproduce the bug ```python from datasets import load_dataset iterable_dataset = load_dataset("discovery", name="discovery", split="train", streaming=True) list(iterable_dataset.take(1)) ``` ## Expected results The first row of the train split. ## Actual results ``` Traceback (most recent call last): File "<stdin>", line 1, in <module> File "/home/slesage/hf/datasets-preview-backend/.venv/lib/python3.9/site-packages/datasets/iterable_dataset.py", line 365, in __iter__ for key, example in self._iter(): File "/home/slesage/hf/datasets-preview-backend/.venv/lib/python3.9/site-packages/datasets/iterable_dataset.py", line 362, in _iter yield from ex_iterable File "/home/slesage/hf/datasets-preview-backend/.venv/lib/python3.9/site-packages/datasets/iterable_dataset.py", line 272, in __iter__ yield from islice(self.ex_iterable, self.n) File "/home/slesage/hf/datasets-preview-backend/.venv/lib/python3.9/site-packages/datasets/iterable_dataset.py", line 79, in __iter__ yield from self.generate_examples_fn(**self.kwargs) File "/home/slesage/.cache/huggingface/modules/datasets_modules/datasets/discovery/542fab7a9ddc1d9726160355f7baa06a1ccc44c40bc8e12c09e9bc743aca43a2/discovery.py", line 333, in _generate_examples with open(data_file, encoding="utf8") as f: File "/home/slesage/hf/datasets-preview-backend/.venv/lib/python3.9/site-packages/datasets/streaming.py", line 64, in wrapper return function(*args, use_auth_token=use_auth_token, **kwargs) File "/home/slesage/hf/datasets-preview-backend/.venv/lib/python3.9/site-packages/datasets/utils/streaming_download_manager.py", line 369, in xopen file_obj = fsspec.open(file, mode=mode, *args, **kwargs).open() File "/home/slesage/hf/datasets-preview-backend/.venv/lib/python3.9/site-packages/fsspec/core.py", line 456, in open return open_files( File "/home/slesage/hf/datasets-preview-backend/.venv/lib/python3.9/site-packages/fsspec/core.py", line 288, in open_files fs, fs_token, paths = get_fs_token_paths( File "/home/slesage/hf/datasets-preview-backend/.venv/lib/python3.9/site-packages/fsspec/core.py", line 611, in get_fs_token_paths fs = filesystem(protocol, **inkwargs) File "/home/slesage/hf/datasets-preview-backend/.venv/lib/python3.9/site-packages/fsspec/registry.py", line 253, in filesystem return cls(**storage_options) File "/home/slesage/hf/datasets-preview-backend/.venv/lib/python3.9/site-packages/fsspec/spec.py", line 68, in __call__ obj = super().__call__(*args, **kwargs) File "/home/slesage/hf/datasets-preview-backend/.venv/lib/python3.9/site-packages/fsspec/implementations/zip.py", line 57, in __init__ self.zip = zipfile.ZipFile(self.fo) File "/home/slesage/.pyenv/versions/3.9.6/lib/python3.9/zipfile.py", line 1257, in __init__ self._RealGetContents() File "/home/slesage/.pyenv/versions/3.9.6/lib/python3.9/zipfile.py", line 1320, in _RealGetContents endrec = _EndRecData(fp) File "/home/slesage/.pyenv/versions/3.9.6/lib/python3.9/zipfile.py", line 263, in _EndRecData fpin.seek(0, 2) File "/home/slesage/hf/datasets-preview-backend/.venv/lib/python3.9/site-packages/fsspec/implementations/http.py", line 676, in seek raise ValueError("Cannot seek streaming HTTP file") ValueError: Cannot seek streaming HTTP file ``` ## Environment info - `datasets` version: 1.18.3 - Platform: Linux-5.11.0-1027-aws-x86_64-with-glibc2.31 - Python version: 3.9.6 - PyArrow version: 6.0.1
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/3677/reactions" }
https://api.github.com/repos/huggingface/datasets/issues/3677/timeline
null
completed
false
https://api.github.com/repos/huggingface/datasets/issues/3676
https://api.github.com/repos/huggingface/datasets
https://api.github.com/repos/huggingface/datasets/issues/3676/labels{/name}
https://api.github.com/repos/huggingface/datasets/issues/3676/comments
https://api.github.com/repos/huggingface/datasets/issues/3676/events
https://github.com/huggingface/datasets/issues/3676
1,123,096,362
I_kwDODunzps5C8Rcq
3,676
`None` replaced by `[]` after first batch in map
{ "avatar_url": "https://avatars.githubusercontent.com/u/42851186?v=4", "events_url": "https://api.github.com/users/lhoestq/events{/privacy}", "followers_url": "https://api.github.com/users/lhoestq/followers", "following_url": "https://api.github.com/users/lhoestq/following{/other_user}", "gists_url": "https://api.github.com/users/lhoestq/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/lhoestq", "id": 42851186, "login": "lhoestq", "node_id": "MDQ6VXNlcjQyODUxMTg2", "organizations_url": "https://api.github.com/users/lhoestq/orgs", "received_events_url": "https://api.github.com/users/lhoestq/received_events", "repos_url": "https://api.github.com/users/lhoestq/repos", "site_admin": false, "starred_url": "https://api.github.com/users/lhoestq/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/lhoestq/subscriptions", "type": "User", "url": "https://api.github.com/users/lhoestq" }
[]
closed
false
{ "avatar_url": "https://avatars.githubusercontent.com/u/42851186?v=4", "events_url": "https://api.github.com/users/lhoestq/events{/privacy}", "followers_url": "https://api.github.com/users/lhoestq/followers", "following_url": "https://api.github.com/users/lhoestq/following{/other_user}", "gists_url": "https://api.github.com/users/lhoestq/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/lhoestq", "id": 42851186, "login": "lhoestq", "node_id": "MDQ6VXNlcjQyODUxMTg2", "organizations_url": "https://api.github.com/users/lhoestq/orgs", "received_events_url": "https://api.github.com/users/lhoestq/received_events", "repos_url": "https://api.github.com/users/lhoestq/repos", "site_admin": false, "starred_url": "https://api.github.com/users/lhoestq/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/lhoestq/subscriptions", "type": "User", "url": "https://api.github.com/users/lhoestq" }
[ { "avatar_url": "https://avatars.githubusercontent.com/u/42851186?v=4", "events_url": "https://api.github.com/users/lhoestq/events{/privacy}", "followers_url": "https://api.github.com/users/lhoestq/followers", "following_url": "https://api.github.com/users/lhoestq/following{/other_user}", "gists_url": "https://api.github.com/users/lhoestq/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/lhoestq", "id": 42851186, "login": "lhoestq", "node_id": "MDQ6VXNlcjQyODUxMTg2", "organizations_url": "https://api.github.com/users/lhoestq/orgs", "received_events_url": "https://api.github.com/users/lhoestq/received_events", "repos_url": "https://api.github.com/users/lhoestq/repos", "site_admin": false, "starred_url": "https://api.github.com/users/lhoestq/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/lhoestq/subscriptions", "type": "User", "url": "https://api.github.com/users/lhoestq" } ]
null
8
2022-02-03T13:36:48Z
2022-10-28T13:13:20Z
2022-10-28T13:13:20Z
MEMBER
null
null
null
Sometimes `None` can be replaced by `[]` when running map: ```python from datasets import Dataset ds = Dataset.from_dict({"a": range(4)}) ds = ds.map(lambda x: {"b": [[None, [0]]]}, batched=True, batch_size=1, remove_columns=["a"]) print(ds.to_pandas()) # b # 0 [None, [0]] # 1 [[], [0]] # 2 [[], [0]] # 3 [[], [0]] ``` This issue has been experienced when running the `run_qa.py` example from `transformers` (see issue https://github.com/huggingface/transformers/issues/15401) This can be due to a bug when casting `None` in nested lists. Casting only happens after the first batch, since the first batch is used to infer the feature types. cc @sgugger
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 2, "heart": 1, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 3, "url": "https://api.github.com/repos/huggingface/datasets/issues/3676/reactions" }
https://api.github.com/repos/huggingface/datasets/issues/3676/timeline
null
completed
false
https://api.github.com/repos/huggingface/datasets/issues/3675
https://api.github.com/repos/huggingface/datasets
https://api.github.com/repos/huggingface/datasets/issues/3675/labels{/name}
https://api.github.com/repos/huggingface/datasets/issues/3675/comments
https://api.github.com/repos/huggingface/datasets/issues/3675/events
https://github.com/huggingface/datasets/issues/3675
1,123,078,408
I_kwDODunzps5C8NEI
3,675
Add CodeContests dataset
{ "avatar_url": "https://avatars.githubusercontent.com/u/47462742?v=4", "events_url": "https://api.github.com/users/mariosasko/events{/privacy}", "followers_url": "https://api.github.com/users/mariosasko/followers", "following_url": "https://api.github.com/users/mariosasko/following{/other_user}", "gists_url": "https://api.github.com/users/mariosasko/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/mariosasko", "id": 47462742, "login": "mariosasko", "node_id": "MDQ6VXNlcjQ3NDYyNzQy", "organizations_url": "https://api.github.com/users/mariosasko/orgs", "received_events_url": "https://api.github.com/users/mariosasko/received_events", "repos_url": "https://api.github.com/users/mariosasko/repos", "site_admin": false, "starred_url": "https://api.github.com/users/mariosasko/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/mariosasko/subscriptions", "type": "User", "url": "https://api.github.com/users/mariosasko" }
[ { "color": "e99695", "default": false, "description": "Requesting to add a new dataset", "id": 2067376369, "name": "dataset request", "node_id": "MDU6TGFiZWwyMDY3Mzc2MzY5", "url": "https://api.github.com/repos/huggingface/datasets/labels/dataset%20request" } ]
closed
false
null
[]
null
2
2022-02-03T13:20:00Z
2022-07-20T11:07:05Z
2022-07-20T11:07:05Z
CONTRIBUTOR
null
null
null
## Adding a Dataset - **Name:** CodeContests - **Description:** CodeContests is a competitive programming dataset for machine learning. - **Paper:** - **Data:** https://github.com/deepmind/code_contests - **Motivation:** This dataset was used when training [AlphaCode](https://deepmind.com/blog/article/Competitive-programming-with-AlphaCode). Instructions to add a new dataset can be found [here](https://github.com/huggingface/datasets/blob/master/ADD_NEW_DATASET.md).
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/3675/reactions" }
https://api.github.com/repos/huggingface/datasets/issues/3675/timeline
null
completed
false
https://api.github.com/repos/huggingface/datasets/issues/3674
https://api.github.com/repos/huggingface/datasets
https://api.github.com/repos/huggingface/datasets/issues/3674/labels{/name}
https://api.github.com/repos/huggingface/datasets/issues/3674/comments
https://api.github.com/repos/huggingface/datasets/issues/3674/events
https://github.com/huggingface/datasets/pull/3674
1,123,027,874
PR_kwDODunzps4yBe17
3,674
Add FrugalScore metric
{ "avatar_url": "https://avatars.githubusercontent.com/u/28675016?v=4", "events_url": "https://api.github.com/users/moussaKam/events{/privacy}", "followers_url": "https://api.github.com/users/moussaKam/followers", "following_url": "https://api.github.com/users/moussaKam/following{/other_user}", "gists_url": "https://api.github.com/users/moussaKam/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/moussaKam", "id": 28675016, "login": "moussaKam", "node_id": "MDQ6VXNlcjI4Njc1MDE2", "organizations_url": "https://api.github.com/users/moussaKam/orgs", "received_events_url": "https://api.github.com/users/moussaKam/received_events", "repos_url": "https://api.github.com/users/moussaKam/repos", "site_admin": false, "starred_url": "https://api.github.com/users/moussaKam/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/moussaKam/subscriptions", "type": "User", "url": "https://api.github.com/users/moussaKam" }
[]
closed
false
null
[]
null
5
2022-02-03T12:28:52Z
2022-02-21T15:58:44Z
2022-02-21T15:58:44Z
CONTRIBUTOR
null
0
{ "diff_url": "https://github.com/huggingface/datasets/pull/3674.diff", "html_url": "https://github.com/huggingface/datasets/pull/3674", "merged_at": "2022-02-21T15:58:44Z", "patch_url": "https://github.com/huggingface/datasets/pull/3674.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/3674" }
This pull request adds the FrugalScore metric for NLG system evaluation. FrugalScore is a reference-based metric for NLG model evaluation. It is based on a distillation approach that makes it possible to learn a fixed, low-cost version of any expensive NLG metric, while retaining most of its original performance. Paper: https://arxiv.org/abs/2110.08559?context=cs Github: https://github.com/moussaKam/FrugalScore @lhoestq
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/3674/reactions" }
https://api.github.com/repos/huggingface/datasets/issues/3674/timeline
null
null
true
https://api.github.com/repos/huggingface/datasets/issues/3673
https://api.github.com/repos/huggingface/datasets
https://api.github.com/repos/huggingface/datasets/issues/3673/labels{/name}
https://api.github.com/repos/huggingface/datasets/issues/3673/comments
https://api.github.com/repos/huggingface/datasets/issues/3673/events
https://github.com/huggingface/datasets/issues/3673
1,123,010,520
I_kwDODunzps5C78fY
3,673
`load_dataset("snli")` is different from dataset viewer
{ "avatar_url": "https://avatars.githubusercontent.com/u/61748653?v=4", "events_url": "https://api.github.com/users/pietrolesci/events{/privacy}", "followers_url": "https://api.github.com/users/pietrolesci/followers", "following_url": "https://api.github.com/users/pietrolesci/following{/other_user}", "gists_url": "https://api.github.com/users/pietrolesci/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/pietrolesci", "id": 61748653, "login": "pietrolesci", "node_id": "MDQ6VXNlcjYxNzQ4NjUz", "organizations_url": "https://api.github.com/users/pietrolesci/orgs", "received_events_url": "https://api.github.com/users/pietrolesci/received_events", "repos_url": "https://api.github.com/users/pietrolesci/repos", "site_admin": false, "starred_url": "https://api.github.com/users/pietrolesci/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/pietrolesci/subscriptions", "type": "User", "url": "https://api.github.com/users/pietrolesci" }
[ { "color": "d73a4a", "default": true, "description": "Something isn't working", "id": 1935892857, "name": "bug", "node_id": "MDU6TGFiZWwxOTM1ODkyODU3", "url": "https://api.github.com/repos/huggingface/datasets/labels/bug" }, { "color": "E5583E", "default": false, "description": "Related to the dataset viewer on huggingface.co", "id": 3470211881, "name": "dataset-viewer", "node_id": "LA_kwDODunzps7O1zsp", "url": "https://api.github.com/repos/huggingface/datasets/labels/dataset-viewer" } ]
closed
false
{ "avatar_url": "https://avatars.githubusercontent.com/u/1676121?v=4", "events_url": "https://api.github.com/users/severo/events{/privacy}", "followers_url": "https://api.github.com/users/severo/followers", "following_url": "https://api.github.com/users/severo/following{/other_user}", "gists_url": "https://api.github.com/users/severo/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/severo", "id": 1676121, "login": "severo", "node_id": "MDQ6VXNlcjE2NzYxMjE=", "organizations_url": "https://api.github.com/users/severo/orgs", "received_events_url": "https://api.github.com/users/severo/received_events", "repos_url": "https://api.github.com/users/severo/repos", "site_admin": false, "starred_url": "https://api.github.com/users/severo/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/severo/subscriptions", "type": "User", "url": "https://api.github.com/users/severo" }
[ { "avatar_url": "https://avatars.githubusercontent.com/u/1676121?v=4", "events_url": "https://api.github.com/users/severo/events{/privacy}", "followers_url": "https://api.github.com/users/severo/followers", "following_url": "https://api.github.com/users/severo/following{/other_user}", "gists_url": "https://api.github.com/users/severo/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/severo", "id": 1676121, "login": "severo", "node_id": "MDQ6VXNlcjE2NzYxMjE=", "organizations_url": "https://api.github.com/users/severo/orgs", "received_events_url": "https://api.github.com/users/severo/received_events", "repos_url": "https://api.github.com/users/severo/repos", "site_admin": false, "starred_url": "https://api.github.com/users/severo/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/severo/subscriptions", "type": "User", "url": "https://api.github.com/users/severo" } ]
null
11
2022-02-03T12:10:43Z
2022-02-16T11:22:31Z
2022-02-11T17:01:21Z
NONE
null
null
null
## Describe the bug The dataset that is downloaded from the Hub via `load_dataset("snli")` is different from what is available in the dataset viewer. In the viewer the labels are not encoded (i.e., "neutral", "entailment", "contradiction"), while the downloaded dataset shows the encoded labels (i.e., 0, 1, 2). Is this expected? ## Environment info <!-- You can run the command `datasets-cli env` and copy-and-paste its output below. --> - `datasets` version: - Platform: Ubuntu 20.4 - Python version: 3.7
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/3673/reactions" }
https://api.github.com/repos/huggingface/datasets/issues/3673/timeline
null
completed
false
https://api.github.com/repos/huggingface/datasets/issues/3672
https://api.github.com/repos/huggingface/datasets
https://api.github.com/repos/huggingface/datasets/issues/3672/labels{/name}
https://api.github.com/repos/huggingface/datasets/issues/3672/comments
https://api.github.com/repos/huggingface/datasets/issues/3672/events
https://github.com/huggingface/datasets/pull/3672
1,122,980,556
PR_kwDODunzps4yBUrZ
3,672
Prioritize `module.builder_kwargs` over defaults in `TestCommand`
{ "avatar_url": "https://avatars.githubusercontent.com/u/8264887?v=4", "events_url": "https://api.github.com/users/lvwerra/events{/privacy}", "followers_url": "https://api.github.com/users/lvwerra/followers", "following_url": "https://api.github.com/users/lvwerra/following{/other_user}", "gists_url": "https://api.github.com/users/lvwerra/gists{/gist_id}", "gravatar_id": "", "html_url": "https://github.com/lvwerra", "id": 8264887, "login": "lvwerra", "node_id": "MDQ6VXNlcjgyNjQ4ODc=", "organizations_url": "https://api.github.com/users/lvwerra/orgs", "received_events_url": "https://api.github.com/users/lvwerra/received_events", "repos_url": "https://api.github.com/users/lvwerra/repos", "site_admin": false, "starred_url": "https://api.github.com/users/lvwerra/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/lvwerra/subscriptions", "type": "User", "url": "https://api.github.com/users/lvwerra" }
[]
closed
false
null
[]
null
0
2022-02-03T11:38:42Z
2022-02-04T12:37:20Z
2022-02-04T12:37:19Z
MEMBER
null
0
{ "diff_url": "https://github.com/huggingface/datasets/pull/3672.diff", "html_url": "https://github.com/huggingface/datasets/pull/3672", "merged_at": "2022-02-04T12:37:19Z", "patch_url": "https://github.com/huggingface/datasets/pull/3672.patch", "url": "https://api.github.com/repos/huggingface/datasets/pulls/3672" }
This fixes a bug in the `TestCommand` where multiple kwargs for `name` were passed if it was set in both the defaults and `module.builder_kwargs`. Example error: ```Python Traceback (most recent call last): File "create_metadata.py", line 96, in <module> main(**vars(args)) File "create_metadata.py", line 86, in main metadata_command.run() File "/opt/conda/lib/python3.7/site-packages/datasets/commands/test.py", line 144, in run for j, builder in enumerate(get_builders()): File "/opt/conda/lib/python3.7/site-packages/datasets/commands/test.py", line 141, in get_builders name=name, cache_dir=self._cache_dir, data_dir=self._data_dir, **module.builder_kwargs TypeError: type object got multiple values for keyword argument 'name' ``` Let me know what you think.
{ "+1": 0, "-1": 0, "confused": 0, "eyes": 0, "heart": 0, "hooray": 0, "laugh": 0, "rocket": 0, "total_count": 0, "url": "https://api.github.com/repos/huggingface/datasets/issues/3672/reactions" }
https://api.github.com/repos/huggingface/datasets/issues/3672/timeline
null
null
true