url | repository_url | labels_url | comments_url | events_url | html_url | id | node_id | number | title | user | labels | state | locked | assignee | assignees | milestone | comments | created_at | updated_at | closed_at | author_association | active_lock_reason | draft | pull_request | body | reactions | timeline_url | performed_via_github_app | state_reason | is_pull_request |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
https://api.github.com/repos/huggingface/datasets/issues/1731 | https://api.github.com/repos/huggingface/datasets | https://api.github.com/repos/huggingface/datasets/issues/1731/labels{/name} | https://api.github.com/repos/huggingface/datasets/issues/1731/comments | https://api.github.com/repos/huggingface/datasets/issues/1731/events | https://github.com/huggingface/datasets/issues/1731 | 784,744,674 | MDU6SXNzdWU3ODQ3NDQ2NzQ= | 1,731 | Couldn't reach swda.py | {
"avatar_url": "https://avatars.githubusercontent.com/u/13365326?v=4",
"events_url": "https://api.github.com/users/yangp725/events{/privacy}",
"followers_url": "https://api.github.com/users/yangp725/followers",
"following_url": "https://api.github.com/users/yangp725/following{/other_user}",
"gists_url": "https://api.github.com/users/yangp725/gists{/gist_id}",
"gravatar_id": "",
"html_url": "https://github.com/yangp725",
"id": 13365326,
"login": "yangp725",
"node_id": "MDQ6VXNlcjEzMzY1MzI2",
"organizations_url": "https://api.github.com/users/yangp725/orgs",
"received_events_url": "https://api.github.com/users/yangp725/received_events",
"repos_url": "https://api.github.com/users/yangp725/repos",
"site_admin": false,
"starred_url": "https://api.github.com/users/yangp725/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/yangp725/subscriptions",
"type": "User",
"url": "https://api.github.com/users/yangp725"
} | [] | closed | false | null | [] | null | 2 | "2021-01-13T02:57:40Z" | "2021-01-13T11:17:40Z" | "2021-01-13T11:17:40Z" | NONE | null | null | null | ConnectionError: Couldn't reach https://raw.githubusercontent.com/huggingface/datasets/1.2.0/datasets/swda/swda.py
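For context, this is triggered by the standard load call (a minimal sketch, assuming the usual usage, since only the error message was pasted):
```python
from datasets import load_dataset

# load_dataset fetches datasets/swda/swda.py from GitHub at load time,
# which is the URL that can't be reached here
dataset = load_dataset("swda")
```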
| {
"+1": 0,
"-1": 0,
"confused": 0,
"eyes": 0,
"heart": 0,
"hooray": 0,
"laugh": 0,
"rocket": 0,
"total_count": 0,
"url": "https://api.github.com/repos/huggingface/datasets/issues/1731/reactions"
} | https://api.github.com/repos/huggingface/datasets/issues/1731/timeline | null | completed | false |
https://api.github.com/repos/huggingface/datasets/issues/1730 | https://api.github.com/repos/huggingface/datasets | https://api.github.com/repos/huggingface/datasets/issues/1730/labels{/name} | https://api.github.com/repos/huggingface/datasets/issues/1730/comments | https://api.github.com/repos/huggingface/datasets/issues/1730/events | https://github.com/huggingface/datasets/pull/1730 | 784,617,525 | MDExOlB1bGxSZXF1ZXN0NTUzNzgxMDY0 | 1,730 | Add MNIST dataset | {
"avatar_url": "https://avatars.githubusercontent.com/u/35901082?v=4",
"events_url": "https://api.github.com/users/sgugger/events{/privacy}",
"followers_url": "https://api.github.com/users/sgugger/followers",
"following_url": "https://api.github.com/users/sgugger/following{/other_user}",
"gists_url": "https://api.github.com/users/sgugger/gists{/gist_id}",
"gravatar_id": "",
"html_url": "https://github.com/sgugger",
"id": 35901082,
"login": "sgugger",
"node_id": "MDQ6VXNlcjM1OTAxMDgy",
"organizations_url": "https://api.github.com/users/sgugger/orgs",
"received_events_url": "https://api.github.com/users/sgugger/received_events",
"repos_url": "https://api.github.com/users/sgugger/repos",
"site_admin": false,
"starred_url": "https://api.github.com/users/sgugger/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/sgugger/subscriptions",
"type": "User",
"url": "https://api.github.com/users/sgugger"
} | [] | closed | false | null | [] | null | 0 | "2021-01-12T21:48:02Z" | "2021-01-13T10:19:47Z" | "2021-01-13T10:19:46Z" | CONTRIBUTOR | null | 0 | {
"diff_url": "https://github.com/huggingface/datasets/pull/1730.diff",
"html_url": "https://github.com/huggingface/datasets/pull/1730",
"merged_at": "2021-01-13T10:19:46Z",
"patch_url": "https://github.com/huggingface/datasets/pull/1730.patch",
"url": "https://api.github.com/repos/huggingface/datasets/pulls/1730"
} | This PR adds the MNIST dataset to the library. | {
"+1": 0,
"-1": 0,
"confused": 0,
"eyes": 0,
"heart": 0,
"hooray": 0,
"laugh": 0,
"rocket": 1,
"total_count": 1,
"url": "https://api.github.com/repos/huggingface/datasets/issues/1730/reactions"
} | https://api.github.com/repos/huggingface/datasets/issues/1730/timeline | null | null | true |
https://api.github.com/repos/huggingface/datasets/issues/1729 | https://api.github.com/repos/huggingface/datasets | https://api.github.com/repos/huggingface/datasets/issues/1729/labels{/name} | https://api.github.com/repos/huggingface/datasets/issues/1729/comments | https://api.github.com/repos/huggingface/datasets/issues/1729/events | https://github.com/huggingface/datasets/issues/1729 | 784,565,898 | MDU6SXNzdWU3ODQ1NjU4OTg= | 1,729 | Is there support for Deep learning datasets? | {
"avatar_url": "https://avatars.githubusercontent.com/u/28235457?v=4",
"events_url": "https://api.github.com/users/pablodz/events{/privacy}",
"followers_url": "https://api.github.com/users/pablodz/followers",
"following_url": "https://api.github.com/users/pablodz/following{/other_user}",
"gists_url": "https://api.github.com/users/pablodz/gists{/gist_id}",
"gravatar_id": "",
"html_url": "https://github.com/pablodz",
"id": 28235457,
"login": "pablodz",
"node_id": "MDQ6VXNlcjI4MjM1NDU3",
"organizations_url": "https://api.github.com/users/pablodz/orgs",
"received_events_url": "https://api.github.com/users/pablodz/received_events",
"repos_url": "https://api.github.com/users/pablodz/repos",
"site_admin": false,
"starred_url": "https://api.github.com/users/pablodz/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/pablodz/subscriptions",
"type": "User",
"url": "https://api.github.com/users/pablodz"
} | [] | closed | false | null | [] | null | 1 | "2021-01-12T20:22:41Z" | "2021-03-31T04:24:07Z" | "2021-03-31T04:24:07Z" | NONE | null | null | null | I looked around this repository and, looking at the datasets, I think there's no support for image datasets. Or am I missing something? For example, could a repo like this one be added: https://github.com/DZPeru/fish-datasets | {
"+1": 0,
"-1": 0,
"confused": 0,
"eyes": 0,
"heart": 0,
"hooray": 0,
"laugh": 0,
"rocket": 0,
"total_count": 0,
"url": "https://api.github.com/repos/huggingface/datasets/issues/1729/reactions"
} | https://api.github.com/repos/huggingface/datasets/issues/1729/timeline | null | completed | false |
https://api.github.com/repos/huggingface/datasets/issues/1728 | https://api.github.com/repos/huggingface/datasets | https://api.github.com/repos/huggingface/datasets/issues/1728/labels{/name} | https://api.github.com/repos/huggingface/datasets/issues/1728/comments | https://api.github.com/repos/huggingface/datasets/issues/1728/events | https://github.com/huggingface/datasets/issues/1728 | 784,458,342 | MDU6SXNzdWU3ODQ0NTgzNDI= | 1,728 | Add an entry to an arrow dataset | {
"avatar_url": "https://avatars.githubusercontent.com/u/18645407?v=4",
"events_url": "https://api.github.com/users/ameet-1997/events{/privacy}",
"followers_url": "https://api.github.com/users/ameet-1997/followers",
"following_url": "https://api.github.com/users/ameet-1997/following{/other_user}",
"gists_url": "https://api.github.com/users/ameet-1997/gists{/gist_id}",
"gravatar_id": "",
"html_url": "https://github.com/ameet-1997",
"id": 18645407,
"login": "ameet-1997",
"node_id": "MDQ6VXNlcjE4NjQ1NDA3",
"organizations_url": "https://api.github.com/users/ameet-1997/orgs",
"received_events_url": "https://api.github.com/users/ameet-1997/received_events",
"repos_url": "https://api.github.com/users/ameet-1997/repos",
"site_admin": false,
"starred_url": "https://api.github.com/users/ameet-1997/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/ameet-1997/subscriptions",
"type": "User",
"url": "https://api.github.com/users/ameet-1997"
} | [] | closed | false | null | [] | null | 5 | "2021-01-12T18:01:47Z" | "2021-01-18T19:15:32Z" | "2021-01-18T19:15:32Z" | NONE | null | null | null | Is it possible to add an entry to a dataset object?
**Motivation: I want to transform the sentences in the dataset and add them to the original dataset**
For example, say we have the following code:
``` python
from datasets import load_dataset
# Load a dataset and print the first examples in the training set
squad_dataset = load_dataset('squad')
print(squad_dataset['train'][0])
```
Is it possible to add an entry to `squad_dataset`? Something like the following?
``` python
squad_dataset.append({'text': "This is a new sentence"})
```
The motivation for doing this is that I want to transform the sentences in the squad dataset and add them to the original dataset.
If the above doesn't work, is there any other way of achieving the motivation mentioned above? Perhaps by creating a new Arrow dataset from the older one and the transformed sentences?
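For instance, would something like this be the recommended way? (Just my sketch; I assume the new rows must have exactly the same columns as the original split, and `"text"` below is only a placeholder.)
``` python
from datasets import Dataset, concatenate_datasets

# build a small dataset from the new/transformed sentences (placeholder schema)
new_rows = Dataset.from_dict({"text": ["This is a new sentence"]})

# concatenate it with the original split; this only works if the features match
augmented_train = concatenate_datasets([squad_dataset["train"], new_rows])
```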
| {
"+1": 0,
"-1": 0,
"confused": 0,
"eyes": 0,
"heart": 0,
"hooray": 0,
"laugh": 0,
"rocket": 0,
"total_count": 0,
"url": "https://api.github.com/repos/huggingface/datasets/issues/1728/reactions"
} | https://api.github.com/repos/huggingface/datasets/issues/1728/timeline | null | completed | false |
https://api.github.com/repos/huggingface/datasets/issues/1727 | https://api.github.com/repos/huggingface/datasets | https://api.github.com/repos/huggingface/datasets/issues/1727/labels{/name} | https://api.github.com/repos/huggingface/datasets/issues/1727/comments | https://api.github.com/repos/huggingface/datasets/issues/1727/events | https://github.com/huggingface/datasets/issues/1727 | 784,435,131 | MDU6SXNzdWU3ODQ0MzUxMzE= | 1,727 | BLEURT score calculation raises UnrecognizedFlagError | {
"avatar_url": "https://avatars.githubusercontent.com/u/6603920?v=4",
"events_url": "https://api.github.com/users/nadavo/events{/privacy}",
"followers_url": "https://api.github.com/users/nadavo/followers",
"following_url": "https://api.github.com/users/nadavo/following{/other_user}",
"gists_url": "https://api.github.com/users/nadavo/gists{/gist_id}",
"gravatar_id": "",
"html_url": "https://github.com/nadavo",
"id": 6603920,
"login": "nadavo",
"node_id": "MDQ6VXNlcjY2MDM5MjA=",
"organizations_url": "https://api.github.com/users/nadavo/orgs",
"received_events_url": "https://api.github.com/users/nadavo/received_events",
"repos_url": "https://api.github.com/users/nadavo/repos",
"site_admin": false,
"starred_url": "https://api.github.com/users/nadavo/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/nadavo/subscriptions",
"type": "User",
"url": "https://api.github.com/users/nadavo"
} | [] | closed | false | null | [] | null | 10 | "2021-01-12T17:27:02Z" | "2022-06-01T16:06:02Z" | "2022-06-01T16:06:02Z" | NONE | null | null | null | Calling the `compute` method for **bleurt** metric fails with an `UnrecognizedFlagError` for `FLAGS.bleurt_batch_size`.
My environment:
```
python==3.8.5
datasets==1.2.0
tensorflow==2.3.1
cudatoolkit==11.0.221
```
Test code for reproducing the error:
```
from datasets import load_metric
bleurt = load_metric('bleurt')
gen_text = "I am walking on the promenade today"
ref_text = "I am walking along the promenade on this sunny day"
bleurt.compute(predictions=[gen_text], references=[ref_text])
```
Error Output:
```
Using default BLEURT-Base checkpoint for sequence maximum length 128. You can use a bigger model for better results with e.g.: datasets.load_metric('bleurt', 'bleurt-large-512').
INFO:tensorflow:Reading checkpoint /home/ubuntu/.cache/huggingface/metrics/bleurt/default/downloads/extracted/9aee35580225730ac5422599f35c4986e4c49cafd08082123342b1019720dac4/bleurt-base-128.
INFO:tensorflow:Config file found, reading.
INFO:tensorflow:Will load checkpoint bert_custom
INFO:tensorflow:Performs basic checks...
INFO:tensorflow:... name:bert_custom
INFO:tensorflow:... vocab_file:vocab.txt
INFO:tensorflow:... bert_config_file:bert_config.json
INFO:tensorflow:... do_lower_case:True
INFO:tensorflow:... max_seq_length:128
INFO:tensorflow:Creating BLEURT scorer.
INFO:tensorflow:Loading model...
INFO:tensorflow:BLEURT initialized.
---------------------------------------------------------------------------
UnrecognizedFlagError Traceback (most recent call last)
<ipython-input-12-8b3f4322318a> in <module>
2 gen_text = "I am walking on the promenade today"
3 ref_text = "I am walking along the promenade on this sunny day"
----> 4 bleurt.compute(predictions=[gen_text], references=[ref_text])
~/anaconda3/envs/noved/lib/python3.8/site-packages/datasets/metric.py in compute(self, *args, **kwargs)
396 references = self.data["references"]
397 with temp_seed(self.seed):
--> 398 output = self._compute(predictions=predictions, references=references, **kwargs)
399
400 if self.buf_writer is not None:
~/.cache/huggingface/modules/datasets_modules/metrics/bleurt/b1de33e1cbbcb1dbe276c887efa1fad68c6aff913885108078fa1ad408908778/bleurt.py in _compute(self, predictions, references)
103
104 def _compute(self, predictions, references):
--> 105 scores = self.scorer.score(references=references, candidates=predictions)
106 return {"scores": scores}
~/anaconda3/envs/noved/lib/python3.8/site-packages/bleurt/score.py in score(self, references, candidates, batch_size)
164 """
165 if not batch_size:
--> 166 batch_size = FLAGS.bleurt_batch_size
167
168 candidates, references = list(candidates), list(references)
~/anaconda3/envs/noved/lib/python3.8/site-packages/tensorflow/python/platform/flags.py in __getattr__(self, name)
83 # a flag.
84 if not wrapped.is_parsed():
---> 85 wrapped(_sys.argv)
86 return wrapped.__getattr__(name)
87
~/anaconda3/envs/noved/lib/python3.8/site-packages/absl/flags/_flagvalues.py in __call__(self, argv, known_only)
643 for name, value in unknown_flags:
644 suggestions = _helpers.get_flag_suggestions(name, list(self))
--> 645 raise _exceptions.UnrecognizedFlagError(
646 name, value, suggestions=suggestions)
647
UnrecognizedFlagError: Unknown command line flag 'f'
```
Possible Fix:
Modify `_compute` method https://github.com/huggingface/datasets/blob/7e64851a12263dc74d41c668167918484c8000ab/metrics/bleurt/bleurt.py#L104
to receive a `batch_size` argument, for example:
```
def _compute(self, predictions, references, batch_size=1):
scores = self.scorer.score(references=references, candidates=predictions, batch_size=batch_size)
return {"scores": scores}
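# (a hedged aside, not part of the proposed fix) the traceback shows this ran
# in IPython/Jupyter, where the kernel's own "-f <connection_file>" argument
# leaks into sys.argv and becomes the unknown 'f' flag; trimming argv before
# calling compute is a possible caller-side workaround:
#
#   import sys
#   sys.argv = sys.argv[:1]
#   bleurt.compute(predictions=[gen_text], references=[ref_text])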
``` | {
"+1": 0,
"-1": 0,
"confused": 0,
"eyes": 0,
"heart": 0,
"hooray": 0,
"laugh": 0,
"rocket": 0,
"total_count": 0,
"url": "https://api.github.com/repos/huggingface/datasets/issues/1727/reactions"
} | https://api.github.com/repos/huggingface/datasets/issues/1727/timeline | null | completed | false |
https://api.github.com/repos/huggingface/datasets/issues/1726 | https://api.github.com/repos/huggingface/datasets | https://api.github.com/repos/huggingface/datasets/issues/1726/labels{/name} | https://api.github.com/repos/huggingface/datasets/issues/1726/comments | https://api.github.com/repos/huggingface/datasets/issues/1726/events | https://github.com/huggingface/datasets/pull/1726 | 784,336,370 | MDExOlB1bGxSZXF1ZXN0NTUzNTQ0ODg4 | 1,726 | Offline loading | {
"avatar_url": "https://avatars.githubusercontent.com/u/42851186?v=4",
"events_url": "https://api.github.com/users/lhoestq/events{/privacy}",
"followers_url": "https://api.github.com/users/lhoestq/followers",
"following_url": "https://api.github.com/users/lhoestq/following{/other_user}",
"gists_url": "https://api.github.com/users/lhoestq/gists{/gist_id}",
"gravatar_id": "",
"html_url": "https://github.com/lhoestq",
"id": 42851186,
"login": "lhoestq",
"node_id": "MDQ6VXNlcjQyODUxMTg2",
"organizations_url": "https://api.github.com/users/lhoestq/orgs",
"received_events_url": "https://api.github.com/users/lhoestq/received_events",
"repos_url": "https://api.github.com/users/lhoestq/repos",
"site_admin": false,
"starred_url": "https://api.github.com/users/lhoestq/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/lhoestq/subscriptions",
"type": "User",
"url": "https://api.github.com/users/lhoestq"
} | [] | closed | false | null | [] | null | 6 | "2021-01-12T15:21:57Z" | "2022-02-15T10:32:10Z" | "2021-01-19T16:42:32Z" | MEMBER | null | 0 | {
"diff_url": "https://github.com/huggingface/datasets/pull/1726.diff",
"html_url": "https://github.com/huggingface/datasets/pull/1726",
"merged_at": "2021-01-19T16:42:32Z",
"patch_url": "https://github.com/huggingface/datasets/pull/1726.patch",
"url": "https://api.github.com/repos/huggingface/datasets/pulls/1726"
} | As discussed in #824 it would be cool to make the library work in offline mode.
Currently, if there's no internet connection, modules (datasets or metrics) that have already been loaded in the past can't be loaded again, and a ConnectionError is raised.
This is because `prepare_module` fetches online for the latest version of the module.
To make it work in offline mode one suggestion was to reload the latest local version of the module.
I implemented that and I also raise a warning saying that the module that is loaded is the latest local version.
```python
logger.warning(
f"Using the latest cached version of the module from {cached_module_path} since it "
f"couldn't be found locally at {input_path} or remotely ({error_type_that_prevented_reaching_out_remote_stuff})."
)
```
I added tests to make sure it works as expected, and I needed to make a few changes in the code to be able to test things properly. In particular, I added a parameter `hf_modules_cache` to `init_dynamic_modules` for testing purposes. It makes it possible to have temporary module caches for testing.
I also added an `offline` context utility that allows testing parts of the code by making all requests fail as if there were no internet.
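In spirit, the `offline` utility does something like this (an illustrative sketch with simplified logic, not the exact code of this PR):
```python
from contextlib import contextmanager
from unittest.mock import patch

import requests


@contextmanager
def offline():
    """Make every HTTP request fail as if there were no internet connection."""

    def raise_connection_error(*args, **kwargs):
        raise requests.ConnectionError("Offline mode is enabled.")

    with patch("requests.Session.request", side_effect=raise_connection_error):
        yield
```
For example, `with offline(): load_dataset("squad")` should then exercise the fallback-to-cache code path.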
Close #824, close #761. | {
"+1": 0,
"-1": 0,
"confused": 0,
"eyes": 0,
"heart": 0,
"hooray": 0,
"laugh": 0,
"rocket": 1,
"total_count": 1,
"url": "https://api.github.com/repos/huggingface/datasets/issues/1726/reactions"
} | https://api.github.com/repos/huggingface/datasets/issues/1726/timeline | null | null | true |
https://api.github.com/repos/huggingface/datasets/issues/1725 | https://api.github.com/repos/huggingface/datasets | https://api.github.com/repos/huggingface/datasets/issues/1725/labels{/name} | https://api.github.com/repos/huggingface/datasets/issues/1725/comments | https://api.github.com/repos/huggingface/datasets/issues/1725/events | https://github.com/huggingface/datasets/issues/1725 | 784,182,273 | MDU6SXNzdWU3ODQxODIyNzM= | 1,725 | load the local dataset | {
"avatar_url": "https://avatars.githubusercontent.com/u/41193842?v=4",
"events_url": "https://api.github.com/users/xinjicong/events{/privacy}",
"followers_url": "https://api.github.com/users/xinjicong/followers",
"following_url": "https://api.github.com/users/xinjicong/following{/other_user}",
"gists_url": "https://api.github.com/users/xinjicong/gists{/gist_id}",
"gravatar_id": "",
"html_url": "https://github.com/xinjicong",
"id": 41193842,
"login": "xinjicong",
"node_id": "MDQ6VXNlcjQxMTkzODQy",
"organizations_url": "https://api.github.com/users/xinjicong/orgs",
"received_events_url": "https://api.github.com/users/xinjicong/received_events",
"repos_url": "https://api.github.com/users/xinjicong/repos",
"site_admin": false,
"starred_url": "https://api.github.com/users/xinjicong/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/xinjicong/subscriptions",
"type": "User",
"url": "https://api.github.com/users/xinjicong"
} | [] | closed | false | null | [] | null | 7 | "2021-01-12T12:12:55Z" | "2022-06-01T16:00:59Z" | "2022-06-01T16:00:59Z" | NONE | null | null | null | your guidebook's example is like
>>>from datasets import load_dataset
>>> dataset = load_dataset('json', data_files='my_file.json')
but the first arg is path...
so how should i do if i want to load the local dataset for model training?
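Is something like this the intended usage? (Just my guess; the file names are placeholders.)
```python
from datasets import load_dataset

# 'json' as the first arg selects the generic JSON loader,
# and data_files points at the local files
dataset = load_dataset('json', data_files={'train': 'my_train.json', 'test': 'my_test.json'})
```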
I would be grateful if you could help me with this problem!
Thanks a lot! | {
"+1": 0,
"-1": 0,
"confused": 0,
"eyes": 0,
"heart": 0,
"hooray": 0,
"laugh": 0,
"rocket": 0,
"total_count": 0,
"url": "https://api.github.com/repos/huggingface/datasets/issues/1725/reactions"
} | https://api.github.com/repos/huggingface/datasets/issues/1725/timeline | null | completed | false |
https://api.github.com/repos/huggingface/datasets/issues/1723 | https://api.github.com/repos/huggingface/datasets | https://api.github.com/repos/huggingface/datasets/issues/1723/labels{/name} | https://api.github.com/repos/huggingface/datasets/issues/1723/comments | https://api.github.com/repos/huggingface/datasets/issues/1723/events | https://github.com/huggingface/datasets/pull/1723 | 783,982,100 | MDExOlB1bGxSZXF1ZXN0NTUzMjQ4MzU1 | 1,723 | ADD S3 support for downloading and uploading processed datasets | {
"avatar_url": "https://avatars.githubusercontent.com/u/32632186?v=4",
"events_url": "https://api.github.com/users/philschmid/events{/privacy}",
"followers_url": "https://api.github.com/users/philschmid/followers",
"following_url": "https://api.github.com/users/philschmid/following{/other_user}",
"gists_url": "https://api.github.com/users/philschmid/gists{/gist_id}",
"gravatar_id": "",
"html_url": "https://github.com/philschmid",
"id": 32632186,
"login": "philschmid",
"node_id": "MDQ6VXNlcjMyNjMyMTg2",
"organizations_url": "https://api.github.com/users/philschmid/orgs",
"received_events_url": "https://api.github.com/users/philschmid/received_events",
"repos_url": "https://api.github.com/users/philschmid/repos",
"site_admin": false,
"starred_url": "https://api.github.com/users/philschmid/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/philschmid/subscriptions",
"type": "User",
"url": "https://api.github.com/users/philschmid"
} | [] | closed | false | null | [] | null | 1 | "2021-01-12T07:17:34Z" | "2021-01-26T17:02:08Z" | "2021-01-26T17:02:08Z" | MEMBER | null | 0 | {
"diff_url": "https://github.com/huggingface/datasets/pull/1723.diff",
"html_url": "https://github.com/huggingface/datasets/pull/1723",
"merged_at": "2021-01-26T17:02:07Z",
"patch_url": "https://github.com/huggingface/datasets/pull/1723.patch",
"url": "https://api.github.com/repos/huggingface/datasets/pulls/1723"
} | # What does this PR do?
This PR adds the functionality to load and save `datasets` from and to s3.
You can save `datasets` with either `Dataset.save_to_disk()` or `DatasetDict.save_to_disk`.
You can load `datasets` with either the generic `load_from_disk`, or with `Dataset.load_from_disk()` / `DatasetDict.load_from_disk()`.
Loading `csv` or `json` datasets from s3 is not implemented.
To save/load datasets to s3 you either need to provide an `aws_profile` that is set up on your machine (by default the `default` profile is used), or you have to pass an `aws_access_key_id` and `aws_secret_access_key`.
The implementation was done with `fsspec` and `boto3`.
### Example `aws_profile` :
<details>
```python
dataset.save_to_disk("s3://moto-mock-s3-bucket/datasets/sdk", aws_profile="hf-sm")
load_from_disk("s3://moto-mock-s3-bucket/datasets/sdk", aws_profile="hf-sm")
```
</details>
### Example `aws_access_key_id` and `aws_secret_access_key` :
<details>
```python
dataset.save_to_disk("s3://moto-mock-s3-bucket/datasets/sdk",
aws_access_key_id="fake_access_key",
aws_secret_access_key="fake_secret_key"
)
load_from_disk("s3://moto-mock-s3-bucket/datasets/sdk",
aws_access_key_id="fake_access_key",
aws_secret_access_key="fake_secret_key"
)
```
</details>
If you want to load a dataset from a public s3 bucket you can pass `anon=True`
### Example `anon=True` :
<details>
```python
dataset.save_to_disk("s3://moto-mock-s3-bucket/datasets/sdk", aws_profile="hf-sm")
load_from_disk("s3://moto-mock-s3-bucketdatasets/sdk",anon=True)
```
</details>
### Full Example
```python
import datasets
dataset = datasets.load_dataset("imdb")
print(f"DatasetDict contains {len(dataset)} datasets")
print(f"train Dataset has the size of: {len(dataset['train'])}")
dataset.save_to_disk("s3://moto-mock-s3-bucket/datasets/sdk", aws_profile="hf-sm")
remote_dataset = datasets.load_from_disk("s3://moto-mock-s3-bucket/datasets/sdk", aws_profile="hf-sm")
print(f"DatasetDict contains {len(remote_dataset)} datasets")
print(f"train Dataset has the size of: {len(remote_dataset['train'])}")
```
Related to #878
I will also adjust the documentation once the code has been reviewed; until then I'm leaving the PR in "draft" status. Something we could consider is renaming the functions, e.g. changing `_disk` to `_filesystem`.
| {
"+1": 0,
"-1": 0,
"confused": 0,
"eyes": 0,
"heart": 0,
"hooray": 0,
"laugh": 0,
"rocket": 3,
"total_count": 3,
"url": "https://api.github.com/repos/huggingface/datasets/issues/1723/reactions"
} | https://api.github.com/repos/huggingface/datasets/issues/1723/timeline | null | null | true |
https://api.github.com/repos/huggingface/datasets/issues/1724 | https://api.github.com/repos/huggingface/datasets | https://api.github.com/repos/huggingface/datasets/issues/1724/labels{/name} | https://api.github.com/repos/huggingface/datasets/issues/1724/comments | https://api.github.com/repos/huggingface/datasets/issues/1724/events | https://github.com/huggingface/datasets/issues/1724 | 784,023,338 | MDU6SXNzdWU3ODQwMjMzMzg= | 1,724 | could not run models on an offline server successfully | {
"avatar_url": "https://avatars.githubusercontent.com/u/49967236?v=4",
"events_url": "https://api.github.com/users/lkcao/events{/privacy}",
"followers_url": "https://api.github.com/users/lkcao/followers",
"following_url": "https://api.github.com/users/lkcao/following{/other_user}",
"gists_url": "https://api.github.com/users/lkcao/gists{/gist_id}",
"gravatar_id": "",
"html_url": "https://github.com/lkcao",
"id": 49967236,
"login": "lkcao",
"node_id": "MDQ6VXNlcjQ5OTY3MjM2",
"organizations_url": "https://api.github.com/users/lkcao/orgs",
"received_events_url": "https://api.github.com/users/lkcao/received_events",
"repos_url": "https://api.github.com/users/lkcao/repos",
"site_admin": false,
"starred_url": "https://api.github.com/users/lkcao/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/lkcao/subscriptions",
"type": "User",
"url": "https://api.github.com/users/lkcao"
} | [] | closed | false | null | [] | null | 6 | "2021-01-12T06:08:06Z" | "2022-10-05T12:39:07Z" | "2022-10-05T12:39:07Z" | NONE | null | null | null | Hi, I really need your help with this.
I am trying to fine-tune a RoBERTa model on a remote server that strictly bans internet access. I installed all the packages by hand and tried to run run_mlm.py on the server. It works well on Colab, but when I try to run it on this offline server, it shows:

Is there anything I can do? Is it possible to download everything to the cache (or to disk, as sketched above) and upload it to the server? Please help me out... | {
"+1": 0,
"-1": 0,
"confused": 0,
"eyes": 1,
"heart": 0,
"hooray": 0,
"laugh": 0,
"rocket": 0,
"total_count": 1,
"url": "https://api.github.com/repos/huggingface/datasets/issues/1724/reactions"
} | https://api.github.com/repos/huggingface/datasets/issues/1724/timeline | null | completed | false |
https://api.github.com/repos/huggingface/datasets/issues/1722 | https://api.github.com/repos/huggingface/datasets | https://api.github.com/repos/huggingface/datasets/issues/1722/labels{/name} | https://api.github.com/repos/huggingface/datasets/issues/1722/comments | https://api.github.com/repos/huggingface/datasets/issues/1722/events | https://github.com/huggingface/datasets/pull/1722 | 783,921,679 | MDExOlB1bGxSZXF1ZXN0NTUzMTk3MTg4 | 1,722 | Added unfiltered versions of the Wiki-Auto training data for the GEM simplification task. | {
"avatar_url": "https://avatars.githubusercontent.com/u/11708999?v=4",
"events_url": "https://api.github.com/users/mounicam/events{/privacy}",
"followers_url": "https://api.github.com/users/mounicam/followers",
"following_url": "https://api.github.com/users/mounicam/following{/other_user}",
"gists_url": "https://api.github.com/users/mounicam/gists{/gist_id}",
"gravatar_id": "",
"html_url": "https://github.com/mounicam",
"id": 11708999,
"login": "mounicam",
"node_id": "MDQ6VXNlcjExNzA4OTk5",
"organizations_url": "https://api.github.com/users/mounicam/orgs",
"received_events_url": "https://api.github.com/users/mounicam/received_events",
"repos_url": "https://api.github.com/users/mounicam/repos",
"site_admin": false,
"starred_url": "https://api.github.com/users/mounicam/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/mounicam/subscriptions",
"type": "User",
"url": "https://api.github.com/users/mounicam"
} | [] | closed | false | null | [] | null | 1 | "2021-01-12T05:26:04Z" | "2021-01-12T18:14:53Z" | "2021-01-12T17:35:57Z" | CONTRIBUTOR | null | 0 | {
"diff_url": "https://github.com/huggingface/datasets/pull/1722.diff",
"html_url": "https://github.com/huggingface/datasets/pull/1722",
"merged_at": "2021-01-12T17:35:57Z",
"patch_url": "https://github.com/huggingface/datasets/pull/1722.patch",
"url": "https://api.github.com/repos/huggingface/datasets/pulls/1722"
} | {
"+1": 0,
"-1": 0,
"confused": 0,
"eyes": 0,
"heart": 0,
"hooray": 0,
"laugh": 0,
"rocket": 0,
"total_count": 0,
"url": "https://api.github.com/repos/huggingface/datasets/issues/1722/reactions"
} | https://api.github.com/repos/huggingface/datasets/issues/1722/timeline | null | null | true |
|
https://api.github.com/repos/huggingface/datasets/issues/1721 | https://api.github.com/repos/huggingface/datasets | https://api.github.com/repos/huggingface/datasets/issues/1721/labels{/name} | https://api.github.com/repos/huggingface/datasets/issues/1721/comments | https://api.github.com/repos/huggingface/datasets/issues/1721/events | https://github.com/huggingface/datasets/pull/1721 | 783,828,428 | MDExOlB1bGxSZXF1ZXN0NTUzMTIyODQ5 | 1,721 | [Scientific papers] Mirror datasets zip | {
"avatar_url": "https://avatars.githubusercontent.com/u/23423619?v=4",
"events_url": "https://api.github.com/users/patrickvonplaten/events{/privacy}",
"followers_url": "https://api.github.com/users/patrickvonplaten/followers",
"following_url": "https://api.github.com/users/patrickvonplaten/following{/other_user}",
"gists_url": "https://api.github.com/users/patrickvonplaten/gists{/gist_id}",
"gravatar_id": "",
"html_url": "https://github.com/patrickvonplaten",
"id": 23423619,
"login": "patrickvonplaten",
"node_id": "MDQ6VXNlcjIzNDIzNjE5",
"organizations_url": "https://api.github.com/users/patrickvonplaten/orgs",
"received_events_url": "https://api.github.com/users/patrickvonplaten/received_events",
"repos_url": "https://api.github.com/users/patrickvonplaten/repos",
"site_admin": false,
"starred_url": "https://api.github.com/users/patrickvonplaten/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/patrickvonplaten/subscriptions",
"type": "User",
"url": "https://api.github.com/users/patrickvonplaten"
} | [] | closed | false | null | [] | null | 4 | "2021-01-12T01:15:40Z" | "2021-01-12T11:49:15Z" | "2021-01-12T11:41:47Z" | MEMBER | null | 0 | {
"diff_url": "https://github.com/huggingface/datasets/pull/1721.diff",
"html_url": "https://github.com/huggingface/datasets/pull/1721",
"merged_at": "2021-01-12T11:41:47Z",
"patch_url": "https://github.com/huggingface/datasets/pull/1721.patch",
"url": "https://api.github.com/repos/huggingface/datasets/pulls/1721"
} | Datasets were uploaded to https://s3.amazonaws.com/datasets.huggingface.co/scientific_papers/1.1.1/arxiv-dataset.zip and https://s3.amazonaws.com/datasets.huggingface.co/scientific_papers/1.1.1/pubmed-dataset.zip respectively, to avoid the Google Drive quota and enable faster downloads. | {
"+1": 0,
"-1": 0,
"confused": 0,
"eyes": 0,
"heart": 0,
"hooray": 0,
"laugh": 0,
"rocket": 0,
"total_count": 0,
"url": "https://api.github.com/repos/huggingface/datasets/issues/1721/reactions"
} | https://api.github.com/repos/huggingface/datasets/issues/1721/timeline | null | null | true |
https://api.github.com/repos/huggingface/datasets/issues/1720 | https://api.github.com/repos/huggingface/datasets | https://api.github.com/repos/huggingface/datasets/issues/1720/labels{/name} | https://api.github.com/repos/huggingface/datasets/issues/1720/comments | https://api.github.com/repos/huggingface/datasets/issues/1720/events | https://github.com/huggingface/datasets/pull/1720 | 783,721,833 | MDExOlB1bGxSZXF1ZXN0NTUzMDM0MzYx | 1,720 | Adding the NorNE dataset for NER | {
"avatar_url": "https://avatars.githubusercontent.com/u/173537?v=4",
"events_url": "https://api.github.com/users/versae/events{/privacy}",
"followers_url": "https://api.github.com/users/versae/followers",
"following_url": "https://api.github.com/users/versae/following{/other_user}",
"gists_url": "https://api.github.com/users/versae/gists{/gist_id}",
"gravatar_id": "",
"html_url": "https://github.com/versae",
"id": 173537,
"login": "versae",
"node_id": "MDQ6VXNlcjE3MzUzNw==",
"organizations_url": "https://api.github.com/users/versae/orgs",
"received_events_url": "https://api.github.com/users/versae/received_events",
"repos_url": "https://api.github.com/users/versae/repos",
"site_admin": false,
"starred_url": "https://api.github.com/users/versae/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/versae/subscriptions",
"type": "User",
"url": "https://api.github.com/users/versae"
} | [] | closed | false | null | [] | null | 13 | "2021-01-11T21:34:13Z" | "2021-03-31T14:23:49Z" | "2021-03-31T14:13:17Z" | CONTRIBUTOR | null | 0 | {
"diff_url": "https://github.com/huggingface/datasets/pull/1720.diff",
"html_url": "https://github.com/huggingface/datasets/pull/1720",
"merged_at": null,
"patch_url": "https://github.com/huggingface/datasets/pull/1720.patch",
"url": "https://api.github.com/repos/huggingface/datasets/pulls/1720"
} | NorNE is a manually annotated corpus of named entities which extends the annotation of the existing Norwegian Dependency Treebank. Comprising both of the official standards of written Norwegian (Bokmål and Nynorsk), the corpus contains around 600,000 tokens and annotates a rich set of entity types including persons, organizations, locations, geo-political entities, products, and events, in addition to a class corresponding to nominals derived from names. | {
"+1": 0,
"-1": 0,
"confused": 0,
"eyes": 0,
"heart": 0,
"hooray": 0,
"laugh": 0,
"rocket": 0,
"total_count": 0,
"url": "https://api.github.com/repos/huggingface/datasets/issues/1720/reactions"
} | https://api.github.com/repos/huggingface/datasets/issues/1720/timeline | null | null | true |
https://api.github.com/repos/huggingface/datasets/issues/1719 | https://api.github.com/repos/huggingface/datasets | https://api.github.com/repos/huggingface/datasets/issues/1719/labels{/name} | https://api.github.com/repos/huggingface/datasets/issues/1719/comments | https://api.github.com/repos/huggingface/datasets/issues/1719/events | https://github.com/huggingface/datasets/pull/1719 | 783,557,542 | MDExOlB1bGxSZXF1ZXN0NTUyODk3MzY4 | 1,719 | Fix column list comparison in transmit format | {
"avatar_url": "https://avatars.githubusercontent.com/u/42851186?v=4",
"events_url": "https://api.github.com/users/lhoestq/events{/privacy}",
"followers_url": "https://api.github.com/users/lhoestq/followers",
"following_url": "https://api.github.com/users/lhoestq/following{/other_user}",
"gists_url": "https://api.github.com/users/lhoestq/gists{/gist_id}",
"gravatar_id": "",
"html_url": "https://github.com/lhoestq",
"id": 42851186,
"login": "lhoestq",
"node_id": "MDQ6VXNlcjQyODUxMTg2",
"organizations_url": "https://api.github.com/users/lhoestq/orgs",
"received_events_url": "https://api.github.com/users/lhoestq/received_events",
"repos_url": "https://api.github.com/users/lhoestq/repos",
"site_admin": false,
"starred_url": "https://api.github.com/users/lhoestq/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/lhoestq/subscriptions",
"type": "User",
"url": "https://api.github.com/users/lhoestq"
} | [] | closed | false | null | [] | null | 0 | "2021-01-11T17:23:56Z" | "2021-01-11T18:45:03Z" | "2021-01-11T18:45:02Z" | MEMBER | null | 0 | {
"diff_url": "https://github.com/huggingface/datasets/pull/1719.diff",
"html_url": "https://github.com/huggingface/datasets/pull/1719",
"merged_at": "2021-01-11T18:45:02Z",
"patch_url": "https://github.com/huggingface/datasets/pull/1719.patch",
"url": "https://api.github.com/repos/huggingface/datasets/pulls/1719"
} | As noticed in #1718, the cache files might not be reloaded when new columns were added.
This is because of an issue in `transmit_format`: the column list comparison fails because the order was not deterministic, which causes `transmit_format` to apply an unnecessary `set_format` transform with shuffled column names.
I fixed that by sorting the columns for the comparison and added a test.
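The gist of the comparison fix, as a sketch (hypothetical helper, not the actual diff):
```python
def columns_changed(old_columns, new_columns):
    # compare as sorted lists so that column order alone
    # doesn't trigger a spurious set_format call
    return sorted(old_columns or []) != sorted(new_columns or [])
```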
To properly test that I added a third column `col_3` to the dummy_dataset used for tests. | {
"+1": 0,
"-1": 0,
"confused": 0,
"eyes": 0,
"heart": 0,
"hooray": 0,
"laugh": 0,
"rocket": 0,
"total_count": 0,
"url": "https://api.github.com/repos/huggingface/datasets/issues/1719/reactions"
} | https://api.github.com/repos/huggingface/datasets/issues/1719/timeline | null | null | true |
https://api.github.com/repos/huggingface/datasets/issues/1718 | https://api.github.com/repos/huggingface/datasets | https://api.github.com/repos/huggingface/datasets/issues/1718/labels{/name} | https://api.github.com/repos/huggingface/datasets/issues/1718/comments | https://api.github.com/repos/huggingface/datasets/issues/1718/events | https://github.com/huggingface/datasets/issues/1718 | 783,474,753 | MDU6SXNzdWU3ODM0NzQ3NTM= | 1,718 | Possible cache miss in datasets | {
"avatar_url": "https://avatars.githubusercontent.com/u/18296312?v=4",
"events_url": "https://api.github.com/users/ofirzaf/events{/privacy}",
"followers_url": "https://api.github.com/users/ofirzaf/followers",
"following_url": "https://api.github.com/users/ofirzaf/following{/other_user}",
"gists_url": "https://api.github.com/users/ofirzaf/gists{/gist_id}",
"gravatar_id": "",
"html_url": "https://github.com/ofirzaf",
"id": 18296312,
"login": "ofirzaf",
"node_id": "MDQ6VXNlcjE4Mjk2MzEy",
"organizations_url": "https://api.github.com/users/ofirzaf/orgs",
"received_events_url": "https://api.github.com/users/ofirzaf/received_events",
"repos_url": "https://api.github.com/users/ofirzaf/repos",
"site_admin": false,
"starred_url": "https://api.github.com/users/ofirzaf/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/ofirzaf/subscriptions",
"type": "User",
"url": "https://api.github.com/users/ofirzaf"
} | [] | closed | false | null | [] | null | 18 | "2021-01-11T15:37:31Z" | "2022-06-29T14:54:42Z" | "2021-01-26T02:47:59Z" | NONE | null | null | null | Hi,
I am using the datasets package and even though I run the same data processing functions, datasets always recomputes the function instead of using cache.
I have attached an example script that for me reproduces the problem.
In the attached example the second map function always recomputes instead of loading from cache.
Is this a bug or am I doing something wrong?
Is there a way to fix this and avoid all the recomputation?
Thanks
Edit:
transformers==3.5.1
datasets==1.2.0
```
from datasets import load_dataset
from transformers import AutoTokenizer
datasets = load_dataset('wikitext', 'wikitext-103-raw-v1')
tokenizer = AutoTokenizer.from_pretrained('bert-base-uncased', use_fast=True)
column_names = datasets["train"].column_names
text_column_name = "text" if "text" in column_names else column_names[0]
def tokenize_function(examples):
return tokenizer(examples[text_column_name], return_special_tokens_mask=True)
tokenized_datasets = datasets.map(
tokenize_function,
batched=True,
num_proc=60,
remove_columns=[text_column_name],
load_from_cache_file=True,
)
max_seq_length = tokenizer.model_max_length
def group_texts(examples):
# Concatenate all texts.
concatenated_examples = {
k: sum(examples[k], []) for k in examples.keys()}
total_length = len(concatenated_examples[list(examples.keys())[0]])
# We drop the small remainder, we could add padding if the model supported it instead of this drop, you can
# customize this part to your needs.
total_length = (total_length // max_seq_length) * max_seq_length
# Split by chunks of max_len.
result = {
k: [t[i: i + max_seq_length]
for i in range(0, total_length, max_seq_length)]
for k, t in concatenated_examples.items()
}
return result
tokenized_datasets = tokenized_datasets.map(
group_texts,
batched=True,
num_proc=60,
load_from_cache_file=True,
)
print(tokenized_datasets)
print('finished')
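# (a hedged aside, not from the original report) cache reuse relies on the
# mapped function hashing to the same fingerprint across runs; one way to
# sanity-check this between runs is to print the dataset fingerprint:
#   print(tokenized_datasets["train"]._fingerprint)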
``` | {
"+1": 2,
"-1": 0,
"confused": 0,
"eyes": 0,
"heart": 0,
"hooray": 0,
"laugh": 0,
"rocket": 0,
"total_count": 2,
"url": "https://api.github.com/repos/huggingface/datasets/issues/1718/reactions"
} | https://api.github.com/repos/huggingface/datasets/issues/1718/timeline | null | completed | false |
https://api.github.com/repos/huggingface/datasets/issues/1717 | https://api.github.com/repos/huggingface/datasets | https://api.github.com/repos/huggingface/datasets/issues/1717/labels{/name} | https://api.github.com/repos/huggingface/datasets/issues/1717/comments | https://api.github.com/repos/huggingface/datasets/issues/1717/events | https://github.com/huggingface/datasets/issues/1717 | 783,074,255 | MDU6SXNzdWU3ODMwNzQyNTU= | 1,717 | SciFact dataset - minor changes | {
"avatar_url": "https://avatars.githubusercontent.com/u/3091916?v=4",
"events_url": "https://api.github.com/users/dwadden/events{/privacy}",
"followers_url": "https://api.github.com/users/dwadden/followers",
"following_url": "https://api.github.com/users/dwadden/following{/other_user}",
"gists_url": "https://api.github.com/users/dwadden/gists{/gist_id}",
"gravatar_id": "",
"html_url": "https://github.com/dwadden",
"id": 3091916,
"login": "dwadden",
"node_id": "MDQ6VXNlcjMwOTE5MTY=",
"organizations_url": "https://api.github.com/users/dwadden/orgs",
"received_events_url": "https://api.github.com/users/dwadden/received_events",
"repos_url": "https://api.github.com/users/dwadden/repos",
"site_admin": false,
"starred_url": "https://api.github.com/users/dwadden/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/dwadden/subscriptions",
"type": "User",
"url": "https://api.github.com/users/dwadden"
} | [] | closed | false | null | [] | null | 4 | "2021-01-11T05:26:40Z" | "2021-01-26T02:52:17Z" | "2021-01-26T02:52:17Z" | CONTRIBUTOR | null | null | null | Hi,
SciFact dataset creator here. First of all, thanks for adding the dataset to Huggingface, much appreciated!
I'd like to make a few minor changes, including the citation information and the `_URL` from which to download the dataset. Can I submit a PR for this?
It also looks like the dataset is being downloaded directly from Huggingface's Google cloud account rather than via the `_URL` in [scifact.py](https://github.com/huggingface/datasets/blob/master/datasets/scifact/scifact.py). Can you help me update the version on gcloud?
Thanks,
Dave | {
"+1": 0,
"-1": 0,
"confused": 0,
"eyes": 0,
"heart": 0,
"hooray": 0,
"laugh": 0,
"rocket": 0,
"total_count": 0,
"url": "https://api.github.com/repos/huggingface/datasets/issues/1717/reactions"
} | https://api.github.com/repos/huggingface/datasets/issues/1717/timeline | null | completed | false |
https://api.github.com/repos/huggingface/datasets/issues/1716 | https://api.github.com/repos/huggingface/datasets | https://api.github.com/repos/huggingface/datasets/issues/1716/labels{/name} | https://api.github.com/repos/huggingface/datasets/issues/1716/comments | https://api.github.com/repos/huggingface/datasets/issues/1716/events | https://github.com/huggingface/datasets/pull/1716 | 782,819,006 | MDExOlB1bGxSZXF1ZXN0NTUyMjgzNzE5 | 1,716 | Add Hatexplain Dataset | {
"avatar_url": "https://avatars.githubusercontent.com/u/48222101?v=4",
"events_url": "https://api.github.com/users/kushal2000/events{/privacy}",
"followers_url": "https://api.github.com/users/kushal2000/followers",
"following_url": "https://api.github.com/users/kushal2000/following{/other_user}",
"gists_url": "https://api.github.com/users/kushal2000/gists{/gist_id}",
"gravatar_id": "",
"html_url": "https://github.com/kushal2000",
"id": 48222101,
"login": "kushal2000",
"node_id": "MDQ6VXNlcjQ4MjIyMTAx",
"organizations_url": "https://api.github.com/users/kushal2000/orgs",
"received_events_url": "https://api.github.com/users/kushal2000/received_events",
"repos_url": "https://api.github.com/users/kushal2000/repos",
"site_admin": false,
"starred_url": "https://api.github.com/users/kushal2000/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/kushal2000/subscriptions",
"type": "User",
"url": "https://api.github.com/users/kushal2000"
} | [] | closed | false | null | [] | null | 0 | "2021-01-10T13:30:01Z" | "2021-01-18T14:21:42Z" | "2021-01-18T14:21:42Z" | CONTRIBUTOR | null | 0 | {
"diff_url": "https://github.com/huggingface/datasets/pull/1716.diff",
"html_url": "https://github.com/huggingface/datasets/pull/1716",
"merged_at": "2021-01-18T14:21:42Z",
"patch_url": "https://github.com/huggingface/datasets/pull/1716.patch",
"url": "https://api.github.com/repos/huggingface/datasets/pulls/1716"
} | Adding Hatexplain - the first benchmark hate speech dataset covering multiple aspects of the issue | {
"+1": 0,
"-1": 0,
"confused": 0,
"eyes": 0,
"heart": 0,
"hooray": 0,
"laugh": 0,
"rocket": 0,
"total_count": 0,
"url": "https://api.github.com/repos/huggingface/datasets/issues/1716/reactions"
} | https://api.github.com/repos/huggingface/datasets/issues/1716/timeline | null | null | true |
https://api.github.com/repos/huggingface/datasets/issues/1715 | https://api.github.com/repos/huggingface/datasets | https://api.github.com/repos/huggingface/datasets/issues/1715/labels{/name} | https://api.github.com/repos/huggingface/datasets/issues/1715/comments | https://api.github.com/repos/huggingface/datasets/issues/1715/events | https://github.com/huggingface/datasets/pull/1715 | 782,754,441 | MDExOlB1bGxSZXF1ZXN0NTUyMjM2NDA5 | 1,715 | add Korean intonation-aided intention identification dataset | {
"avatar_url": "https://avatars.githubusercontent.com/u/59462357?v=4",
"events_url": "https://api.github.com/users/stevhliu/events{/privacy}",
"followers_url": "https://api.github.com/users/stevhliu/followers",
"following_url": "https://api.github.com/users/stevhliu/following{/other_user}",
"gists_url": "https://api.github.com/users/stevhliu/gists{/gist_id}",
"gravatar_id": "",
"html_url": "https://github.com/stevhliu",
"id": 59462357,
"login": "stevhliu",
"node_id": "MDQ6VXNlcjU5NDYyMzU3",
"organizations_url": "https://api.github.com/users/stevhliu/orgs",
"received_events_url": "https://api.github.com/users/stevhliu/received_events",
"repos_url": "https://api.github.com/users/stevhliu/repos",
"site_admin": false,
"starred_url": "https://api.github.com/users/stevhliu/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/stevhliu/subscriptions",
"type": "User",
"url": "https://api.github.com/users/stevhliu"
} | [] | closed | false | null | [] | null | 0 | "2021-01-10T06:29:04Z" | "2021-09-17T16:54:13Z" | "2021-01-12T17:14:33Z" | MEMBER | null | 0 | {
"diff_url": "https://github.com/huggingface/datasets/pull/1715.diff",
"html_url": "https://github.com/huggingface/datasets/pull/1715",
"merged_at": "2021-01-12T17:14:32Z",
"patch_url": "https://github.com/huggingface/datasets/pull/1715.patch",
"url": "https://api.github.com/repos/huggingface/datasets/pulls/1715"
} | {
"+1": 0,
"-1": 0,
"confused": 0,
"eyes": 0,
"heart": 0,
"hooray": 0,
"laugh": 0,
"rocket": 0,
"total_count": 0,
"url": "https://api.github.com/repos/huggingface/datasets/issues/1715/reactions"
} | https://api.github.com/repos/huggingface/datasets/issues/1715/timeline | null | null | true |
|
https://api.github.com/repos/huggingface/datasets/issues/1714 | https://api.github.com/repos/huggingface/datasets | https://api.github.com/repos/huggingface/datasets/issues/1714/labels{/name} | https://api.github.com/repos/huggingface/datasets/issues/1714/comments | https://api.github.com/repos/huggingface/datasets/issues/1714/events | https://github.com/huggingface/datasets/pull/1714 | 782,416,276 | MDExOlB1bGxSZXF1ZXN0NTUxOTc3MDA0 | 1,714 | Adding adversarialQA dataset | {
"avatar_url": "https://avatars.githubusercontent.com/u/15869827?v=4",
"events_url": "https://api.github.com/users/maxbartolo/events{/privacy}",
"followers_url": "https://api.github.com/users/maxbartolo/followers",
"following_url": "https://api.github.com/users/maxbartolo/following{/other_user}",
"gists_url": "https://api.github.com/users/maxbartolo/gists{/gist_id}",
"gravatar_id": "",
"html_url": "https://github.com/maxbartolo",
"id": 15869827,
"login": "maxbartolo",
"node_id": "MDQ6VXNlcjE1ODY5ODI3",
"organizations_url": "https://api.github.com/users/maxbartolo/orgs",
"received_events_url": "https://api.github.com/users/maxbartolo/received_events",
"repos_url": "https://api.github.com/users/maxbartolo/repos",
"site_admin": false,
"starred_url": "https://api.github.com/users/maxbartolo/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/maxbartolo/subscriptions",
"type": "User",
"url": "https://api.github.com/users/maxbartolo"
} | [] | closed | false | null | [] | null | 5 | "2021-01-08T21:46:09Z" | "2021-01-13T16:05:24Z" | "2021-01-13T16:05:24Z" | CONTRIBUTOR | null | 0 | {
"diff_url": "https://github.com/huggingface/datasets/pull/1714.diff",
"html_url": "https://github.com/huggingface/datasets/pull/1714",
"merged_at": "2021-01-13T16:05:24Z",
"patch_url": "https://github.com/huggingface/datasets/pull/1714.patch",
"url": "https://api.github.com/repos/huggingface/datasets/pulls/1714"
} | Adding the adversarialQA dataset (https://adversarialqa.github.io/) from Beat the AI (https://arxiv.org/abs/2002.00293) | {
"+1": 0,
"-1": 0,
"confused": 0,
"eyes": 0,
"heart": 0,
"hooray": 0,
"laugh": 0,
"rocket": 0,
"total_count": 0,
"url": "https://api.github.com/repos/huggingface/datasets/issues/1714/reactions"
} | https://api.github.com/repos/huggingface/datasets/issues/1714/timeline | null | null | true |
https://api.github.com/repos/huggingface/datasets/issues/1713 | https://api.github.com/repos/huggingface/datasets | https://api.github.com/repos/huggingface/datasets/issues/1713/labels{/name} | https://api.github.com/repos/huggingface/datasets/issues/1713/comments | https://api.github.com/repos/huggingface/datasets/issues/1713/events | https://github.com/huggingface/datasets/issues/1713 | 782,337,723 | MDU6SXNzdWU3ODIzMzc3MjM= | 1,713 | Installation using conda | {
"avatar_url": "https://avatars.githubusercontent.com/u/9393002?v=4",
"events_url": "https://api.github.com/users/pranav-s/events{/privacy}",
"followers_url": "https://api.github.com/users/pranav-s/followers",
"following_url": "https://api.github.com/users/pranav-s/following{/other_user}",
"gists_url": "https://api.github.com/users/pranav-s/gists{/gist_id}",
"gravatar_id": "",
"html_url": "https://github.com/pranav-s",
"id": 9393002,
"login": "pranav-s",
"node_id": "MDQ6VXNlcjkzOTMwMDI=",
"organizations_url": "https://api.github.com/users/pranav-s/orgs",
"received_events_url": "https://api.github.com/users/pranav-s/received_events",
"repos_url": "https://api.github.com/users/pranav-s/repos",
"site_admin": false,
"starred_url": "https://api.github.com/users/pranav-s/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/pranav-s/subscriptions",
"type": "User",
"url": "https://api.github.com/users/pranav-s"
} | [] | closed | false | null | [] | null | 5 | "2021-01-08T19:12:15Z" | "2021-09-17T12:47:40Z" | "2021-09-17T12:47:40Z" | NONE | null | null | null | Will a conda package for installing datasets be added to the huggingface conda channel? I have installed transformers using conda and would like to use the datasets library to run some of the scripts in the transformers/examples folder, but I am unable to do so at the moment, as datasets can only be installed using pip, and using pip in a conda environment is generally a bad idea in my experience. | {
"+1": 0,
"-1": 0,
"confused": 0,
"eyes": 0,
"heart": 0,
"hooray": 0,
"laugh": 0,
"rocket": 0,
"total_count": 0,
"url": "https://api.github.com/repos/huggingface/datasets/issues/1713/reactions"
} | https://api.github.com/repos/huggingface/datasets/issues/1713/timeline | null | completed | false |
https://api.github.com/repos/huggingface/datasets/issues/1712 | https://api.github.com/repos/huggingface/datasets | https://api.github.com/repos/huggingface/datasets/issues/1712/labels{/name} | https://api.github.com/repos/huggingface/datasets/issues/1712/comments | https://api.github.com/repos/huggingface/datasets/issues/1712/events | https://github.com/huggingface/datasets/pull/1712 | 782,313,097 | MDExOlB1bGxSZXF1ZXN0NTUxODkxMDk4 | 1,712 | Silicone | {
"avatar_url": "https://avatars.githubusercontent.com/u/1551356?v=4",
"events_url": "https://api.github.com/users/eusip/events{/privacy}",
"followers_url": "https://api.github.com/users/eusip/followers",
"following_url": "https://api.github.com/users/eusip/following{/other_user}",
"gists_url": "https://api.github.com/users/eusip/gists{/gist_id}",
"gravatar_id": "",
"html_url": "https://github.com/eusip",
"id": 1551356,
"login": "eusip",
"node_id": "MDQ6VXNlcjE1NTEzNTY=",
"organizations_url": "https://api.github.com/users/eusip/orgs",
"received_events_url": "https://api.github.com/users/eusip/received_events",
"repos_url": "https://api.github.com/users/eusip/repos",
"site_admin": false,
"starred_url": "https://api.github.com/users/eusip/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/eusip/subscriptions",
"type": "User",
"url": "https://api.github.com/users/eusip"
} | [] | closed | false | null | [] | null | 6 | "2021-01-08T18:24:18Z" | "2021-01-21T14:12:37Z" | "2021-01-21T10:31:11Z" | CONTRIBUTOR | null | 0 | {
"diff_url": "https://github.com/huggingface/datasets/pull/1712.diff",
"html_url": "https://github.com/huggingface/datasets/pull/1712",
"merged_at": null,
"patch_url": "https://github.com/huggingface/datasets/pull/1712.patch",
"url": "https://api.github.com/repos/huggingface/datasets/pulls/1712"
} | My collaborators and I within the Affective Computing team at Telecom Paris would like to push our spoken dialogue dataset for publication. | {
"+1": 0,
"-1": 0,
"confused": 0,
"eyes": 1,
"heart": 0,
"hooray": 0,
"laugh": 0,
"rocket": 0,
"total_count": 1,
"url": "https://api.github.com/repos/huggingface/datasets/issues/1712/reactions"
} | https://api.github.com/repos/huggingface/datasets/issues/1712/timeline | null | null | true |
https://api.github.com/repos/huggingface/datasets/issues/1711 | https://api.github.com/repos/huggingface/datasets | https://api.github.com/repos/huggingface/datasets/issues/1711/labels{/name} | https://api.github.com/repos/huggingface/datasets/issues/1711/comments | https://api.github.com/repos/huggingface/datasets/issues/1711/events | https://github.com/huggingface/datasets/pull/1711 | 782,129,083 | MDExOlB1bGxSZXF1ZXN0NTUxNzQxODA2 | 1,711 | Fix windows path scheme in cached path | {
"avatar_url": "https://avatars.githubusercontent.com/u/42851186?v=4",
"events_url": "https://api.github.com/users/lhoestq/events{/privacy}",
"followers_url": "https://api.github.com/users/lhoestq/followers",
"following_url": "https://api.github.com/users/lhoestq/following{/other_user}",
"gists_url": "https://api.github.com/users/lhoestq/gists{/gist_id}",
"gravatar_id": "",
"html_url": "https://github.com/lhoestq",
"id": 42851186,
"login": "lhoestq",
"node_id": "MDQ6VXNlcjQyODUxMTg2",
"organizations_url": "https://api.github.com/users/lhoestq/orgs",
"received_events_url": "https://api.github.com/users/lhoestq/received_events",
"repos_url": "https://api.github.com/users/lhoestq/repos",
"site_admin": false,
"starred_url": "https://api.github.com/users/lhoestq/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/lhoestq/subscriptions",
"type": "User",
"url": "https://api.github.com/users/lhoestq"
} | [] | closed | false | null | [] | null | 0 | "2021-01-08T13:45:56Z" | "2021-01-11T09:23:20Z" | "2021-01-11T09:23:19Z" | MEMBER | null | 0 | {
"diff_url": "https://github.com/huggingface/datasets/pull/1711.diff",
"html_url": "https://github.com/huggingface/datasets/pull/1711",
"merged_at": "2021-01-11T09:23:19Z",
"patch_url": "https://github.com/huggingface/datasets/pull/1711.patch",
"url": "https://api.github.com/repos/huggingface/datasets/pulls/1711"
} | As noticed in #807, there's currently an issue with `cached_path` not raising `FileNotFoundError` on Windows for absolute paths. This is due to the way we check whether a path is local or not: the check on the scheme using urlparse was incomplete.
I fixed this and added tests.
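A minimal sketch of the failure mode (illustrative, not the exact patch): on Windows, the drive letter of an absolute path parses as a URL scheme, so a bare scheme check mistakes the path for a remote URL.
```python
from urllib.parse import urlparse

print(urlparse("C:\\data\\file.txt").scheme)   # 'c': the drive letter parses as a scheme
print(urlparse("/home/user/file.txt").scheme)  # '': clearly a local path

def is_local_path(path: str) -> bool:
    # treat an empty scheme or a single-letter scheme (a drive letter) as local
    scheme = urlparse(path).scheme
    return scheme == "" or len(scheme) == 1
```
 | {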
"+1": 0,
"-1": 0,
"confused": 0,
"eyes": 0,
"heart": 0,
"hooray": 0,
"laugh": 0,
"rocket": 0,
"total_count": 0,
"url": "https://api.github.com/repos/huggingface/datasets/issues/1711/reactions"
} | https://api.github.com/repos/huggingface/datasets/issues/1711/timeline | null | null | true |
https://api.github.com/repos/huggingface/datasets/issues/1710 | https://api.github.com/repos/huggingface/datasets | https://api.github.com/repos/huggingface/datasets/issues/1710/labels{/name} | https://api.github.com/repos/huggingface/datasets/issues/1710/comments | https://api.github.com/repos/huggingface/datasets/issues/1710/events | https://github.com/huggingface/datasets/issues/1710 | 781,914,951 | MDU6SXNzdWU3ODE5MTQ5NTE= | 1,710 | IsADirectoryError when trying to download C4 | {
"avatar_url": "https://avatars.githubusercontent.com/u/5771366?v=4",
"events_url": "https://api.github.com/users/fredriko/events{/privacy}",
"followers_url": "https://api.github.com/users/fredriko/followers",
"following_url": "https://api.github.com/users/fredriko/following{/other_user}",
"gists_url": "https://api.github.com/users/fredriko/gists{/gist_id}",
"gravatar_id": "",
"html_url": "https://github.com/fredriko",
"id": 5771366,
"login": "fredriko",
"node_id": "MDQ6VXNlcjU3NzEzNjY=",
"organizations_url": "https://api.github.com/users/fredriko/orgs",
"received_events_url": "https://api.github.com/users/fredriko/received_events",
"repos_url": "https://api.github.com/users/fredriko/repos",
"site_admin": false,
"starred_url": "https://api.github.com/users/fredriko/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/fredriko/subscriptions",
"type": "User",
"url": "https://api.github.com/users/fredriko"
} | [] | closed | false | null | [] | null | 2 | "2021-01-08T07:31:30Z" | "2022-08-04T11:56:10Z" | "2022-08-04T11:55:04Z" | NONE | null | null | null | **TLDR**:
I fail to download C4; the stack trace below ends in an `IsADirectoryError` that explains the failure.
How can the problem be fixed?
**VERBOSE**:
I use Python version 3.7 and have the following dependencies listed in my project:
```
datasets==1.2.0
apache-beam==2.26.0
```
When running the following code, where `/data/huggingface/unpacked/` contains a single unzipped `wet.paths` file manually downloaded as per the instructions for C4:
```
from datasets import load_dataset
load_dataset("c4", "en", data_dir="/data/huggingface/unpacked", beam_runner='DirectRunner')
```
I get the following stacktrace:
```
/Users/fredriko/venv/misc/bin/python /Users/fredriko/source/misc/main.py
Downloading and preparing dataset c4/en (download: Unknown size, generated: Unknown size, post-processed: Unknown size, total: Unknown size) to /Users/fredriko/.cache/huggingface/datasets/c4/en/2.3.0/8304cf264cc42bdebcb13fca4b9cb36368a96f557d36f9dc969bebbe2568b283...
Traceback (most recent call last):
File "/Users/fredriko/source/misc/main.py", line 3, in <module>
load_dataset("c4", "en", data_dir="/data/huggingface/unpacked", beam_runner='DirectRunner')
File "/Users/fredriko/venv/misc/lib/python3.7/site-packages/datasets/load.py", line 612, in load_dataset
ignore_verifications=ignore_verifications,
File "/Users/fredriko/venv/misc/lib/python3.7/site-packages/datasets/builder.py", line 527, in download_and_prepare
dl_manager=dl_manager, verify_infos=verify_infos, **download_and_prepare_kwargs
File "/Users/fredriko/venv/misc/lib/python3.7/site-packages/datasets/builder.py", line 1066, in _download_and_prepare
pipeline=pipeline,
File "/Users/fredriko/venv/misc/lib/python3.7/site-packages/datasets/builder.py", line 582, in _download_and_prepare
split_generators = self._split_generators(dl_manager, **split_generators_kwargs)
File "/Users/fredriko/.cache/huggingface/modules/datasets_modules/datasets/c4/8304cf264cc42bdebcb13fca4b9cb36368a96f557d36f9dc969bebbe2568b283/c4.py", line 190, in _split_generators
file_paths = dl_manager.download_and_extract(files_to_download)
File "/Users/fredriko/venv/misc/lib/python3.7/site-packages/datasets/utils/download_manager.py", line 258, in download_and_extract
return self.extract(self.download(url_or_urls))
File "/Users/fredriko/venv/misc/lib/python3.7/site-packages/datasets/utils/download_manager.py", line 189, in download
self._record_sizes_checksums(url_or_urls, downloaded_path_or_paths)
File "/Users/fredriko/venv/misc/lib/python3.7/site-packages/datasets/utils/download_manager.py", line 117, in _record_sizes_checksums
self._recorded_sizes_checksums[str(url)] = get_size_checksum_dict(path)
File "/Users/fredriko/venv/misc/lib/python3.7/site-packages/datasets/utils/info_utils.py", line 80, in get_size_checksum_dict
with open(path, "rb") as f:
IsADirectoryError: [Errno 21] Is a directory: '/'
Process finished with exit code 1
``` | {
"+1": 0,
"-1": 0,
"confused": 0,
"eyes": 0,
"heart": 0,
"hooray": 0,
"laugh": 0,
"rocket": 0,
"total_count": 0,
"url": "https://api.github.com/repos/huggingface/datasets/issues/1710/reactions"
} | https://api.github.com/repos/huggingface/datasets/issues/1710/timeline | null | completed | false |
https://api.github.com/repos/huggingface/datasets/issues/1709 | https://api.github.com/repos/huggingface/datasets | https://api.github.com/repos/huggingface/datasets/issues/1709/labels{/name} | https://api.github.com/repos/huggingface/datasets/issues/1709/comments | https://api.github.com/repos/huggingface/datasets/issues/1709/events | https://github.com/huggingface/datasets/issues/1709 | 781,875,640 | MDU6SXNzdWU3ODE4NzU2NDA= | 1,709 | Databases | {
"avatar_url": "https://avatars.githubusercontent.com/u/68724553?v=4",
"events_url": "https://api.github.com/users/JimmyJim1/events{/privacy}",
"followers_url": "https://api.github.com/users/JimmyJim1/followers",
"following_url": "https://api.github.com/users/JimmyJim1/following{/other_user}",
"gists_url": "https://api.github.com/users/JimmyJim1/gists{/gist_id}",
"gravatar_id": "",
"html_url": "https://github.com/JimmyJim1",
"id": 68724553,
"login": "JimmyJim1",
"node_id": "MDQ6VXNlcjY4NzI0NTUz",
"organizations_url": "https://api.github.com/users/JimmyJim1/orgs",
"received_events_url": "https://api.github.com/users/JimmyJim1/received_events",
"repos_url": "https://api.github.com/users/JimmyJim1/repos",
"site_admin": false,
"starred_url": "https://api.github.com/users/JimmyJim1/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/JimmyJim1/subscriptions",
"type": "User",
"url": "https://api.github.com/users/JimmyJim1"
} | [] | closed | false | null | [] | null | 0 | "2021-01-08T06:14:03Z" | "2021-01-08T09:00:08Z" | "2021-01-08T09:00:08Z" | NONE | null | null | null | ## Adding a Dataset
- **Name:** *name of the dataset*
- **Description:** *short description of the dataset (or link to social media or blog post)*
- **Paper:** *link to the dataset paper if available*
- **Data:** *link to the Github repository or current dataset location*
- **Motivation:** *what are some good reasons to have this dataset*
Instructions to add a new dataset can be found [here](https://github.com/huggingface/datasets/blob/master/ADD_NEW_DATASET.md). | {
"+1": 0,
"-1": 0,
"confused": 0,
"eyes": 0,
"heart": 0,
"hooray": 0,
"laugh": 0,
"rocket": 0,
"total_count": 0,
"url": "https://api.github.com/repos/huggingface/datasets/issues/1709/reactions"
} | https://api.github.com/repos/huggingface/datasets/issues/1709/timeline | null | completed | false |
https://api.github.com/repos/huggingface/datasets/issues/1708 | https://api.github.com/repos/huggingface/datasets | https://api.github.com/repos/huggingface/datasets/issues/1708/labels{/name} | https://api.github.com/repos/huggingface/datasets/issues/1708/comments | https://api.github.com/repos/huggingface/datasets/issues/1708/events | https://github.com/huggingface/datasets/issues/1708 | 781,631,455 | MDU6SXNzdWU3ODE2MzE0NTU= | 1,708 | <html dir="ltr" lang="en" class="focus-outline-visible"><head><meta http-equiv="Content-Type" content="text/html; charset=UTF-8"> | {
"avatar_url": "https://avatars.githubusercontent.com/u/77126849?v=4",
"events_url": "https://api.github.com/users/Louiejay54/events{/privacy}",
"followers_url": "https://api.github.com/users/Louiejay54/followers",
"following_url": "https://api.github.com/users/Louiejay54/following{/other_user}",
"gists_url": "https://api.github.com/users/Louiejay54/gists{/gist_id}",
"gravatar_id": "",
"html_url": "https://github.com/Louiejay54",
"id": 77126849,
"login": "Louiejay54",
"node_id": "MDQ6VXNlcjc3MTI2ODQ5",
"organizations_url": "https://api.github.com/users/Louiejay54/orgs",
"received_events_url": "https://api.github.com/users/Louiejay54/received_events",
"repos_url": "https://api.github.com/users/Louiejay54/repos",
"site_admin": false,
"starred_url": "https://api.github.com/users/Louiejay54/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/Louiejay54/subscriptions",
"type": "User",
"url": "https://api.github.com/users/Louiejay54"
} | [] | closed | false | null | [] | null | 0 | "2021-01-07T21:45:24Z" | "2021-01-08T09:00:01Z" | "2021-01-08T09:00:01Z" | NONE | null | null | null | ## Adding a Dataset
- **Name:** *name of the dataset*
- **Description:** *short description of the dataset (or link to social media or blog post)*
- **Paper:** *link to the dataset paper if available*
- **Data:** *link to the Github repository or current dataset location*
- **Motivation:** *what are some good reasons to have this dataset*
Instructions to add a new dataset can be found [here](https://github.com/huggingface/datasets/blob/master/ADD_NEW_DATASET.md). | {
"+1": 0,
"-1": 0,
"confused": 0,
"eyes": 0,
"heart": 0,
"hooray": 0,
"laugh": 0,
"rocket": 0,
"total_count": 0,
"url": "https://api.github.com/repos/huggingface/datasets/issues/1708/reactions"
} | https://api.github.com/repos/huggingface/datasets/issues/1708/timeline | null | completed | false |
https://api.github.com/repos/huggingface/datasets/issues/1707 | https://api.github.com/repos/huggingface/datasets | https://api.github.com/repos/huggingface/datasets/issues/1707/labels{/name} | https://api.github.com/repos/huggingface/datasets/issues/1707/comments | https://api.github.com/repos/huggingface/datasets/issues/1707/events | https://github.com/huggingface/datasets/pull/1707 | 781,507,545 | MDExOlB1bGxSZXF1ZXN0NTUxMjE5MDk2 | 1,707 | Added generated READMEs for datasets that were missing one. | {
"avatar_url": "https://avatars.githubusercontent.com/u/272253?v=4",
"events_url": "https://api.github.com/users/madlag/events{/privacy}",
"followers_url": "https://api.github.com/users/madlag/followers",
"following_url": "https://api.github.com/users/madlag/following{/other_user}",
"gists_url": "https://api.github.com/users/madlag/gists{/gist_id}",
"gravatar_id": "",
"html_url": "https://github.com/madlag",
"id": 272253,
"login": "madlag",
"node_id": "MDQ6VXNlcjI3MjI1Mw==",
"organizations_url": "https://api.github.com/users/madlag/orgs",
"received_events_url": "https://api.github.com/users/madlag/received_events",
"repos_url": "https://api.github.com/users/madlag/repos",
"site_admin": false,
"starred_url": "https://api.github.com/users/madlag/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/madlag/subscriptions",
"type": "User",
"url": "https://api.github.com/users/madlag"
} | [] | closed | false | null | [] | null | 1 | "2021-01-07T18:10:06Z" | "2021-01-18T14:32:33Z" | "2021-01-18T14:32:33Z" | CONTRIBUTOR | null | 0 | {
"diff_url": "https://github.com/huggingface/datasets/pull/1707.diff",
"html_url": "https://github.com/huggingface/datasets/pull/1707",
"merged_at": "2021-01-18T14:32:33Z",
"patch_url": "https://github.com/huggingface/datasets/pull/1707.patch",
"url": "https://api.github.com/repos/huggingface/datasets/pulls/1707"
} | This is it: we worked on a generator with Yacine @yjernite, and we generated dataset cards for all the missing ones (161), with all the information we could gather from the datasets repository, using dummy_data to generate examples when possible.
Code is available here for the moment: https://github.com/madlag/datasets_readme_generator .
We will move it to a Hugging Face repository and to https://huggingface.co/datasets/card-creator/ later.
| {
"+1": 0,
"-1": 0,
"confused": 0,
"eyes": 0,
"heart": 2,
"hooray": 0,
"laugh": 0,
"rocket": 0,
"total_count": 2,
"url": "https://api.github.com/repos/huggingface/datasets/issues/1707/reactions"
} | https://api.github.com/repos/huggingface/datasets/issues/1707/timeline | null | null | true |
https://api.github.com/repos/huggingface/datasets/issues/1706 | https://api.github.com/repos/huggingface/datasets | https://api.github.com/repos/huggingface/datasets/issues/1706/labels{/name} | https://api.github.com/repos/huggingface/datasets/issues/1706/comments | https://api.github.com/repos/huggingface/datasets/issues/1706/events | https://github.com/huggingface/datasets/issues/1706 | 781,494,476 | MDU6SXNzdWU3ODE0OTQ0NzY= | 1,706 | Error when downloading a large dataset on slow connection. | {
"avatar_url": "https://avatars.githubusercontent.com/u/23355969?v=4",
"events_url": "https://api.github.com/users/lucadiliello/events{/privacy}",
"followers_url": "https://api.github.com/users/lucadiliello/followers",
"following_url": "https://api.github.com/users/lucadiliello/following{/other_user}",
"gists_url": "https://api.github.com/users/lucadiliello/gists{/gist_id}",
"gravatar_id": "",
"html_url": "https://github.com/lucadiliello",
"id": 23355969,
"login": "lucadiliello",
"node_id": "MDQ6VXNlcjIzMzU1OTY5",
"organizations_url": "https://api.github.com/users/lucadiliello/orgs",
"received_events_url": "https://api.github.com/users/lucadiliello/received_events",
"repos_url": "https://api.github.com/users/lucadiliello/repos",
"site_admin": false,
"starred_url": "https://api.github.com/users/lucadiliello/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/lucadiliello/subscriptions",
"type": "User",
"url": "https://api.github.com/users/lucadiliello"
} | [] | open | false | null | [] | null | 1 | "2021-01-07T17:48:15Z" | "2021-01-13T10:35:02Z" | null | CONTRIBUTOR | null | null | null | I receive the following error after about an hour trying to download the `openwebtext` dataset.
The code used is:
```python
import datasets
datasets.load_dataset("openwebtext")
```
> Traceback (most recent call last):
> File "<stdin>", line 1, in <module>
> File "/home/lucadiliello/anaconda3/envs/nlp/lib/python3.7/site-packages/datasets/load.py", line 610, in load_dataset
> ignore_verifications=ignore_verifications,
> File "/home/lucadiliello/anaconda3/envs/nlp/lib/python3.7/site-packages/datasets/builder.py", line 515, in download_and_prepare
> dl_manager=dl_manager, verify_infos=verify_infos, **download_and_prepare_kwargs
> File "/home/lucadiliello/anaconda3/envs/nlp/lib/python3.7/site-packages/datasets/builder.py", line 570, in _download_and_prepare
> split_generators = self._split_generators(dl_manager, **split_generators_kwargs)
> File "/home/lucadiliello/.cache/huggingface/modules/datasets_modules/datasets/openwebtext/5c636399c7155da97c982d0d70ecdce30fbca66a4eb4fc768ad91f8331edac02/openwebtext.py", line 62, in _split_generators
> dl_dir = dl_manager.download_and_extract(_URL)
> File "/home/lucadiliello/anaconda3/envs/nlp/lib/python3.7/site-packages/datasets/utils/download_manager.py", line 254, in download_and_extract
> return self.extract(self.download(url_or_urls))
> File "/home/lucadiliello/anaconda3/envs/nlp/lib/python3.7/site-packages/datasets/utils/download_manager.py", line 235, in extract
> num_proc=num_proc,
> File "/home/lucadiliello/anaconda3/envs/nlp/lib/python3.7/site-packages/datasets/utils/py_utils.py", line 225, in map_nested
> return function(data_struct)
> File "/home/lucadiliello/anaconda3/envs/nlp/lib/python3.7/site-packages/datasets/utils/file_utils.py", line 343, in cached_path
> tar_file.extractall(output_path_extracted)
> File "/home/lucadiliello/anaconda3/envs/nlp/lib/python3.7/tarfile.py", line 2000, in extractall
> numeric_owner=numeric_owner)
> File "/home/lucadiliello/anaconda3/envs/nlp/lib/python3.7/tarfile.py", line 2042, in extract
> numeric_owner=numeric_owner)
> File "/home/lucadiliello/anaconda3/envs/nlp/lib/python3.7/tarfile.py", line 2112, in _extract_member
> self.makefile(tarinfo, targetpath)
> File "/home/lucadiliello/anaconda3/envs/nlp/lib/python3.7/tarfile.py", line 2161, in makefile
> copyfileobj(source, target, tarinfo.size, ReadError, bufsize)
> File "/home/lucadiliello/anaconda3/envs/nlp/lib/python3.7/tarfile.py", line 253, in copyfileobj
> buf = src.read(remainder)
> File "/home/lucadiliello/anaconda3/envs/nlp/lib/python3.7/lzma.py", line 200, in read
> return self._buffer.read(size)
> File "/home/lucadiliello/anaconda3/envs/nlp/lib/python3.7/_compression.py", line 68, in readinto
> data = self.read(len(byte_view))
> File "/home/lucadiliello/anaconda3/envs/nlp/lib/python3.7/_compression.py", line 99, in read
> raise EOFError("Compressed file ended before the "
> EOFError: Compressed file ended before the end-of-stream marker was reached
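A hedged workaround sketch (the cache path below is the default and is an assumption; adjust it if `HF_DATASETS_CACHE` is set): delete the truncated archive from the downloads cache so that the next call re-downloads it instead of extracting a partial file.
```python
import shutil
from pathlib import Path

from datasets import load_dataset

# default download cache used by datasets 1.x
downloads = Path.home() / ".cache" / "huggingface" / "datasets" / "downloads"

# drop the partially downloaded archives, then retry from scratch
shutil.rmtree(downloads, ignore_errors=True)
dataset = load_dataset("openwebtext")
```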
| {
"+1": 0,
"-1": 0,
"confused": 0,
"eyes": 0,
"heart": 0,
"hooray": 0,
"laugh": 0,
"rocket": 0,
"total_count": 0,
"url": "https://api.github.com/repos/huggingface/datasets/issues/1706/reactions"
} | https://api.github.com/repos/huggingface/datasets/issues/1706/timeline | null | null | false |
https://api.github.com/repos/huggingface/datasets/issues/1705 | https://api.github.com/repos/huggingface/datasets | https://api.github.com/repos/huggingface/datasets/issues/1705/labels{/name} | https://api.github.com/repos/huggingface/datasets/issues/1705/comments | https://api.github.com/repos/huggingface/datasets/issues/1705/events | https://github.com/huggingface/datasets/pull/1705 | 781,474,949 | MDExOlB1bGxSZXF1ZXN0NTUxMTkyMTc4 | 1,705 | Add information about caching and verifications in "Load a Dataset" docs | {
"avatar_url": "https://avatars.githubusercontent.com/u/33657802?v=4",
"events_url": "https://api.github.com/users/SBrandeis/events{/privacy}",
"followers_url": "https://api.github.com/users/SBrandeis/followers",
"following_url": "https://api.github.com/users/SBrandeis/following{/other_user}",
"gists_url": "https://api.github.com/users/SBrandeis/gists{/gist_id}",
"gravatar_id": "",
"html_url": "https://github.com/SBrandeis",
"id": 33657802,
"login": "SBrandeis",
"node_id": "MDQ6VXNlcjMzNjU3ODAy",
"organizations_url": "https://api.github.com/users/SBrandeis/orgs",
"received_events_url": "https://api.github.com/users/SBrandeis/received_events",
"repos_url": "https://api.github.com/users/SBrandeis/repos",
"site_admin": false,
"starred_url": "https://api.github.com/users/SBrandeis/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/SBrandeis/subscriptions",
"type": "User",
"url": "https://api.github.com/users/SBrandeis"
} | [
{
"color": "0075ca",
"default": true,
"description": "Improvements or additions to documentation",
"id": 1935892861,
"name": "documentation",
"node_id": "MDU6TGFiZWwxOTM1ODkyODYx",
"url": "https://api.github.com/repos/huggingface/datasets/labels/documentation"
}
] | closed | false | null | [] | null | 0 | "2021-01-07T17:18:44Z" | "2021-01-12T14:08:01Z" | "2021-01-12T14:08:01Z" | CONTRIBUTOR | null | 0 | {
"diff_url": "https://github.com/huggingface/datasets/pull/1705.diff",
"html_url": "https://github.com/huggingface/datasets/pull/1705",
"merged_at": "2021-01-12T14:08:01Z",
"patch_url": "https://github.com/huggingface/datasets/pull/1705.patch",
"url": "https://api.github.com/repos/huggingface/datasets/pulls/1705"
} | Related to #215.
The improvements from @lhoestq's #1703 are not included here. | {
"+1": 0,
"-1": 0,
"confused": 0,
"eyes": 0,
"heart": 0,
"hooray": 0,
"laugh": 0,
"rocket": 0,
"total_count": 0,
"url": "https://api.github.com/repos/huggingface/datasets/issues/1705/reactions"
} | https://api.github.com/repos/huggingface/datasets/issues/1705/timeline | null | null | true |
https://api.github.com/repos/huggingface/datasets/issues/1704 | https://api.github.com/repos/huggingface/datasets | https://api.github.com/repos/huggingface/datasets/issues/1704/labels{/name} | https://api.github.com/repos/huggingface/datasets/issues/1704/comments | https://api.github.com/repos/huggingface/datasets/issues/1704/events | https://github.com/huggingface/datasets/pull/1704 | 781,402,757 | MDExOlB1bGxSZXF1ZXN0NTUxMTMyNDI1 | 1,704 | Update XSUM Factuality DatasetCard | {
"avatar_url": "https://avatars.githubusercontent.com/u/50873201?v=4",
"events_url": "https://api.github.com/users/vineeths96/events{/privacy}",
"followers_url": "https://api.github.com/users/vineeths96/followers",
"following_url": "https://api.github.com/users/vineeths96/following{/other_user}",
"gists_url": "https://api.github.com/users/vineeths96/gists{/gist_id}",
"gravatar_id": "",
"html_url": "https://github.com/vineeths96",
"id": 50873201,
"login": "vineeths96",
"node_id": "MDQ6VXNlcjUwODczMjAx",
"organizations_url": "https://api.github.com/users/vineeths96/orgs",
"received_events_url": "https://api.github.com/users/vineeths96/received_events",
"repos_url": "https://api.github.com/users/vineeths96/repos",
"site_admin": false,
"starred_url": "https://api.github.com/users/vineeths96/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/vineeths96/subscriptions",
"type": "User",
"url": "https://api.github.com/users/vineeths96"
} | [] | closed | false | null | [] | null | 0 | "2021-01-07T15:37:14Z" | "2021-01-12T13:30:04Z" | "2021-01-12T13:30:04Z" | CONTRIBUTOR | null | 0 | {
"diff_url": "https://github.com/huggingface/datasets/pull/1704.diff",
"html_url": "https://github.com/huggingface/datasets/pull/1704",
"merged_at": "2021-01-12T13:30:04Z",
"patch_url": "https://github.com/huggingface/datasets/pull/1704.patch",
"url": "https://api.github.com/repos/huggingface/datasets/pulls/1704"
} | Update XSUM Factuality DatasetCard | {
"+1": 0,
"-1": 0,
"confused": 0,
"eyes": 0,
"heart": 0,
"hooray": 0,
"laugh": 0,
"rocket": 0,
"total_count": 0,
"url": "https://api.github.com/repos/huggingface/datasets/issues/1704/reactions"
} | https://api.github.com/repos/huggingface/datasets/issues/1704/timeline | null | null | true |
https://api.github.com/repos/huggingface/datasets/issues/1703 | https://api.github.com/repos/huggingface/datasets | https://api.github.com/repos/huggingface/datasets/issues/1703/labels{/name} | https://api.github.com/repos/huggingface/datasets/issues/1703/comments | https://api.github.com/repos/huggingface/datasets/issues/1703/events | https://github.com/huggingface/datasets/pull/1703 | 781,395,146 | MDExOlB1bGxSZXF1ZXN0NTUxMTI2MjA5 | 1,703 | Improvements regarding caching and fingerprinting | {
"avatar_url": "https://avatars.githubusercontent.com/u/42851186?v=4",
"events_url": "https://api.github.com/users/lhoestq/events{/privacy}",
"followers_url": "https://api.github.com/users/lhoestq/followers",
"following_url": "https://api.github.com/users/lhoestq/following{/other_user}",
"gists_url": "https://api.github.com/users/lhoestq/gists{/gist_id}",
"gravatar_id": "",
"html_url": "https://github.com/lhoestq",
"id": 42851186,
"login": "lhoestq",
"node_id": "MDQ6VXNlcjQyODUxMTg2",
"organizations_url": "https://api.github.com/users/lhoestq/orgs",
"received_events_url": "https://api.github.com/users/lhoestq/received_events",
"repos_url": "https://api.github.com/users/lhoestq/repos",
"site_admin": false,
"starred_url": "https://api.github.com/users/lhoestq/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/lhoestq/subscriptions",
"type": "User",
"url": "https://api.github.com/users/lhoestq"
} | [] | closed | false | null | [] | null | 8 | "2021-01-07T15:26:29Z" | "2021-01-19T17:32:11Z" | "2021-01-19T17:32:10Z" | MEMBER | null | 0 | {
"diff_url": "https://github.com/huggingface/datasets/pull/1703.diff",
"html_url": "https://github.com/huggingface/datasets/pull/1703",
"merged_at": "2021-01-19T17:32:10Z",
"patch_url": "https://github.com/huggingface/datasets/pull/1703.patch",
"url": "https://api.github.com/repos/huggingface/datasets/pulls/1703"
} | This PR adds these features:
- Enable/disable caching
If disabled, the library will no longer reload cached dataset files when applying transforms to the datasets.
It is equivalent to setting `load_from_cache_file` to `False` in dataset transforms.
```python
from datasets import set_caching_enabled
set_caching_enabled(False)
```
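For reference, a hedged per-call sketch of the equivalent behavior (assuming the 1.x `map` keyword `load_from_cache_file`; `rotten_tomatoes` is used purely as an example dataset):
```python
from datasets import load_dataset

ds = load_dataset("rotten_tomatoes", split="train")
# same effect as set_caching_enabled(False), but only for this one transform
ds = ds.map(lambda example: example, load_from_cache_file=False)
```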
- Allow unpicklable functions in `map`
If an unpicklable function is used, then it's not possible to hash it to update the dataset fingerprint that is used to name cache files. To work around that, a random fingerprint is generated instead and a warning is raised.
```python
logger.warning(
f"Transform {transform} couldn't be hashed properly, a random hash was used instead. "
"Make sure your transforms and parameters are serializable with pickle or dill for the dataset fingerprinting and caching to work. "
"If you reuse this transform, the caching mechanism will consider it to be different from the previous calls and recompute everything."
)
```
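For instance, here is a hedged sketch of a transform that can't be hashed (the dataset name and function are illustrative): the mapped function references an open `sqlite3` connection, which pickle/dill cannot serialize, so a random fingerprint is used and the warning above is logged.
```python
import sqlite3

from datasets import load_dataset

conn = sqlite3.connect(":memory:")  # connections are not picklable

def lookup(example):
    # references the unpicklable connection, so the transform can't be hashed
    example["seen"] = conn.execute("SELECT 1").fetchone()[0]
    return example

ds = load_dataset("rotten_tomatoes", split="train")
ds = ds.map(lookup)  # falls back to a random fingerprint and warns
```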
and also (open to discussion, EDIT: actually NOT included):
- Enable/disable fingerprinting
Fingerprinting makes it possible to have one deterministic fingerprint per dataset state.
A dataset fingerprint is updated after each transform.
Re-running the same transforms on a dataset in a different session results in the same fingerprint.
Disabling the fingerprinting mechanism makes all the fingerprints random.
Since the caching mechanism uses fingerprints to name the cache files, the cache file names will be different.
Therefore disabling fingerprinting will prevent the caching mechanism from reloading datasets files that have already been computed.
Disabling fingerprinting may speed up the library for users who don't care about this feature and don't want to use caching.
```python
from datasets import set_fingerprinting_enabled
set_fingerprinting_enabled(False)
```
Other details:
- I renamed the `fingerprint` decorator to `fingerprint_transform`, since the old name was not explicit. This decorator is used on dataset transform functions to allow them to update fingerprints.
- I added some `ignore_kwargs` when decorating transforms with `fingerprint_transform`, to make the fingerprint update not sensitive to kwargs like `load_from_cache_file` or `cache_file_name`.
Todo: tests for set_fingerprinting_enabled + documentation for all the above features | {
"+1": 0,
"-1": 0,
"confused": 0,
"eyes": 0,
"heart": 0,
"hooray": 0,
"laugh": 0,
"rocket": 0,
"total_count": 0,
"url": "https://api.github.com/repos/huggingface/datasets/issues/1703/reactions"
} | https://api.github.com/repos/huggingface/datasets/issues/1703/timeline | null | null | true |
https://api.github.com/repos/huggingface/datasets/issues/1702 | https://api.github.com/repos/huggingface/datasets | https://api.github.com/repos/huggingface/datasets/issues/1702/labels{/name} | https://api.github.com/repos/huggingface/datasets/issues/1702/comments | https://api.github.com/repos/huggingface/datasets/issues/1702/events | https://github.com/huggingface/datasets/pull/1702 | 781,383,277 | MDExOlB1bGxSZXF1ZXN0NTUxMTE2NDc0 | 1,702 | Fix importlib metdata import in py38 | {
"avatar_url": "https://avatars.githubusercontent.com/u/42851186?v=4",
"events_url": "https://api.github.com/users/lhoestq/events{/privacy}",
"followers_url": "https://api.github.com/users/lhoestq/followers",
"following_url": "https://api.github.com/users/lhoestq/following{/other_user}",
"gists_url": "https://api.github.com/users/lhoestq/gists{/gist_id}",
"gravatar_id": "",
"html_url": "https://github.com/lhoestq",
"id": 42851186,
"login": "lhoestq",
"node_id": "MDQ6VXNlcjQyODUxMTg2",
"organizations_url": "https://api.github.com/users/lhoestq/orgs",
"received_events_url": "https://api.github.com/users/lhoestq/received_events",
"repos_url": "https://api.github.com/users/lhoestq/repos",
"site_admin": false,
"starred_url": "https://api.github.com/users/lhoestq/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/lhoestq/subscriptions",
"type": "User",
"url": "https://api.github.com/users/lhoestq"
} | [] | closed | false | null | [] | null | 0 | "2021-01-07T15:10:30Z" | "2021-01-08T10:47:15Z" | "2021-01-08T10:47:15Z" | MEMBER | null | 0 | {
"diff_url": "https://github.com/huggingface/datasets/pull/1702.diff",
"html_url": "https://github.com/huggingface/datasets/pull/1702",
"merged_at": "2021-01-08T10:47:14Z",
"patch_url": "https://github.com/huggingface/datasets/pull/1702.patch",
"url": "https://api.github.com/repos/huggingface/datasets/pulls/1702"
} | In Python 3.8 there's no need to install `importlib_metadata`, since it already exists as `importlib.metadata` in the standard library.
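A common compatibility pattern for this (a sketch of the idea, not necessarily the exact diff in this PR):
```python
import sys

# use the stdlib module on Python 3.8+, fall back to the backport otherwise
if sys.version_info >= (3, 8):
    import importlib.metadata as importlib_metadata
else:
    import importlib_metadata

print(importlib_metadata.version("datasets"))
```
 | {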
"+1": 0,
"-1": 0,
"confused": 0,
"eyes": 0,
"heart": 0,
"hooray": 0,
"laugh": 0,
"rocket": 0,
"total_count": 0,
"url": "https://api.github.com/repos/huggingface/datasets/issues/1702/reactions"
} | https://api.github.com/repos/huggingface/datasets/issues/1702/timeline | null | null | true |
https://api.github.com/repos/huggingface/datasets/issues/1701 | https://api.github.com/repos/huggingface/datasets | https://api.github.com/repos/huggingface/datasets/issues/1701/labels{/name} | https://api.github.com/repos/huggingface/datasets/issues/1701/comments | https://api.github.com/repos/huggingface/datasets/issues/1701/events | https://github.com/huggingface/datasets/issues/1701 | 781,345,717 | MDU6SXNzdWU3ODEzNDU3MTc= | 1,701 | Some datasets miss dataset_infos.json or dummy_data.zip | {
"avatar_url": "https://avatars.githubusercontent.com/u/272253?v=4",
"events_url": "https://api.github.com/users/madlag/events{/privacy}",
"followers_url": "https://api.github.com/users/madlag/followers",
"following_url": "https://api.github.com/users/madlag/following{/other_user}",
"gists_url": "https://api.github.com/users/madlag/gists{/gist_id}",
"gravatar_id": "",
"html_url": "https://github.com/madlag",
"id": 272253,
"login": "madlag",
"node_id": "MDQ6VXNlcjI3MjI1Mw==",
"organizations_url": "https://api.github.com/users/madlag/orgs",
"received_events_url": "https://api.github.com/users/madlag/received_events",
"repos_url": "https://api.github.com/users/madlag/repos",
"site_admin": false,
"starred_url": "https://api.github.com/users/madlag/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/madlag/subscriptions",
"type": "User",
"url": "https://api.github.com/users/madlag"
} | [] | closed | false | null | [] | null | 2 | "2021-01-07T14:17:13Z" | "2022-11-04T15:11:16Z" | "2022-11-04T15:06:00Z" | CONTRIBUTOR | null | null | null | While working on the dataset README generation script at https://github.com/madlag/datasets_readme_generator, I noticed that some datasets are missing a dataset_infos.json:
```
c4
lm1b
reclor
wikihow
```
And some do not have a dummy_data.zip:
```
kor_nli
math_dataset
mlqa
ms_marco
newsgroup
qa4mre
qangaroo
reddit_tifu
super_glue
trivia_qa
web_of_science
wmt14
wmt15
wmt16
wmt17
wmt18
wmt19
xtreme
```
But it seems that some of the latter do have a "dummy" directory.
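A quick way to reproduce these lists from a local clone (a sketch; the `datasets/` path and layout are assumptions about the checkout):
```python
from pathlib import Path

repo = Path("datasets")  # the datasets/ folder of a local clone of this repo

for d in sorted(p for p in repo.iterdir() if p.is_dir()):
    if not (d / "dataset_infos.json").exists():
        print(f"{d.name}: missing dataset_infos.json")
    if not any(d.glob("dummy/**/dummy_data.zip")):
        print(f"{d.name}: missing dummy_data.zip")
```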
| {
"+1": 1,
"-1": 0,
"confused": 0,
"eyes": 0,
"heart": 0,
"hooray": 0,
"laugh": 0,
"rocket": 0,
"total_count": 1,
"url": "https://api.github.com/repos/huggingface/datasets/issues/1701/reactions"
} | https://api.github.com/repos/huggingface/datasets/issues/1701/timeline | null | completed | false |
https://api.github.com/repos/huggingface/datasets/issues/1700 | https://api.github.com/repos/huggingface/datasets | https://api.github.com/repos/huggingface/datasets/issues/1700/labels{/name} | https://api.github.com/repos/huggingface/datasets/issues/1700/comments | https://api.github.com/repos/huggingface/datasets/issues/1700/events | https://github.com/huggingface/datasets/pull/1700 | 781,333,589 | MDExOlB1bGxSZXF1ZXN0NTUxMDc1NTg2 | 1,700 | Update Curiosity dialogs DatasetCard | {
"avatar_url": "https://avatars.githubusercontent.com/u/50873201?v=4",
"events_url": "https://api.github.com/users/vineeths96/events{/privacy}",
"followers_url": "https://api.github.com/users/vineeths96/followers",
"following_url": "https://api.github.com/users/vineeths96/following{/other_user}",
"gists_url": "https://api.github.com/users/vineeths96/gists{/gist_id}",
"gravatar_id": "",
"html_url": "https://github.com/vineeths96",
"id": 50873201,
"login": "vineeths96",
"node_id": "MDQ6VXNlcjUwODczMjAx",
"organizations_url": "https://api.github.com/users/vineeths96/orgs",
"received_events_url": "https://api.github.com/users/vineeths96/received_events",
"repos_url": "https://api.github.com/users/vineeths96/repos",
"site_admin": false,
"starred_url": "https://api.github.com/users/vineeths96/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/vineeths96/subscriptions",
"type": "User",
"url": "https://api.github.com/users/vineeths96"
} | [] | closed | false | null | [] | null | 0 | "2021-01-07T13:59:27Z" | "2021-01-12T18:51:32Z" | "2021-01-12T18:51:32Z" | CONTRIBUTOR | null | 0 | {
"diff_url": "https://github.com/huggingface/datasets/pull/1700.diff",
"html_url": "https://github.com/huggingface/datasets/pull/1700",
"merged_at": "2021-01-12T18:51:32Z",
"patch_url": "https://github.com/huggingface/datasets/pull/1700.patch",
"url": "https://api.github.com/repos/huggingface/datasets/pulls/1700"
} | Update Curiosity dialogs DatasetCard
Some entries in the data fields section are yet to be filled; there is little information available about those fields. | {
"+1": 0,
"-1": 0,
"confused": 0,
"eyes": 0,
"heart": 0,
"hooray": 0,
"laugh": 0,
"rocket": 0,
"total_count": 0,
"url": "https://api.github.com/repos/huggingface/datasets/issues/1700/reactions"
} | https://api.github.com/repos/huggingface/datasets/issues/1700/timeline | null | null | true |
https://api.github.com/repos/huggingface/datasets/issues/1699 | https://api.github.com/repos/huggingface/datasets | https://api.github.com/repos/huggingface/datasets/issues/1699/labels{/name} | https://api.github.com/repos/huggingface/datasets/issues/1699/comments | https://api.github.com/repos/huggingface/datasets/issues/1699/events | https://github.com/huggingface/datasets/pull/1699 | 781,271,558 | MDExOlB1bGxSZXF1ZXN0NTUxMDIzODE5 | 1,699 | Update DBRD dataset card and download URL | {
"avatar_url": "https://avatars.githubusercontent.com/u/8875786?v=4",
"events_url": "https://api.github.com/users/benjaminvdb/events{/privacy}",
"followers_url": "https://api.github.com/users/benjaminvdb/followers",
"following_url": "https://api.github.com/users/benjaminvdb/following{/other_user}",
"gists_url": "https://api.github.com/users/benjaminvdb/gists{/gist_id}",
"gravatar_id": "",
"html_url": "https://github.com/benjaminvdb",
"id": 8875786,
"login": "benjaminvdb",
"node_id": "MDQ6VXNlcjg4NzU3ODY=",
"organizations_url": "https://api.github.com/users/benjaminvdb/orgs",
"received_events_url": "https://api.github.com/users/benjaminvdb/received_events",
"repos_url": "https://api.github.com/users/benjaminvdb/repos",
"site_admin": false,
"starred_url": "https://api.github.com/users/benjaminvdb/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/benjaminvdb/subscriptions",
"type": "User",
"url": "https://api.github.com/users/benjaminvdb"
} | [] | closed | false | null | [] | null | 1 | "2021-01-07T12:16:43Z" | "2021-01-07T13:41:39Z" | "2021-01-07T13:40:59Z" | CONTRIBUTOR | null | 0 | {
"diff_url": "https://github.com/huggingface/datasets/pull/1699.diff",
"html_url": "https://github.com/huggingface/datasets/pull/1699",
"merged_at": "2021-01-07T13:40:59Z",
"patch_url": "https://github.com/huggingface/datasets/pull/1699.patch",
"url": "https://api.github.com/repos/huggingface/datasets/pulls/1699"
} | I've added the Dutch Book Review Dataset (DBRD) during the recent sprint. This pull request makes two minor changes:
1. I'm changing the download URL from Google Drive to the dataset's GitHub release package. This is now possible because of PR #1316.
2. I've updated the dataset card.
Cheers! 😄 | {
"+1": 0,
"-1": 0,
"confused": 0,
"eyes": 0,
"heart": 0,
"hooray": 0,
"laugh": 0,
"rocket": 0,
"total_count": 0,
"url": "https://api.github.com/repos/huggingface/datasets/issues/1699/reactions"
} | https://api.github.com/repos/huggingface/datasets/issues/1699/timeline | null | null | true |
https://api.github.com/repos/huggingface/datasets/issues/1698 | https://api.github.com/repos/huggingface/datasets | https://api.github.com/repos/huggingface/datasets/issues/1698/labels{/name} | https://api.github.com/repos/huggingface/datasets/issues/1698/comments | https://api.github.com/repos/huggingface/datasets/issues/1698/events | https://github.com/huggingface/datasets/pull/1698 | 781,152,561 | MDExOlB1bGxSZXF1ZXN0NTUwOTI0ODQ3 | 1,698 | Update Coached Conv Pref DatasetCard | {
"avatar_url": "https://avatars.githubusercontent.com/u/50873201?v=4",
"events_url": "https://api.github.com/users/vineeths96/events{/privacy}",
"followers_url": "https://api.github.com/users/vineeths96/followers",
"following_url": "https://api.github.com/users/vineeths96/following{/other_user}",
"gists_url": "https://api.github.com/users/vineeths96/gists{/gist_id}",
"gravatar_id": "",
"html_url": "https://github.com/vineeths96",
"id": 50873201,
"login": "vineeths96",
"node_id": "MDQ6VXNlcjUwODczMjAx",
"organizations_url": "https://api.github.com/users/vineeths96/orgs",
"received_events_url": "https://api.github.com/users/vineeths96/received_events",
"repos_url": "https://api.github.com/users/vineeths96/repos",
"site_admin": false,
"starred_url": "https://api.github.com/users/vineeths96/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/vineeths96/subscriptions",
"type": "User",
"url": "https://api.github.com/users/vineeths96"
} | [] | closed | false | null | [] | null | 1 | "2021-01-07T09:07:16Z" | "2021-01-08T17:04:33Z" | "2021-01-08T17:04:32Z" | CONTRIBUTOR | null | 0 | {
"diff_url": "https://github.com/huggingface/datasets/pull/1698.diff",
"html_url": "https://github.com/huggingface/datasets/pull/1698",
"merged_at": "2021-01-08T17:04:32Z",
"patch_url": "https://github.com/huggingface/datasets/pull/1698.patch",
"url": "https://api.github.com/repos/huggingface/datasets/pulls/1698"
} | Update Coached Conversation Preference DatasetCard | {
"+1": 0,
"-1": 0,
"confused": 0,
"eyes": 0,
"heart": 0,
"hooray": 0,
"laugh": 0,
"rocket": 0,
"total_count": 0,
"url": "https://api.github.com/repos/huggingface/datasets/issues/1698/reactions"
} | https://api.github.com/repos/huggingface/datasets/issues/1698/timeline | null | null | true |
https://api.github.com/repos/huggingface/datasets/issues/1697 | https://api.github.com/repos/huggingface/datasets | https://api.github.com/repos/huggingface/datasets/issues/1697/labels{/name} | https://api.github.com/repos/huggingface/datasets/issues/1697/comments | https://api.github.com/repos/huggingface/datasets/issues/1697/events | https://github.com/huggingface/datasets/pull/1697 | 781,126,579 | MDExOlB1bGxSZXF1ZXN0NTUwOTAzNzI5 | 1,697 | Update DialogRE DatasetCard | {
"avatar_url": "https://avatars.githubusercontent.com/u/50873201?v=4",
"events_url": "https://api.github.com/users/vineeths96/events{/privacy}",
"followers_url": "https://api.github.com/users/vineeths96/followers",
"following_url": "https://api.github.com/users/vineeths96/following{/other_user}",
"gists_url": "https://api.github.com/users/vineeths96/gists{/gist_id}",
"gravatar_id": "",
"html_url": "https://github.com/vineeths96",
"id": 50873201,
"login": "vineeths96",
"node_id": "MDQ6VXNlcjUwODczMjAx",
"organizations_url": "https://api.github.com/users/vineeths96/orgs",
"received_events_url": "https://api.github.com/users/vineeths96/received_events",
"repos_url": "https://api.github.com/users/vineeths96/repos",
"site_admin": false,
"starred_url": "https://api.github.com/users/vineeths96/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/vineeths96/subscriptions",
"type": "User",
"url": "https://api.github.com/users/vineeths96"
} | [] | closed | false | null | [] | null | 1 | "2021-01-07T08:22:33Z" | "2021-01-07T13:34:28Z" | "2021-01-07T13:34:28Z" | CONTRIBUTOR | null | 0 | {
"diff_url": "https://github.com/huggingface/datasets/pull/1697.diff",
"html_url": "https://github.com/huggingface/datasets/pull/1697",
"merged_at": "2021-01-07T13:34:28Z",
"patch_url": "https://github.com/huggingface/datasets/pull/1697.patch",
"url": "https://api.github.com/repos/huggingface/datasets/pulls/1697"
} | Update the information in the dataset card for the Dialog RE dataset. | {
"+1": 0,
"-1": 0,
"confused": 0,
"eyes": 0,
"heart": 0,
"hooray": 0,
"laugh": 0,
"rocket": 0,
"total_count": 0,
"url": "https://api.github.com/repos/huggingface/datasets/issues/1697/reactions"
} | https://api.github.com/repos/huggingface/datasets/issues/1697/timeline | null | null | true |
https://api.github.com/repos/huggingface/datasets/issues/1696 | https://api.github.com/repos/huggingface/datasets | https://api.github.com/repos/huggingface/datasets/issues/1696/labels{/name} | https://api.github.com/repos/huggingface/datasets/issues/1696/comments | https://api.github.com/repos/huggingface/datasets/issues/1696/events | https://github.com/huggingface/datasets/issues/1696 | 781,096,918 | MDU6SXNzdWU3ODEwOTY5MTg= | 1,696 | Unable to install datasets | {
"avatar_url": "https://avatars.githubusercontent.com/u/12635475?v=4",
"events_url": "https://api.github.com/users/glee2429/events{/privacy}",
"followers_url": "https://api.github.com/users/glee2429/followers",
"following_url": "https://api.github.com/users/glee2429/following{/other_user}",
"gists_url": "https://api.github.com/users/glee2429/gists{/gist_id}",
"gravatar_id": "",
"html_url": "https://github.com/glee2429",
"id": 12635475,
"login": "glee2429",
"node_id": "MDQ6VXNlcjEyNjM1NDc1",
"organizations_url": "https://api.github.com/users/glee2429/orgs",
"received_events_url": "https://api.github.com/users/glee2429/received_events",
"repos_url": "https://api.github.com/users/glee2429/repos",
"site_admin": false,
"starred_url": "https://api.github.com/users/glee2429/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/glee2429/subscriptions",
"type": "User",
"url": "https://api.github.com/users/glee2429"
} | [] | closed | false | null | [] | null | 4 | "2021-01-07T07:24:37Z" | "2021-01-08T00:33:05Z" | "2021-01-07T22:06:05Z" | NONE | null | null | null | **Edit**
I believe there's a bug in the package when installing it with Python 3.9. I recommend sticking with earlier Python versions. Thanks, @thomwolf, for the insight!
**Short description**
I followed the instructions for installing datasets (https://huggingface.co/docs/datasets/installation.html). However, when I tried to install datasets using `pip install datasets`, I got a massive error message after getting stuck at "Installing build dependencies..."
I wondered whether creating a virtual environment would fix the problem, but it didn't help. Can anyone offer some advice on how to fix this issue?
Here's the error message:
`(env) Gas-MacBook-Pro:Downloads destiny$ pip install datasets
Collecting datasets
Using cached datasets-1.2.0-py3-none-any.whl (159 kB)
Collecting numpy>=1.17
Using cached numpy-1.19.5-cp39-cp39-macosx_10_9_x86_64.whl (15.6 MB)
Collecting pyarrow>=0.17.1
Using cached pyarrow-2.0.0.tar.gz (58.9 MB)
....
_configtest.c:9:5: warning: incompatible redeclaration of library function 'ceilf' [-Wincompatible-library-redeclaration]
int ceilf (void);
^
_configtest.c:9:5: note: 'ceilf' is a builtin with type 'float (float)'
_configtest.c:10:5: warning: incompatible redeclaration of library function 'rintf' [-Wincompatible-library-redeclaration]
int rintf (void);
^
_configtest.c:10:5: note: 'rintf' is a builtin with type 'float (float)'
_configtest.c:11:5: warning: incompatible redeclaration of library function 'truncf' [-Wincompatible-library-redeclaration]
int truncf (void);
^
_configtest.c:11:5: note: 'truncf' is a builtin with type 'float (float)'
_configtest.c:12:5: warning: incompatible redeclaration of library function 'sqrtf' [-Wincompatible-library-redeclaration]
int sqrtf (void);
^
_configtest.c:12:5: note: 'sqrtf' is a builtin with type 'float (float)'
_configtest.c:13:5: warning: incompatible redeclaration of library function 'log10f' [-Wincompatible-library-redeclaration]
int log10f (void);
^
_configtest.c:13:5: note: 'log10f' is a builtin with type 'float (float)'
_configtest.c:14:5: warning: incompatible redeclaration of library function 'logf' [-Wincompatible-library-redeclaration]
int logf (void);
^
_configtest.c:14:5: note: 'logf' is a builtin with type 'float (float)'
_configtest.c:15:5: warning: incompatible redeclaration of library function 'log1pf' [-Wincompatible-library-redeclaration]
int log1pf (void);
^
_configtest.c:15:5: note: 'log1pf' is a builtin with type 'float (float)'
_configtest.c:16:5: warning: incompatible redeclaration of library function 'expf' [-Wincompatible-library-redeclaration]
int expf (void);
^
_configtest.c:16:5: note: 'expf' is a builtin with type 'float (float)'
_configtest.c:17:5: warning: incompatible redeclaration of library function 'expm1f' [-Wincompatible-library-redeclaration]
int expm1f (void);
^
_configtest.c:17:5: note: 'expm1f' is a builtin with type 'float (float)'
_configtest.c:18:5: warning: incompatible redeclaration of library function 'asinf' [-Wincompatible-library-redeclaration]
int asinf (void);
^
_configtest.c:18:5: note: 'asinf' is a builtin with type 'float (float)'
_configtest.c:19:5: warning: incompatible redeclaration of library function 'acosf' [-Wincompatible-library-redeclaration]
int acosf (void);
^
_configtest.c:19:5: note: 'acosf' is a builtin with type 'float (float)'
_configtest.c:20:5: warning: incompatible redeclaration of library function 'atanf' [-Wincompatible-library-redeclaration]
int atanf (void);
^
_configtest.c:20:5: note: 'atanf' is a builtin with type 'float (float)'
_configtest.c:21:5: warning: incompatible redeclaration of library function 'asinhf' [-Wincompatible-library-redeclaration]
int asinhf (void);
^
_configtest.c:21:5: note: 'asinhf' is a builtin with type 'float (float)'
_configtest.c:22:5: warning: incompatible redeclaration of library function 'acoshf' [-Wincompatible-library-redeclaration]
int acoshf (void);
^
_configtest.c:22:5: note: 'acoshf' is a builtin with type 'float (float)'
_configtest.c:23:5: warning: incompatible redeclaration of library function 'atanhf' [-Wincompatible-library-redeclaration]
int atanhf (void);
^
_configtest.c:23:5: note: 'atanhf' is a builtin with type 'float (float)'
_configtest.c:24:5: warning: incompatible redeclaration of library function 'hypotf' [-Wincompatible-library-redeclaration]
int hypotf (void);
^
_configtest.c:24:5: note: 'hypotf' is a builtin with type 'float (float, float)'
_configtest.c:25:5: warning: incompatible redeclaration of library function 'atan2f' [-Wincompatible-library-redeclaration]
int atan2f (void);
^
_configtest.c:25:5: note: 'atan2f' is a builtin with type 'float (float, float)'
_configtest.c:26:5: warning: incompatible redeclaration of library function 'powf' [-Wincompatible-library-redeclaration]
int powf (void);
^
_configtest.c:26:5: note: 'powf' is a builtin with type 'float (float, float)'
_configtest.c:27:5: warning: incompatible redeclaration of library function 'fmodf' [-Wincompatible-library-redeclaration]
int fmodf (void);
^
_configtest.c:27:5: note: 'fmodf' is a builtin with type 'float (float, float)'
_configtest.c:28:5: warning: incompatible redeclaration of library function 'modff' [-Wincompatible-library-redeclaration]
int modff (void);
^
_configtest.c:28:5: note: 'modff' is a builtin with type 'float (float, float *)'
_configtest.c:29:5: warning: incompatible redeclaration of library function 'frexpf' [-Wincompatible-library-redeclaration]
int frexpf (void);
^
_configtest.c:29:5: note: 'frexpf' is a builtin with type 'float (float, int *)'
_configtest.c:30:5: warning: incompatible redeclaration of library function 'ldexpf' [-Wincompatible-library-redeclaration]
int ldexpf (void);
^
_configtest.c:30:5: note: 'ldexpf' is a builtin with type 'float (float, int)'
_configtest.c:31:5: warning: incompatible redeclaration of library function 'exp2f' [-Wincompatible-library-redeclaration]
int exp2f (void);
^
_configtest.c:31:5: note: 'exp2f' is a builtin with type 'float (float)'
_configtest.c:32:5: warning: incompatible redeclaration of library function 'log2f' [-Wincompatible-library-redeclaration]
int log2f (void);
^
_configtest.c:32:5: note: 'log2f' is a builtin with type 'float (float)'
_configtest.c:33:5: warning: incompatible redeclaration of library function 'copysignf' [-Wincompatible-library-redeclaration]
int copysignf (void);
^
_configtest.c:33:5: note: 'copysignf' is a builtin with type 'float (float, float)'
_configtest.c:34:5: warning: incompatible redeclaration of library function 'nextafterf' [-Wincompatible-library-redeclaration]
int nextafterf (void);
^
_configtest.c:34:5: note: 'nextafterf' is a builtin with type 'float (float, float)'
_configtest.c:35:5: warning: incompatible redeclaration of library function 'cbrtf' [-Wincompatible-library-redeclaration]
int cbrtf (void);
^
_configtest.c:35:5: note: 'cbrtf' is a builtin with type 'float (float)'
35 warnings generated.
clang _configtest.o -o _configtest
success!
removing: _configtest.c _configtest.o _configtest.o.d _configtest
C compiler: clang -Wno-unused-result -Wsign-compare -Wunreachable-code -fno-common -dynamic -DNDEBUG -g -fwrapv -O3 -Wall -isysroot /Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk -I/Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk/usr/include -I/Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk/System/Library/Frameworks/Tk.framework/Versions/8.5/Headers
compile options: '-Inumpy/core/src/common -Inumpy/core/src -Inumpy/core -Inumpy/core/src/npymath -Inumpy/core/src/multiarray -Inumpy/core/src/umath -Inumpy/core/src/npysort -I/usr/local/include -I/usr/local/opt/openssl@1.1/include -I/usr/local/opt/sqlite/include -I/Users/destiny/Downloads/env/include -I/usr/local/Cellar/python@3.9/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9 -c'
clang: _configtest.c
_configtest.c:1:5: warning: incompatible redeclaration of library function 'sinl' [-Wincompatible-library-redeclaration]
int sinl (void);
^
_configtest.c:1:5: note: 'sinl' is a builtin with type 'long double (long double)'
_configtest.c:2:5: warning: incompatible redeclaration of library function 'cosl' [-Wincompatible-library-redeclaration]
int cosl (void);
^
_configtest.c:2:5: note: 'cosl' is a builtin with type 'long double (long double)'
_configtest.c:3:5: warning: incompatible redeclaration of library function 'tanl' [-Wincompatible-library-redeclaration]
int tanl (void);
^
_configtest.c:3:5: note: 'tanl' is a builtin with type 'long double (long double)'
_configtest.c:4:5: warning: incompatible redeclaration of library function 'sinhl' [-Wincompatible-library-redeclaration]
int sinhl (void);
^
_configtest.c:4:5: note: 'sinhl' is a builtin with type 'long double (long double)'
_configtest.c:5:5: warning: incompatible redeclaration of library function 'coshl' [-Wincompatible-library-redeclaration]
int coshl (void);
^
_configtest.c:5:5: note: 'coshl' is a builtin with type 'long double (long double)'
_configtest.c:6:5: warning: incompatible redeclaration of library function 'tanhl' [-Wincompatible-library-redeclaration]
int tanhl (void);
^
_configtest.c:6:5: note: 'tanhl' is a builtin with type 'long double (long double)'
_configtest.c:7:5: warning: incompatible redeclaration of library function 'fabsl' [-Wincompatible-library-redeclaration]
int fabsl (void);
^
_configtest.c:7:5: note: 'fabsl' is a builtin with type 'long double (long double)'
_configtest.c:8:5: warning: incompatible redeclaration of library function 'floorl' [-Wincompatible-library-redeclaration]
int floorl (void);
^
_configtest.c:8:5: note: 'floorl' is a builtin with type 'long double (long double)'
_configtest.c:9:5: warning: incompatible redeclaration of library function 'ceill' [-Wincompatible-library-redeclaration]
int ceill (void);
^
_configtest.c:9:5: note: 'ceill' is a builtin with type 'long double (long double)'
_configtest.c:10:5: warning: incompatible redeclaration of library function 'rintl' [-Wincompatible-library-redeclaration]
int rintl (void);
^
_configtest.c:10:5: note: 'rintl' is a builtin with type 'long double (long double)'
_configtest.c:11:5: warning: incompatible redeclaration of library function 'truncl' [-Wincompatible-library-redeclaration]
int truncl (void);
^
_configtest.c:11:5: note: 'truncl' is a builtin with type 'long double (long double)'
_configtest.c:12:5: warning: incompatible redeclaration of library function 'sqrtl' [-Wincompatible-library-redeclaration]
int sqrtl (void);
^
_configtest.c:12:5: note: 'sqrtl' is a builtin with type 'long double (long double)'
_configtest.c:13:5: warning: incompatible redeclaration of library function 'log10l' [-Wincompatible-library-redeclaration]
int log10l (void);
^
_configtest.c:13:5: note: 'log10l' is a builtin with type 'long double (long double)'
_configtest.c:14:5: warning: incompatible redeclaration of library function 'logl' [-Wincompatible-library-redeclaration]
int logl (void);
^
_configtest.c:14:5: note: 'logl' is a builtin with type 'long double (long double)'
_configtest.c:15:5: warning: incompatible redeclaration of library function 'log1pl' [-Wincompatible-library-redeclaration]
int log1pl (void);
^
_configtest.c:15:5: note: 'log1pl' is a builtin with type 'long double (long double)'
_configtest.c:16:5: warning: incompatible redeclaration of library function 'expl' [-Wincompatible-library-redeclaration]
int expl (void);
^
_configtest.c:16:5: note: 'expl' is a builtin with type 'long double (long double)'
_configtest.c:17:5: warning: incompatible redeclaration of library function 'expm1l' [-Wincompatible-library-redeclaration]
int expm1l (void);
^
_configtest.c:17:5: note: 'expm1l' is a builtin with type 'long double (long double)'
_configtest.c:18:5: warning: incompatible redeclaration of library function 'asinl' [-Wincompatible-library-redeclaration]
int asinl (void);
^
_configtest.c:18:5: note: 'asinl' is a builtin with type 'long double (long double)'
_configtest.c:19:5: warning: incompatible redeclaration of library function 'acosl' [-Wincompatible-library-redeclaration]
int acosl (void);
^
_configtest.c:19:5: note: 'acosl' is a builtin with type 'long double (long double)'
_configtest.c:20:5: warning: incompatible redeclaration of library function 'atanl' [-Wincompatible-library-redeclaration]
int atanl (void);
^
_configtest.c:20:5: note: 'atanl' is a builtin with type 'long double (long double)'
_configtest.c:21:5: warning: incompatible redeclaration of library function 'asinhl' [-Wincompatible-library-redeclaration]
int asinhl (void);
^
_configtest.c:21:5: note: 'asinhl' is a builtin with type 'long double (long double)'
_configtest.c:22:5: warning: incompatible redeclaration of library function 'acoshl' [-Wincompatible-library-redeclaration]
int acoshl (void);
^
_configtest.c:22:5: note: 'acoshl' is a builtin with type 'long double (long double)'
_configtest.c:23:5: warning: incompatible redeclaration of library function 'atanhl' [-Wincompatible-library-redeclaration]
int atanhl (void);
^
_configtest.c:23:5: note: 'atanhl' is a builtin with type 'long double (long double)'
_configtest.c:24:5: warning: incompatible redeclaration of library function 'hypotl' [-Wincompatible-library-redeclaration]
int hypotl (void);
^
_configtest.c:24:5: note: 'hypotl' is a builtin with type 'long double (long double, long double)'
_configtest.c:25:5: warning: incompatible redeclaration of library function 'atan2l' [-Wincompatible-library-redeclaration]
int atan2l (void);
^
_configtest.c:25:5: note: 'atan2l' is a builtin with type 'long double (long double, long double)'
_configtest.c:26:5: warning: incompatible redeclaration of library function 'powl' [-Wincompatible-library-redeclaration]
int powl (void);
^
_configtest.c:26:5: note: 'powl' is a builtin with type 'long double (long double, long double)'
_configtest.c:27:5: warning: incompatible redeclaration of library function 'fmodl' [-Wincompatible-library-redeclaration]
int fmodl (void);
^
_configtest.c:27:5: note: 'fmodl' is a builtin with type 'long double (long double, long double)'
_configtest.c:28:5: warning: incompatible redeclaration of library function 'modfl' [-Wincompatible-library-redeclaration]
int modfl (void);
^
_configtest.c:28:5: note: 'modfl' is a builtin with type 'long double (long double, long double *)'
_configtest.c:29:5: warning: incompatible redeclaration of library function 'frexpl' [-Wincompatible-library-redeclaration]
int frexpl (void);
^
_configtest.c:29:5: note: 'frexpl' is a builtin with type 'long double (long double, int *)'
_configtest.c:30:5: warning: incompatible redeclaration of library function 'ldexpl' [-Wincompatible-library-redeclaration]
int ldexpl (void);
^
_configtest.c:30:5: note: 'ldexpl' is a builtin with type 'long double (long double, int)'
_configtest.c:31:5: warning: incompatible redeclaration of library function 'exp2l' [-Wincompatible-library-redeclaration]
int exp2l (void);
^
_configtest.c:31:5: note: 'exp2l' is a builtin with type 'long double (long double)'
_configtest.c:32:5: warning: incompatible redeclaration of library function 'log2l' [-Wincompatible-library-redeclaration]
int log2l (void);
^
_configtest.c:32:5: note: 'log2l' is a builtin with type 'long double (long double)'
_configtest.c:33:5: warning: incompatible redeclaration of library function 'copysignl' [-Wincompatible-library-redeclaration]
int copysignl (void);
^
_configtest.c:33:5: note: 'copysignl' is a builtin with type 'long double (long double, long double)'
_configtest.c:34:5: warning: incompatible redeclaration of library function 'nextafterl' [-Wincompatible-library-redeclaration]
int nextafterl (void);
^
_configtest.c:34:5: note: 'nextafterl' is a builtin with type 'long double (long double, long double)'
_configtest.c:35:5: warning: incompatible redeclaration of library function 'cbrtl' [-Wincompatible-library-redeclaration]
int cbrtl (void);
^
_configtest.c:35:5: note: 'cbrtl' is a builtin with type 'long double (long double)'
35 warnings generated.
clang _configtest.o -o _configtest
success!
removing: _configtest.c _configtest.o _configtest.o.d _configtest
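The 35 warnings above are expected: the probe deliberately redeclares each long-double math function with a dummy prototype and only cares whether the link step succeeds. A minimal sketch of the pattern, assuming numpy's generated `_configtest.c` looks roughly like this (the real file differs in detail):

```c
/* Dummy prototypes: wrong on purpose, which is exactly what triggers the
 * -Wincompatible-library-redeclaration warnings seen in the log above. */
int sqrtl (void);
int cbrtl (void);

int main (void)
{
    /* Calling the functions forces the linker to resolve them from libm;
     * "success!" means the symbols exist, and the result is recorded as
     * HAVE_SQRTL, HAVE_CBRTL, ... in the config.h dumped further down. */
    sqrtl ();
    cbrtl ();
    return 0;
}
```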
C compiler: clang -Wno-unused-result -Wsign-compare -Wunreachable-code -fno-common -dynamic -DNDEBUG -g -fwrapv -O3 -Wall -isysroot /Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk -I/Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk/usr/include -I/Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk/System/Library/Frameworks/Tk.framework/Versions/8.5/Headers
compile options: '-Inumpy/core/src/common -Inumpy/core/src -Inumpy/core -Inumpy/core/src/npymath -Inumpy/core/src/multiarray -Inumpy/core/src/umath -Inumpy/core/src/npysort -I/usr/local/include -I/usr/local/opt/[email protected]/include -I/usr/local/opt/sqlite/include -I/Users/destiny/Downloads/env/include -I/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9 -c'
clang: _configtest.c
success!
removing: _configtest.c _configtest.o _configtest.o.d
C compiler: clang -Wno-unused-result -Wsign-compare -Wunreachable-code -fno-common -dynamic -DNDEBUG -g -fwrapv -O3 -Wall -isysroot /Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk -I/Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk/usr/include -I/Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk/System/Library/Frameworks/Tk.framework/Versions/8.5/Headers
compile options: '-Inumpy/core/src/common -Inumpy/core/src -Inumpy/core -Inumpy/core/src/npymath -Inumpy/core/src/multiarray -Inumpy/core/src/umath -Inumpy/core/src/npysort -I/usr/local/include -I/usr/local/opt/[email protected]/include -I/usr/local/opt/sqlite/include -I/Users/destiny/Downloads/env/include -I/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9 -c'
clang: _configtest.c
success!
removing: _configtest.c _configtest.o _configtest.o.d
C compiler: clang -Wno-unused-result -Wsign-compare -Wunreachable-code -fno-common -dynamic -DNDEBUG -g -fwrapv -O3 -Wall -isysroot /Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk -I/Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk/usr/include -I/Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk/System/Library/Frameworks/Tk.framework/Versions/8.5/Headers
compile options: '-Inumpy/core/src/common -Inumpy/core/src -Inumpy/core -Inumpy/core/src/npymath -Inumpy/core/src/multiarray -Inumpy/core/src/umath -Inumpy/core/src/npysort -I/usr/local/include -I/usr/local/opt/[email protected]/include -I/usr/local/opt/sqlite/include -I/Users/destiny/Downloads/env/include -I/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9 -c'
clang: _configtest.c
success!
removing: _configtest.c _configtest.o _configtest.o.d
C compiler: clang -Wno-unused-result -Wsign-compare -Wunreachable-code -fno-common -dynamic -DNDEBUG -g -fwrapv -O3 -Wall -isysroot /Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk -I/Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk/usr/include -I/Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk/System/Library/Frameworks/Tk.framework/Versions/8.5/Headers
compile options: '-Inumpy/core/src/common -Inumpy/core/src -Inumpy/core -Inumpy/core/src/npymath -Inumpy/core/src/multiarray -Inumpy/core/src/umath -Inumpy/core/src/npysort -I/usr/local/include -I/usr/local/opt/[email protected]/include -I/usr/local/opt/sqlite/include -I/Users/destiny/Downloads/env/include -I/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9 -c'
clang: _configtest.c
success!
removing: _configtest.c _configtest.o _configtest.o.d
C compiler: clang -Wno-unused-result -Wsign-compare -Wunreachable-code -fno-common -dynamic -DNDEBUG -g -fwrapv -O3 -Wall -isysroot /Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk -I/Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk/usr/include -I/Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk/System/Library/Frameworks/Tk.framework/Versions/8.5/Headers
compile options: '-Inumpy/core/src/common -Inumpy/core/src -Inumpy/core -Inumpy/core/src/npymath -Inumpy/core/src/multiarray -Inumpy/core/src/umath -Inumpy/core/src/npysort -I/usr/local/include -I/usr/local/opt/[email protected]/include -I/usr/local/opt/sqlite/include -I/Users/destiny/Downloads/env/include -I/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9 -c'
clang: _configtest.c
_configtest.c:8:12: error: use of undeclared identifier 'HAVE_DECL_SIGNBIT'
(void) HAVE_DECL_SIGNBIT;
^
1 error generated.
failure.
removing: _configtest.c _configtest.o
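Note that this `failure.` is expected and harmless. Assuming the probe follows numpy's usual declaration check, it only compiles when `HAVE_DECL_SIGNBIT` is already predefined (for example by Python's own `pyconfig.h`); failing here just tells the build to run its own `signbit` test, and the config.h dumped below still ends up with the define. A sketch of the idea:

```c
#include <Python.h>
#include <math.h>

int main (void)
{
#ifndef HAVE_DECL_SIGNBIT
    /* Not predefined: the bare token is an undeclared identifier, giving
     * exactly the compile error shown above. */
    (void) HAVE_DECL_SIGNBIT;
#endif
    return 0;
}
```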
C compiler: clang -Wno-unused-result -Wsign-compare -Wunreachable-code -fno-common -dynamic -DNDEBUG -g -fwrapv -O3 -Wall -isysroot /Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk -I/Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk/usr/include -I/Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk/System/Library/Frameworks/Tk.framework/Versions/8.5/Headers
compile options: '-Inumpy/core/src/common -Inumpy/core/src -Inumpy/core -Inumpy/core/src/npymath -Inumpy/core/src/multiarray -Inumpy/core/src/umath -Inumpy/core/src/npysort -I/usr/local/include -I/usr/local/opt/[email protected]/include -I/usr/local/opt/sqlite/include -I/Users/destiny/Downloads/env/include -I/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9 -c'
clang: _configtest.c
success!
removing: _configtest.c _configtest.o _configtest.o.d
C compiler: clang -Wno-unused-result -Wsign-compare -Wunreachable-code -fno-common -dynamic -DNDEBUG -g -fwrapv -O3 -Wall -isysroot /Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk -I/Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk/usr/include -I/Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk/System/Library/Frameworks/Tk.framework/Versions/8.5/Headers
compile options: '-Inumpy/core/src/common -Inumpy/core/src -Inumpy/core -Inumpy/core/src/npymath -Inumpy/core/src/multiarray -Inumpy/core/src/umath -Inumpy/core/src/npysort -I/usr/local/include -I/usr/local/opt/[email protected]/include -I/usr/local/opt/sqlite/include -I/Users/destiny/Downloads/env/include -I/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9 -c'
clang: _configtest.c
success!
removing: _configtest.c _configtest.o _configtest.o.d
C compiler: clang -Wno-unused-result -Wsign-compare -Wunreachable-code -fno-common -dynamic -DNDEBUG -g -fwrapv -O3 -Wall -isysroot /Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk -I/Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk/usr/include -I/Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk/System/Library/Frameworks/Tk.framework/Versions/8.5/Headers
compile options: '-Inumpy/core/src/common -Inumpy/core/src -Inumpy/core -Inumpy/core/src/npymath -Inumpy/core/src/multiarray -Inumpy/core/src/umath -Inumpy/core/src/npysort -I/usr/local/include -I/usr/local/opt/[email protected]/include -I/usr/local/opt/sqlite/include -I/Users/destiny/Downloads/env/include -I/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9 -c'
clang: _configtest.c
success!
removing: _configtest.c _configtest.o _configtest.o.d
C compiler: clang -Wno-unused-result -Wsign-compare -Wunreachable-code -fno-common -dynamic -DNDEBUG -g -fwrapv -O3 -Wall -isysroot /Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk -I/Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk/usr/include -I/Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk/System/Library/Frameworks/Tk.framework/Versions/8.5/Headers
compile options: '-Inumpy/core/src/common -Inumpy/core/src -Inumpy/core -Inumpy/core/src/npymath -Inumpy/core/src/multiarray -Inumpy/core/src/umath -Inumpy/core/src/npysort -I/usr/local/include -I/usr/local/opt/[email protected]/include -I/usr/local/opt/sqlite/include -I/Users/destiny/Downloads/env/include -I/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9 -c'
clang: _configtest.c
success!
removing: _configtest.c _configtest.o _configtest.o.d
C compiler: clang -Wno-unused-result -Wsign-compare -Wunreachable-code -fno-common -dynamic -DNDEBUG -g -fwrapv -O3 -Wall -isysroot /Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk -I/Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk/usr/include -I/Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk/System/Library/Frameworks/Tk.framework/Versions/8.5/Headers
compile options: '-Inumpy/core/src/common -Inumpy/core/src -Inumpy/core -Inumpy/core/src/npymath -Inumpy/core/src/multiarray -Inumpy/core/src/umath -Inumpy/core/src/npysort -I/usr/local/include -I/usr/local/opt/[email protected]/include -I/usr/local/opt/sqlite/include -I/Users/destiny/Downloads/env/include -I/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9 -c'
clang: _configtest.c
removing: _configtest.c _configtest.o _configtest.o.d
C compiler: clang -Wno-unused-result -Wsign-compare -Wunreachable-code -fno-common -dynamic -DNDEBUG -g -fwrapv -O3 -Wall -isysroot /Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk -I/Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk/usr/include -I/Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk/System/Library/Frameworks/Tk.framework/Versions/8.5/Headers
compile options: '-Inumpy/core/src/common -Inumpy/core/src -Inumpy/core -Inumpy/core/src/npymath -Inumpy/core/src/multiarray -Inumpy/core/src/umath -Inumpy/core/src/npysort -I/usr/local/include -I/usr/local/opt/[email protected]/include -I/usr/local/opt/sqlite/include -I/Users/destiny/Downloads/env/include -I/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9 -c'
clang: _configtest.c
removing: _configtest.c _configtest.o _configtest.o.d
C compiler: clang -Wno-unused-result -Wsign-compare -Wunreachable-code -fno-common -dynamic -DNDEBUG -g -fwrapv -O3 -Wall -isysroot /Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk -I/Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk/usr/include -I/Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk/System/Library/Frameworks/Tk.framework/Versions/8.5/Headers
compile options: '-Inumpy/core/src/common -Inumpy/core/src -Inumpy/core -Inumpy/core/src/npymath -Inumpy/core/src/multiarray -Inumpy/core/src/umath -Inumpy/core/src/npysort -I/usr/local/include -I/usr/local/opt/[email protected]/include -I/usr/local/opt/sqlite/include -I/Users/destiny/Downloads/env/include -I/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9 -c'
clang: _configtest.c
removing: _configtest.c _configtest.o _configtest.o.d
C compiler: clang -Wno-unused-result -Wsign-compare -Wunreachable-code -fno-common -dynamic -DNDEBUG -g -fwrapv -O3 -Wall -isysroot /Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk -I/Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk/usr/include -I/Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk/System/Library/Frameworks/Tk.framework/Versions/8.5/Headers
compile options: '-Inumpy/core/src/common -Inumpy/core/src -Inumpy/core -Inumpy/core/src/npymath -Inumpy/core/src/multiarray -Inumpy/core/src/umath -Inumpy/core/src/npysort -I/usr/local/include -I/usr/local/opt/[email protected]/include -I/usr/local/opt/sqlite/include -I/Users/destiny/Downloads/env/include -I/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9 -c'
clang: _configtest.c
_configtest.c:1:5: warning: incompatible redeclaration of library function 'cabs' [-Wincompatible-library-redeclaration]
int cabs (void);
^
_configtest.c:1:5: note: 'cabs' is a builtin with type 'double (_Complex double)'
_configtest.c:2:5: warning: incompatible redeclaration of library function 'cacos' [-Wincompatible-library-redeclaration]
int cacos (void);
^
_configtest.c:2:5: note: 'cacos' is a builtin with type '_Complex double (_Complex double)'
_configtest.c:3:5: warning: incompatible redeclaration of library function 'cacosh' [-Wincompatible-library-redeclaration]
int cacosh (void);
^
_configtest.c:3:5: note: 'cacosh' is a builtin with type '_Complex double (_Complex double)'
_configtest.c:4:5: warning: incompatible redeclaration of library function 'carg' [-Wincompatible-library-redeclaration]
int carg (void);
^
_configtest.c:4:5: note: 'carg' is a builtin with type 'double (_Complex double)'
_configtest.c:5:5: warning: incompatible redeclaration of library function 'casin' [-Wincompatible-library-redeclaration]
int casin (void);
^
_configtest.c:5:5: note: 'casin' is a builtin with type '_Complex double (_Complex double)'
_configtest.c:6:5: warning: incompatible redeclaration of library function 'casinh' [-Wincompatible-library-redeclaration]
int casinh (void);
^
_configtest.c:6:5: note: 'casinh' is a builtin with type '_Complex double (_Complex double)'
_configtest.c:7:5: warning: incompatible redeclaration of library function 'catan' [-Wincompatible-library-redeclaration]
int catan (void);
^
_configtest.c:7:5: note: 'catan' is a builtin with type '_Complex double (_Complex double)'
_configtest.c:8:5: warning: incompatible redeclaration of library function 'catanh' [-Wincompatible-library-redeclaration]
int catanh (void);
^
_configtest.c:8:5: note: 'catanh' is a builtin with type '_Complex double (_Complex double)'
_configtest.c:9:5: warning: incompatible redeclaration of library function 'ccos' [-Wincompatible-library-redeclaration]
int ccos (void);
^
_configtest.c:9:5: note: 'ccos' is a builtin with type '_Complex double (_Complex double)'
_configtest.c:10:5: warning: incompatible redeclaration of library function 'ccosh' [-Wincompatible-library-redeclaration]
int ccosh (void);
^
_configtest.c:10:5: note: 'ccosh' is a builtin with type '_Complex double (_Complex double)'
_configtest.c:11:5: warning: incompatible redeclaration of library function 'cexp' [-Wincompatible-library-redeclaration]
int cexp (void);
^
_configtest.c:11:5: note: 'cexp' is a builtin with type '_Complex double (_Complex double)'
_configtest.c:12:5: warning: incompatible redeclaration of library function 'cimag' [-Wincompatible-library-redeclaration]
int cimag (void);
^
_configtest.c:12:5: note: 'cimag' is a builtin with type 'double (_Complex double)'
_configtest.c:13:5: warning: incompatible redeclaration of library function 'clog' [-Wincompatible-library-redeclaration]
int clog (void);
^
_configtest.c:13:5: note: 'clog' is a builtin with type '_Complex double (_Complex double)'
_configtest.c:14:5: warning: incompatible redeclaration of library function 'conj' [-Wincompatible-library-redeclaration]
int conj (void);
^
_configtest.c:14:5: note: 'conj' is a builtin with type '_Complex double (_Complex double)'
_configtest.c:15:5: warning: incompatible redeclaration of library function 'cpow' [-Wincompatible-library-redeclaration]
int cpow (void);
^
_configtest.c:15:5: note: 'cpow' is a builtin with type '_Complex double (_Complex double, _Complex double)'
_configtest.c:16:5: warning: incompatible redeclaration of library function 'cproj' [-Wincompatible-library-redeclaration]
int cproj (void);
^
_configtest.c:16:5: note: 'cproj' is a builtin with type '_Complex double (_Complex double)'
_configtest.c:17:5: warning: incompatible redeclaration of library function 'creal' [-Wincompatible-library-redeclaration]
int creal (void);
^
_configtest.c:17:5: note: 'creal' is a builtin with type 'double (_Complex double)'
_configtest.c:18:5: warning: incompatible redeclaration of library function 'csin' [-Wincompatible-library-redeclaration]
int csin (void);
^
_configtest.c:18:5: note: 'csin' is a builtin with type '_Complex double (_Complex double)'
_configtest.c:19:5: warning: incompatible redeclaration of library function 'csinh' [-Wincompatible-library-redeclaration]
int csinh (void);
^
_configtest.c:19:5: note: 'csinh' is a builtin with type '_Complex double (_Complex double)'
_configtest.c:20:5: warning: incompatible redeclaration of library function 'csqrt' [-Wincompatible-library-redeclaration]
int csqrt (void);
^
_configtest.c:20:5: note: 'csqrt' is a builtin with type '_Complex double (_Complex double)'
_configtest.c:21:5: warning: incompatible redeclaration of library function 'ctan' [-Wincompatible-library-redeclaration]
int ctan (void);
^
_configtest.c:21:5: note: 'ctan' is a builtin with type '_Complex double (_Complex double)'
_configtest.c:22:5: warning: incompatible redeclaration of library function 'ctanh' [-Wincompatible-library-redeclaration]
int ctanh (void);
^
_configtest.c:22:5: note: 'ctanh' is a builtin with type '_Complex double (_Complex double)'
22 warnings generated.
clang _configtest.o -o _configtest
success!
removing: _configtest.c _configtest.o _configtest.o.d _configtest
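The same call-and-link probe now repeats for the C99 `<complex.h>` functions, first in `double` (above), then in `float` (`cabsf` ...) and `long double` (`cabsl` ...) flavors; the results surface as `HAVE_CABS` through `HAVE_CTANHL` and `NPY_USE_C99_COMPLEX` in the headers dumped later. For reference, the functions being probed are plain standard C99, e.g.:

```c
/* Standard C99, nothing numpy-specific; cpow/creal/cimag are among the
 * functions whose availability the probes above record. */
#include <complex.h>
#include <stdio.h>

int main (void)
{
    double complex z = cpow (1.0 + 2.0 * I, 2.0);   /* (1+2i)^2 = -3+4i */
    printf ("%g%+gi\n", creal (z), cimag (z));
    return 0;
}
```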
C compiler: clang -Wno-unused-result -Wsign-compare -Wunreachable-code -fno-common -dynamic -DNDEBUG -g -fwrapv -O3 -Wall -isysroot /Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk -I/Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk/usr/include -I/Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk/System/Library/Frameworks/Tk.framework/Versions/8.5/Headers
compile options: '-Inumpy/core/src/common -Inumpy/core/src -Inumpy/core -Inumpy/core/src/npymath -Inumpy/core/src/multiarray -Inumpy/core/src/umath -Inumpy/core/src/npysort -I/usr/local/include -I/usr/local/opt/[email protected]/include -I/usr/local/opt/sqlite/include -I/Users/destiny/Downloads/env/include -I/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9 -c'
clang: _configtest.c
_configtest.c:1:5: warning: incompatible redeclaration of library function 'cabsf' [-Wincompatible-library-redeclaration]
int cabsf (void);
^
_configtest.c:1:5: note: 'cabsf' is a builtin with type 'float (_Complex float)'
_configtest.c:2:5: warning: incompatible redeclaration of library function 'cacosf' [-Wincompatible-library-redeclaration]
int cacosf (void);
^
_configtest.c:2:5: note: 'cacosf' is a builtin with type '_Complex float (_Complex float)'
_configtest.c:3:5: warning: incompatible redeclaration of library function 'cacoshf' [-Wincompatible-library-redeclaration]
int cacoshf (void);
^
_configtest.c:3:5: note: 'cacoshf' is a builtin with type '_Complex float (_Complex float)'
_configtest.c:4:5: warning: incompatible redeclaration of library function 'cargf' [-Wincompatible-library-redeclaration]
int cargf (void);
^
_configtest.c:4:5: note: 'cargf' is a builtin with type 'float (_Complex float)'
_configtest.c:5:5: warning: incompatible redeclaration of library function 'casinf' [-Wincompatible-library-redeclaration]
int casinf (void);
^
_configtest.c:5:5: note: 'casinf' is a builtin with type '_Complex float (_Complex float)'
_configtest.c:6:5: warning: incompatible redeclaration of library function 'casinhf' [-Wincompatible-library-redeclaration]
int casinhf (void);
^
_configtest.c:6:5: note: 'casinhf' is a builtin with type '_Complex float (_Complex float)'
_configtest.c:7:5: warning: incompatible redeclaration of library function 'catanf' [-Wincompatible-library-redeclaration]
int catanf (void);
^
_configtest.c:7:5: note: 'catanf' is a builtin with type '_Complex float (_Complex float)'
_configtest.c:8:5: warning: incompatible redeclaration of library function 'catanhf' [-Wincompatible-library-redeclaration]
int catanhf (void);
^
_configtest.c:8:5: note: 'catanhf' is a builtin with type '_Complex float (_Complex float)'
_configtest.c:9:5: warning: incompatible redeclaration of library function 'ccosf' [-Wincompatible-library-redeclaration]
int ccosf (void);
^
_configtest.c:9:5: note: 'ccosf' is a builtin with type '_Complex float (_Complex float)'
_configtest.c:10:5: warning: incompatible redeclaration of library function 'ccoshf' [-Wincompatible-library-redeclaration]
int ccoshf (void);
^
_configtest.c:10:5: note: 'ccoshf' is a builtin with type '_Complex float (_Complex float)'
_configtest.c:11:5: warning: incompatible redeclaration of library function 'cexpf' [-Wincompatible-library-redeclaration]
int cexpf (void);
^
_configtest.c:11:5: note: 'cexpf' is a builtin with type '_Complex float (_Complex float)'
_configtest.c:12:5: warning: incompatible redeclaration of library function 'cimagf' [-Wincompatible-library-redeclaration]
int cimagf (void);
^
_configtest.c:12:5: note: 'cimagf' is a builtin with type 'float (_Complex float)'
_configtest.c:13:5: warning: incompatible redeclaration of library function 'clogf' [-Wincompatible-library-redeclaration]
int clogf (void);
^
_configtest.c:13:5: note: 'clogf' is a builtin with type '_Complex float (_Complex float)'
_configtest.c:14:5: warning: incompatible redeclaration of library function 'conjf' [-Wincompatible-library-redeclaration]
int conjf (void);
^
_configtest.c:14:5: note: 'conjf' is a builtin with type '_Complex float (_Complex float)'
_configtest.c:15:5: warning: incompatible redeclaration of library function 'cpowf' [-Wincompatible-library-redeclaration]
int cpowf (void);
^
_configtest.c:15:5: note: 'cpowf' is a builtin with type '_Complex float (_Complex float, _Complex float)'
_configtest.c:16:5: warning: incompatible redeclaration of library function 'cprojf' [-Wincompatible-library-redeclaration]
int cprojf (void);
^
_configtest.c:16:5: note: 'cprojf' is a builtin with type '_Complex float (_Complex float)'
_configtest.c:17:5: warning: incompatible redeclaration of library function 'crealf' [-Wincompatible-library-redeclaration]
int crealf (void);
^
_configtest.c:17:5: note: 'crealf' is a builtin with type 'float (_Complex float)'
_configtest.c:18:5: warning: incompatible redeclaration of library function 'csinf' [-Wincompatible-library-redeclaration]
int csinf (void);
^
_configtest.c:18:5: note: 'csinf' is a builtin with type '_Complex float (_Complex float)'
_configtest.c:19:5: warning: incompatible redeclaration of library function 'csinhf' [-Wincompatible-library-redeclaration]
int csinhf (void);
^
_configtest.c:19:5: note: 'csinhf' is a builtin with type '_Complex float (_Complex float)'
_configtest.c:20:5: warning: incompatible redeclaration of library function 'csqrtf' [-Wincompatible-library-redeclaration]
int csqrtf (void);
^
_configtest.c:20:5: note: 'csqrtf' is a builtin with type '_Complex float (_Complex float)'
_configtest.c:21:5: warning: incompatible redeclaration of library function 'ctanf' [-Wincompatible-library-redeclaration]
int ctanf (void);
^
_configtest.c:21:5: note: 'ctanf' is a builtin with type '_Complex float (_Complex float)'
_configtest.c:22:5: warning: incompatible redeclaration of library function 'ctanhf' [-Wincompatible-library-redeclaration]
int ctanhf (void);
^
_configtest.c:22:5: note: 'ctanhf' is a builtin with type '_Complex float (_Complex float)'
22 warnings generated.
clang _configtest.o -o _configtest
success!
removing: _configtest.c _configtest.o _configtest.o.d _configtest
C compiler: clang -Wno-unused-result -Wsign-compare -Wunreachable-code -fno-common -dynamic -DNDEBUG -g -fwrapv -O3 -Wall -isysroot /Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk -I/Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk/usr/include -I/Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk/System/Library/Frameworks/Tk.framework/Versions/8.5/Headers
compile options: '-Inumpy/core/src/common -Inumpy/core/src -Inumpy/core -Inumpy/core/src/npymath -Inumpy/core/src/multiarray -Inumpy/core/src/umath -Inumpy/core/src/npysort -I/usr/local/include -I/usr/local/opt/[email protected]/include -I/usr/local/opt/sqlite/include -I/Users/destiny/Downloads/env/include -I/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9 -c'
clang: _configtest.c
_configtest.c:1:5: warning: incompatible redeclaration of library function 'cabsl' [-Wincompatible-library-redeclaration]
int cabsl (void);
^
_configtest.c:1:5: note: 'cabsl' is a builtin with type 'long double (_Complex long double)'
_configtest.c:2:5: warning: incompatible redeclaration of library function 'cacosl' [-Wincompatible-library-redeclaration]
int cacosl (void);
^
_configtest.c:2:5: note: 'cacosl' is a builtin with type '_Complex long double (_Complex long double)'
_configtest.c:3:5: warning: incompatible redeclaration of library function 'cacoshl' [-Wincompatible-library-redeclaration]
int cacoshl (void);
^
_configtest.c:3:5: note: 'cacoshl' is a builtin with type '_Complex long double (_Complex long double)'
_configtest.c:4:5: warning: incompatible redeclaration of library function 'cargl' [-Wincompatible-library-redeclaration]
int cargl (void);
^
_configtest.c:4:5: note: 'cargl' is a builtin with type 'long double (_Complex long double)'
_configtest.c:5:5: warning: incompatible redeclaration of library function 'casinl' [-Wincompatible-library-redeclaration]
int casinl (void);
^
_configtest.c:5:5: note: 'casinl' is a builtin with type '_Complex long double (_Complex long double)'
_configtest.c:6:5: warning: incompatible redeclaration of library function 'casinhl' [-Wincompatible-library-redeclaration]
int casinhl (void);
^
_configtest.c:6:5: note: 'casinhl' is a builtin with type '_Complex long double (_Complex long double)'
_configtest.c:7:5: warning: incompatible redeclaration of library function 'catanl' [-Wincompatible-library-redeclaration]
int catanl (void);
^
_configtest.c:7:5: note: 'catanl' is a builtin with type '_Complex long double (_Complex long double)'
_configtest.c:8:5: warning: incompatible redeclaration of library function 'catanhl' [-Wincompatible-library-redeclaration]
int catanhl (void);
^
_configtest.c:8:5: note: 'catanhl' is a builtin with type '_Complex long double (_Complex long double)'
_configtest.c:9:5: warning: incompatible redeclaration of library function 'ccosl' [-Wincompatible-library-redeclaration]
int ccosl (void);
^
_configtest.c:9:5: note: 'ccosl' is a builtin with type '_Complex long double (_Complex long double)'
_configtest.c:10:5: warning: incompatible redeclaration of library function 'ccoshl' [-Wincompatible-library-redeclaration]
int ccoshl (void);
^
_configtest.c:10:5: note: 'ccoshl' is a builtin with type '_Complex long double (_Complex long double)'
_configtest.c:11:5: warning: incompatible redeclaration of library function 'cexpl' [-Wincompatible-library-redeclaration]
int cexpl (void);
^
_configtest.c:11:5: note: 'cexpl' is a builtin with type '_Complex long double (_Complex long double)'
_configtest.c:12:5: warning: incompatible redeclaration of library function 'cimagl' [-Wincompatible-library-redeclaration]
int cimagl (void);
^
_configtest.c:12:5: note: 'cimagl' is a builtin with type 'long double (_Complex long double)'
_configtest.c:13:5: warning: incompatible redeclaration of library function 'clogl' [-Wincompatible-library-redeclaration]
int clogl (void);
^
_configtest.c:13:5: note: 'clogl' is a builtin with type '_Complex long double (_Complex long double)'
_configtest.c:14:5: warning: incompatible redeclaration of library function 'conjl' [-Wincompatible-library-redeclaration]
int conjl (void);
^
_configtest.c:14:5: note: 'conjl' is a builtin with type '_Complex long double (_Complex long double)'
_configtest.c:15:5: warning: incompatible redeclaration of library function 'cpowl' [-Wincompatible-library-redeclaration]
int cpowl (void);
^
_configtest.c:15:5: note: 'cpowl' is a builtin with type '_Complex long double (_Complex long double, _Complex long double)'
_configtest.c:16:5: warning: incompatible redeclaration of library function 'cprojl' [-Wincompatible-library-redeclaration]
int cprojl (void);
^
_configtest.c:16:5: note: 'cprojl' is a builtin with type '_Complex long double (_Complex long double)'
_configtest.c:17:5: warning: incompatible redeclaration of library function 'creall' [-Wincompatible-library-redeclaration]
int creall (void);
^
_configtest.c:17:5: note: 'creall' is a builtin with type 'long double (_Complex long double)'
_configtest.c:18:5: warning: incompatible redeclaration of library function 'csinl' [-Wincompatible-library-redeclaration]
int csinl (void);
^
_configtest.c:18:5: note: 'csinl' is a builtin with type '_Complex long double (_Complex long double)'
_configtest.c:19:5: warning: incompatible redeclaration of library function 'csinhl' [-Wincompatible-library-redeclaration]
int csinhl (void);
^
_configtest.c:19:5: note: 'csinhl' is a builtin with type '_Complex long double (_Complex long double)'
_configtest.c:20:5: warning: incompatible redeclaration of library function 'csqrtl' [-Wincompatible-library-redeclaration]
int csqrtl (void);
^
_configtest.c:20:5: note: 'csqrtl' is a builtin with type '_Complex long double (_Complex long double)'
_configtest.c:21:5: warning: incompatible redeclaration of library function 'ctanl' [-Wincompatible-library-redeclaration]
int ctanl (void);
^
_configtest.c:21:5: note: 'ctanl' is a builtin with type '_Complex long double (_Complex long double)'
_configtest.c:22:5: warning: incompatible redeclaration of library function 'ctanhl' [-Wincompatible-library-redeclaration]
int ctanhl (void);
^
_configtest.c:22:5: note: 'ctanhl' is a builtin with type '_Complex long double (_Complex long double)'
22 warnings generated.
clang _configtest.o -o _configtest
success!
removing: _configtest.c _configtest.o _configtest.o.d _configtest
C compiler: clang -Wno-unused-result -Wsign-compare -Wunreachable-code -fno-common -dynamic -DNDEBUG -g -fwrapv -O3 -Wall -isysroot /Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk -I/Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk/usr/include -I/Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk/System/Library/Frameworks/Tk.framework/Versions/8.5/Headers
compile options: '-Inumpy/core/src/common -Inumpy/core/src -Inumpy/core -Inumpy/core/src/npymath -Inumpy/core/src/multiarray -Inumpy/core/src/umath -Inumpy/core/src/npysort -I/usr/local/include -I/usr/local/opt/[email protected]/include -I/usr/local/opt/sqlite/include -I/Users/destiny/Downloads/env/include -I/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9 -c'
clang: _configtest.c
_configtest.c:2:12: warning: unused function 'static_func' [-Wunused-function]
static int static_func (char * restrict a)
^
1 warning generated.
success!
removing: _configtest.c _configtest.o _configtest.o.d
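This probe (and the `inline` one just below it) checks compiler keyword support rather than library symbols; only compilation matters, so the function is left uncalled, which is what produces the unused-function warning. Success becomes `#define NPY_RESTRICT restrict` in the config.h dump further down. A sketch, with a `main` added only to make it runnable:

```c
/* The restrict-qualified parameter is the whole test; if this compiles,
 * the compiler supports C99 restrict. */
static int static_func (char * restrict a)
{
    return (int) a[0];
}

int main (void)
{
    char c = 42;
    return static_func (&c) == 42 ? 0 : 1;
}
```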
C compiler: clang -Wno-unused-result -Wsign-compare -Wunreachable-code -fno-common -dynamic -DNDEBUG -g -fwrapv -O3 -Wall -isysroot /Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk -I/Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk/usr/include -I/Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk/System/Library/Frameworks/Tk.framework/Versions/8.5/Headers
compile options: '-Inumpy/core/src/common -Inumpy/core/src -Inumpy/core -Inumpy/core/src/npymath -Inumpy/core/src/multiarray -Inumpy/core/src/umath -Inumpy/core/src/npysort -I/usr/local/include -I/usr/local/opt/[email protected]/include -I/usr/local/opt/sqlite/include -I/Users/destiny/Downloads/env/include -I/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9 -c'
clang: _configtest.c
_configtest.c:3:19: warning: unused function 'static_func' [-Wunused-function]
static inline int static_func (void)
^
1 warning generated.
success!
removing: _configtest.c _configtest.o _configtest.o.d
C compiler: clang -Wno-unused-result -Wsign-compare -Wunreachable-code -fno-common -dynamic -DNDEBUG -g -fwrapv -O3 -Wall -isysroot /Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk -I/Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk/usr/include -I/Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk/System/Library/Frameworks/Tk.framework/Versions/8.5/Headers
compile options: '-Inumpy/core/src/common -Inumpy/core/src -Inumpy/core -Inumpy/core/src/npymath -Inumpy/core/src/multiarray -Inumpy/core/src/umath -Inumpy/core/src/npysort -I/usr/local/include -I/usr/local/opt/[email protected]/include -I/usr/local/opt/sqlite/include -I/Users/destiny/Downloads/env/include -I/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9 -c'
clang: _configtest.c
removing: _configtest.c _configtest.o _configtest.o.d
File: build/src.macosx-10.15-x86_64-3.9/numpy/core/include/numpy/config.h
#define SIZEOF_PY_INTPTR_T 8
#define SIZEOF_OFF_T 8
#define SIZEOF_PY_LONG_LONG 8
#define MATHLIB
#define HAVE_SIN 1
#define HAVE_COS 1
#define HAVE_TAN 1
#define HAVE_SINH 1
#define HAVE_COSH 1
#define HAVE_TANH 1
#define HAVE_FABS 1
#define HAVE_FLOOR 1
#define HAVE_CEIL 1
#define HAVE_SQRT 1
#define HAVE_LOG10 1
#define HAVE_LOG 1
#define HAVE_EXP 1
#define HAVE_ASIN 1
#define HAVE_ACOS 1
#define HAVE_ATAN 1
#define HAVE_FMOD 1
#define HAVE_MODF 1
#define HAVE_FREXP 1
#define HAVE_LDEXP 1
#define HAVE_RINT 1
#define HAVE_TRUNC 1
#define HAVE_EXP2 1
#define HAVE_LOG2 1
#define HAVE_ATAN2 1
#define HAVE_POW 1
#define HAVE_NEXTAFTER 1
#define HAVE_STRTOLL 1
#define HAVE_STRTOULL 1
#define HAVE_CBRT 1
#define HAVE_STRTOLD_L 1
#define HAVE_BACKTRACE 1
#define HAVE_MADVISE 1
#define HAVE_XMMINTRIN_H 1
#define HAVE_EMMINTRIN_H 1
#define HAVE_XLOCALE_H 1
#define HAVE_DLFCN_H 1
#define HAVE_SYS_MMAN_H 1
#define HAVE___BUILTIN_ISNAN 1
#define HAVE___BUILTIN_ISINF 1
#define HAVE___BUILTIN_ISFINITE 1
#define HAVE___BUILTIN_BSWAP32 1
#define HAVE___BUILTIN_BSWAP64 1
#define HAVE___BUILTIN_EXPECT 1
#define HAVE___BUILTIN_MUL_OVERFLOW 1
#define HAVE___BUILTIN_CPU_SUPPORTS 1
#define HAVE__M_FROM_INT64 1
#define HAVE__MM_LOAD_PS 1
#define HAVE__MM_PREFETCH 1
#define HAVE__MM_LOAD_PD 1
#define HAVE___BUILTIN_PREFETCH 1
#define HAVE_LINK_AVX 1
#define HAVE_LINK_AVX2 1
#define HAVE_XGETBV 1
#define HAVE_ATTRIBUTE_NONNULL 1
#define HAVE_ATTRIBUTE_TARGET_AVX 1
#define HAVE_ATTRIBUTE_TARGET_AVX2 1
#define HAVE___THREAD 1
#define HAVE_SINF 1
#define HAVE_COSF 1
#define HAVE_TANF 1
#define HAVE_SINHF 1
#define HAVE_COSHF 1
#define HAVE_TANHF 1
#define HAVE_FABSF 1
#define HAVE_FLOORF 1
#define HAVE_CEILF 1
#define HAVE_RINTF 1
#define HAVE_TRUNCF 1
#define HAVE_SQRTF 1
#define HAVE_LOG10F 1
#define HAVE_LOGF 1
#define HAVE_LOG1PF 1
#define HAVE_EXPF 1
#define HAVE_EXPM1F 1
#define HAVE_ASINF 1
#define HAVE_ACOSF 1
#define HAVE_ATANF 1
#define HAVE_ASINHF 1
#define HAVE_ACOSHF 1
#define HAVE_ATANHF 1
#define HAVE_HYPOTF 1
#define HAVE_ATAN2F 1
#define HAVE_POWF 1
#define HAVE_FMODF 1
#define HAVE_MODFF 1
#define HAVE_FREXPF 1
#define HAVE_LDEXPF 1
#define HAVE_EXP2F 1
#define HAVE_LOG2F 1
#define HAVE_COPYSIGNF 1
#define HAVE_NEXTAFTERF 1
#define HAVE_CBRTF 1
#define HAVE_SINL 1
#define HAVE_COSL 1
#define HAVE_TANL 1
#define HAVE_SINHL 1
#define HAVE_COSHL 1
#define HAVE_TANHL 1
#define HAVE_FABSL 1
#define HAVE_FLOORL 1
#define HAVE_CEILL 1
#define HAVE_RINTL 1
#define HAVE_TRUNCL 1
#define HAVE_SQRTL 1
#define HAVE_LOG10L 1
#define HAVE_LOGL 1
#define HAVE_LOG1PL 1
#define HAVE_EXPL 1
#define HAVE_EXPM1L 1
#define HAVE_ASINL 1
#define HAVE_ACOSL 1
#define HAVE_ATANL 1
#define HAVE_ASINHL 1
#define HAVE_ACOSHL 1
#define HAVE_ATANHL 1
#define HAVE_HYPOTL 1
#define HAVE_ATAN2L 1
#define HAVE_POWL 1
#define HAVE_FMODL 1
#define HAVE_MODFL 1
#define HAVE_FREXPL 1
#define HAVE_LDEXPL 1
#define HAVE_EXP2L 1
#define HAVE_LOG2L 1
#define HAVE_COPYSIGNL 1
#define HAVE_NEXTAFTERL 1
#define HAVE_CBRTL 1
#define HAVE_DECL_SIGNBIT
#define HAVE_COMPLEX_H 1
#define HAVE_CABS 1
#define HAVE_CACOS 1
#define HAVE_CACOSH 1
#define HAVE_CARG 1
#define HAVE_CASIN 1
#define HAVE_CASINH 1
#define HAVE_CATAN 1
#define HAVE_CATANH 1
#define HAVE_CCOS 1
#define HAVE_CCOSH 1
#define HAVE_CEXP 1
#define HAVE_CIMAG 1
#define HAVE_CLOG 1
#define HAVE_CONJ 1
#define HAVE_CPOW 1
#define HAVE_CPROJ 1
#define HAVE_CREAL 1
#define HAVE_CSIN 1
#define HAVE_CSINH 1
#define HAVE_CSQRT 1
#define HAVE_CTAN 1
#define HAVE_CTANH 1
#define HAVE_CABSF 1
#define HAVE_CACOSF 1
#define HAVE_CACOSHF 1
#define HAVE_CARGF 1
#define HAVE_CASINF 1
#define HAVE_CASINHF 1
#define HAVE_CATANF 1
#define HAVE_CATANHF 1
#define HAVE_CCOSF 1
#define HAVE_CCOSHF 1
#define HAVE_CEXPF 1
#define HAVE_CIMAGF 1
#define HAVE_CLOGF 1
#define HAVE_CONJF 1
#define HAVE_CPOWF 1
#define HAVE_CPROJF 1
#define HAVE_CREALF 1
#define HAVE_CSINF 1
#define HAVE_CSINHF 1
#define HAVE_CSQRTF 1
#define HAVE_CTANF 1
#define HAVE_CTANHF 1
#define HAVE_CABSL 1
#define HAVE_CACOSL 1
#define HAVE_CACOSHL 1
#define HAVE_CARGL 1
#define HAVE_CASINL 1
#define HAVE_CASINHL 1
#define HAVE_CATANL 1
#define HAVE_CATANHL 1
#define HAVE_CCOSL 1
#define HAVE_CCOSHL 1
#define HAVE_CEXPL 1
#define HAVE_CIMAGL 1
#define HAVE_CLOGL 1
#define HAVE_CONJL 1
#define HAVE_CPOWL 1
#define HAVE_CPROJL 1
#define HAVE_CREALL 1
#define HAVE_CSINL 1
#define HAVE_CSINHL 1
#define HAVE_CSQRTL 1
#define HAVE_CTANL 1
#define HAVE_CTANHL 1
#define NPY_RESTRICT restrict
#define NPY_RELAXED_STRIDES_CHECKING 1
#define HAVE_LDOUBLE_INTEL_EXTENDED_16_BYTES_LE 1
#define NPY_PY3K 1
#ifndef __cplusplus
/* #undef inline */
#endif
#ifndef _NPY_NPY_CONFIG_H_
#error config.h should never be included directly, include npy_config.h instead
#endif
EOF
adding 'build/src.macosx-10.15-x86_64-3.9/numpy/core/include/numpy/config.h' to sources.
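The `config.h` just dumped is the private record of every probe result; its own trailing `#error` guard insists it is only reached through `npy_config.h`. A sketch of how numpy's C sources typically consume such macros (an illustration that compiles only inside numpy's source tree, not a quote from the real sources; `my_cbrtl` is a hypothetical helper):

```c
#include "npy_config.h"   /* pulls in config.h; never include it directly */
#include <math.h>

static long double
my_cbrtl (long double x)
{
#ifdef HAVE_CBRTL
    return cbrtl (x);                /* probe succeeded: use libm */
#else
    return powl (x, 1.0L / 3.0L);    /* fallback (valid for x >= 0) */
#endif
}
```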
Generating build/src.macosx-10.15-x86_64-3.9/numpy/core/include/numpy/_numpyconfig.h
C compiler: clang -Wno-unused-result -Wsign-compare -Wunreachable-code -fno-common -dynamic -DNDEBUG -g -fwrapv -O3 -Wall -isysroot /Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk -I/Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk/usr/include -I/Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk/System/Library/Frameworks/Tk.framework/Versions/8.5/Headers
compile options: '-Inumpy/core/src/common -Inumpy/core/src -Inumpy/core -Inumpy/core/src/npymath -Inumpy/core/src/multiarray -Inumpy/core/src/umath -Inumpy/core/src/npysort -I/usr/local/include -I/usr/local/opt/[email protected]/include -I/usr/local/opt/sqlite/include -I/Users/destiny/Downloads/env/include -I/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9 -c'
clang: _configtest.c
_configtest.c:1:5: warning: incompatible redeclaration of library function 'exp' [-Wincompatible-library-redeclaration]
int exp (void);
^
_configtest.c:1:5: note: 'exp' is a builtin with type 'double (double)'
1 warning generated.
clang _configtest.o -o _configtest
success!
removing: _configtest.c _configtest.o _configtest.o.d _configtest
C compiler: clang -Wno-unused-result -Wsign-compare -Wunreachable-code -fno-common -dynamic -DNDEBUG -g -fwrapv -O3 -Wall -isysroot /Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk -I/Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk/usr/include -I/Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk/System/Library/Frameworks/Tk.framework/Versions/8.5/Headers
compile options: '-Inumpy/core/src/common -Inumpy/core/src -Inumpy/core -Inumpy/core/src/npymath -Inumpy/core/src/multiarray -Inumpy/core/src/umath -Inumpy/core/src/npysort -I/usr/local/include -I/usr/local/opt/[email protected]/include -I/usr/local/opt/sqlite/include -I/Users/destiny/Downloads/env/include -I/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9 -c'
clang: _configtest.c
success!
removing: _configtest.c _configtest.o _configtest.o.d
C compiler: clang -Wno-unused-result -Wsign-compare -Wunreachable-code -fno-common -dynamic -DNDEBUG -g -fwrapv -O3 -Wall -isysroot /Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk -I/Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk/usr/include -I/Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk/System/Library/Frameworks/Tk.framework/Versions/8.5/Headers
compile options: '-Inumpy/core/src/common -Inumpy/core/src -Inumpy/core -Inumpy/core/src/npymath -Inumpy/core/src/multiarray -Inumpy/core/src/umath -Inumpy/core/src/npysort -I/usr/local/include -I/usr/local/opt/[email protected]/include -I/usr/local/opt/sqlite/include -I/Users/destiny/Downloads/env/include -I/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9 -c'
clang: _configtest.c
success!
removing: _configtest.c _configtest.o _configtest.o.d
File: build/src.macosx-10.15-x86_64-3.9/numpy/core/include/numpy/_numpyconfig.h
#define NPY_SIZEOF_SHORT SIZEOF_SHORT
#define NPY_SIZEOF_INT SIZEOF_INT
#define NPY_SIZEOF_LONG SIZEOF_LONG
#define NPY_SIZEOF_FLOAT 4
#define NPY_SIZEOF_COMPLEX_FLOAT 8
#define NPY_SIZEOF_DOUBLE 8
#define NPY_SIZEOF_COMPLEX_DOUBLE 16
#define NPY_SIZEOF_LONGDOUBLE 16
#define NPY_SIZEOF_COMPLEX_LONGDOUBLE 32
#define NPY_SIZEOF_PY_INTPTR_T 8
#define NPY_SIZEOF_OFF_T 8
#define NPY_SIZEOF_PY_LONG_LONG 8
#define NPY_SIZEOF_LONGLONG 8
#define NPY_NO_SMP 0
#define NPY_HAVE_DECL_ISNAN
#define NPY_HAVE_DECL_ISINF
#define NPY_HAVE_DECL_ISFINITE
#define NPY_HAVE_DECL_SIGNBIT
#define NPY_USE_C99_COMPLEX 1
#define NPY_HAVE_COMPLEX_DOUBLE 1
#define NPY_HAVE_COMPLEX_FLOAT 1
#define NPY_HAVE_COMPLEX_LONG_DOUBLE 1
#define NPY_RELAXED_STRIDES_CHECKING 1
#define NPY_USE_C99_FORMATS 1
#define NPY_VISIBILITY_HIDDEN __attribute__((visibility("hidden")))
#define NPY_ABI_VERSION 0x01000009
#define NPY_API_VERSION 0x0000000D
#ifndef __STDC_FORMAT_MACROS
#define __STDC_FORMAT_MACROS 1
#endif
EOF
adding 'build/src.macosx-10.15-x86_64-3.9/numpy/core/include/numpy/_numpyconfig.h' to sources.
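Unlike the private `config.h`, `_numpyconfig.h` is installed with the numpy headers, so its `NPY_*` macros (sizes, `NPY_ABI_VERSION`, `NPY_API_VERSION`) are visible to third-party extensions. Assuming the usual include chain through `numpy/npy_common.h`, a compile-time check against the values dumped above might look like:

```c
#include <numpy/npy_common.h>   /* pulls in numpy/_numpyconfig.h */

#if NPY_SIZEOF_LONGDOUBLE != 16
#error "expected the 16-byte extended long double recorded in this build"
#endif

int main (void) { return 0; }
```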
executing numpy/core/code_generators/generate_numpy_api.py
adding 'build/src.macosx-10.15-x86_64-3.9/numpy/core/include/numpy/__multiarray_api.h' to sources.
numpy.core - nothing done with h_files = ['build/src.macosx-10.15-x86_64-3.9/numpy/core/include/numpy/config.h', 'build/src.macosx-10.15-x86_64-3.9/numpy/core/include/numpy/_numpyconfig.h', 'build/src.macosx-10.15-x86_64-3.9/numpy/core/include/numpy/__multiarray_api.h']
building extension "numpy.core._multiarray_tests" sources
creating build/src.macosx-10.15-x86_64-3.9/numpy/core/src/multiarray
conv_template:> build/src.macosx-10.15-x86_64-3.9/numpy/core/src/multiarray/_multiarray_tests.c
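Each `conv_template:>` line runs numpy's home-grown templating step: a `.c.src` file is expanded into plain C, emitting every `begin repeat` block once per value of its template variables, with `@name@` placeholders substituted. That is how a single source file covers all dtypes. A sketch of the template syntax (template input for conv_template, not directly compilable C; the function name is made up for illustration):

```c
/**begin repeat
 * #type = npy_float, npy_double, npy_longdouble#
 * #TYPE = FLOAT, DOUBLE, LONGDOUBLE#
 */
/* Expanded three times, once per #type#/#TYPE# pair. */
static @type@
demo_add_@TYPE@ (@type@ a, @type@ b)
{
    return a + b;
}
/**end repeat**/
```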
building extension "numpy.core._multiarray_umath" sources
adding 'build/src.macosx-10.15-x86_64-3.9/numpy/core/include/numpy/config.h' to sources.
adding 'build/src.macosx-10.15-x86_64-3.9/numpy/core/include/numpy/_numpyconfig.h' to sources.
executing numpy/core/code_generators/generate_numpy_api.py
adding 'build/src.macosx-10.15-x86_64-3.9/numpy/core/include/numpy/__multiarray_api.h' to sources.
executing numpy/core/code_generators/generate_ufunc_api.py
adding 'build/src.macosx-10.15-x86_64-3.9/numpy/core/include/numpy/__ufunc_api.h' to sources.
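The two code generators above emit `__multiarray_api.h` and `__ufunc_api.h`, the function-pointer tables behind numpy's C API. Extensions bind the table at runtime; a minimal sketch of a consumer (the module name `demo` is made up):

```c
#define PY_SSIZE_T_CLEAN
#include <Python.h>
#include <numpy/arrayobject.h>

static struct PyModuleDef demo_def = {
    PyModuleDef_HEAD_INIT, "demo", NULL, -1, NULL
};

PyMODINIT_FUNC
PyInit_demo (void)
{
    import_array ();   /* loads the generated API table; returns NULL on failure */
    return PyModule_Create (&demo_def);
}
```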
conv_template:> build/src.macosx-10.15-x86_64-3.9/numpy/core/src/multiarray/arraytypes.c
conv_template:> build/src.macosx-10.15-x86_64-3.9/numpy/core/src/multiarray/einsum.c
conv_template:> build/src.macosx-10.15-x86_64-3.9/numpy/core/src/multiarray/lowlevel_strided_loops.c
conv_template:> build/src.macosx-10.15-x86_64-3.9/numpy/core/src/multiarray/nditer_templ.c
conv_template:> build/src.macosx-10.15-x86_64-3.9/numpy/core/src/multiarray/scalartypes.c
creating build/src.macosx-10.15-x86_64-3.9/numpy/core/src/umath
conv_template:> build/src.macosx-10.15-x86_64-3.9/numpy/core/src/umath/funcs.inc
adding 'build/src.macosx-10.15-x86_64-3.9/numpy/core/src/umath' to include_dirs.
conv_template:> build/src.macosx-10.15-x86_64-3.9/numpy/core/src/umath/simd.inc
conv_template:> build/src.macosx-10.15-x86_64-3.9/numpy/core/src/umath/loops.h
conv_template:> build/src.macosx-10.15-x86_64-3.9/numpy/core/src/umath/loops.c
conv_template:> build/src.macosx-10.15-x86_64-3.9/numpy/core/src/umath/matmul.h
conv_template:> build/src.macosx-10.15-x86_64-3.9/numpy/core/src/umath/matmul.c
conv_template:> build/src.macosx-10.15-x86_64-3.9/numpy/core/src/umath/scalarmath.c
adding 'build/src.macosx-10.15-x86_64-3.9/numpy/core/src/npymath' to include_dirs.
conv_template:> build/src.macosx-10.15-x86_64-3.9/numpy/core/src/common/templ_common.h
adding 'build/src.macosx-10.15-x86_64-3.9/numpy/core/src/common' to include_dirs.
numpy.core - nothing done with h_files = ['build/src.macosx-10.15-x86_64-3.9/numpy/core/src/umath/funcs.inc', 'build/src.macosx-10.15-x86_64-3.9/numpy/core/src/umath/simd.inc', 'build/src.macosx-10.15-x86_64-3.9/numpy/core/src/umath/loops.h', 'build/src.macosx-10.15-x86_64-3.9/numpy/core/src/umath/matmul.h', 'build/src.macosx-10.15-x86_64-3.9/numpy/core/src/npymath/npy_math_internal.h', 'build/src.macosx-10.15-x86_64-3.9/numpy/core/src/common/templ_common.h', 'build/src.macosx-10.15-x86_64-3.9/numpy/core/include/numpy/config.h', 'build/src.macosx-10.15-x86_64-3.9/numpy/core/include/numpy/_numpyconfig.h', 'build/src.macosx-10.15-x86_64-3.9/numpy/core/include/numpy/__multiarray_api.h', 'build/src.macosx-10.15-x86_64-3.9/numpy/core/include/numpy/__ufunc_api.h']
building extension "numpy.core._umath_tests" sources
conv_template:> build/src.macosx-10.15-x86_64-3.9/numpy/core/src/umath/_umath_tests.c
building extension "numpy.core._rational_tests" sources
conv_template:> build/src.macosx-10.15-x86_64-3.9/numpy/core/src/umath/_rational_tests.c
building extension "numpy.core._struct_ufunc_tests" sources
conv_template:> build/src.macosx-10.15-x86_64-3.9/numpy/core/src/umath/_struct_ufunc_tests.c
building extension "numpy.core._operand_flag_tests" sources
conv_template:> build/src.macosx-10.15-x86_64-3.9/numpy/core/src/umath/_operand_flag_tests.c
building extension "numpy.fft.fftpack_lite" sources
building extension "numpy.linalg.lapack_lite" sources
creating build/src.macosx-10.15-x86_64-3.9/numpy/linalg
adding 'numpy/linalg/lapack_lite/python_xerbla.c' to sources.
building extension "numpy.linalg._umath_linalg" sources
adding 'numpy/linalg/lapack_lite/python_xerbla.c' to sources.
conv_template:> build/src.macosx-10.15-x86_64-3.9/numpy/linalg/umath_linalg.c
building extension "numpy.random.mtrand" sources
creating build/src.macosx-10.15-x86_64-3.9/numpy/random
building data_files sources
build_src: building npy-pkg config files
running build_py
creating build/lib.macosx-10.15-x86_64-3.9
creating build/lib.macosx-10.15-x86_64-3.9/numpy
copying numpy/conftest.py -> build/lib.macosx-10.15-x86_64-3.9/numpy
copying numpy/version.py -> build/lib.macosx-10.15-x86_64-3.9/numpy
copying numpy/_globals.py -> build/lib.macosx-10.15-x86_64-3.9/numpy
copying numpy/__init__.py -> build/lib.macosx-10.15-x86_64-3.9/numpy
copying numpy/dual.py -> build/lib.macosx-10.15-x86_64-3.9/numpy
copying numpy/_distributor_init.py -> build/lib.macosx-10.15-x86_64-3.9/numpy
copying numpy/setup.py -> build/lib.macosx-10.15-x86_64-3.9/numpy
copying numpy/ctypeslib.py -> build/lib.macosx-10.15-x86_64-3.9/numpy
copying numpy/matlib.py -> build/lib.macosx-10.15-x86_64-3.9/numpy
copying numpy/_pytesttester.py -> build/lib.macosx-10.15-x86_64-3.9/numpy
copying build/src.macosx-10.15-x86_64-3.9/numpy/__config__.py -> build/lib.macosx-10.15-x86_64-3.9/numpy
creating build/lib.macosx-10.15-x86_64-3.9/numpy/compat
copying numpy/compat/py3k.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/compat
copying numpy/compat/__init__.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/compat
copying numpy/compat/setup.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/compat
copying numpy/compat/_inspect.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/compat
creating build/lib.macosx-10.15-x86_64-3.9/numpy/core
copying numpy/core/umath.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/core
copying numpy/core/fromnumeric.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/core
copying numpy/core/_dtype.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/core
copying numpy/core/_add_newdocs.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/core
copying numpy/core/_methods.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/core
copying numpy/core/_internal.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/core
copying numpy/core/_string_helpers.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/core
copying numpy/core/multiarray.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/core
copying numpy/core/records.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/core
copying numpy/core/__init__.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/core
copying numpy/core/setup_common.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/core
copying numpy/core/_aliased_types.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/core
copying numpy/core/memmap.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/core
copying numpy/core/overrides.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/core
copying numpy/core/getlimits.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/core
copying numpy/core/_dtype_ctypes.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/core
copying numpy/core/defchararray.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/core
copying numpy/core/shape_base.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/core
copying numpy/core/machar.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/core
copying numpy/core/setup.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/core
copying numpy/core/numeric.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/core
copying numpy/core/function_base.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/core
copying numpy/core/einsumfunc.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/core
copying numpy/core/umath_tests.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/core
copying numpy/core/info.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/core
copying numpy/core/numerictypes.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/core
copying numpy/core/_type_aliases.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/core
copying numpy/core/cversions.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/core
copying numpy/core/arrayprint.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/core
copying numpy/core/code_generators/generate_numpy_api.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/core
creating build/lib.macosx-10.15-x86_64-3.9/numpy/distutils
copying numpy/distutils/unixccompiler.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/distutils
copying numpy/distutils/numpy_distribution.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/distutils
copying numpy/distutils/conv_template.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/distutils
copying numpy/distutils/cpuinfo.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/distutils
copying numpy/distutils/ccompiler.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/distutils
copying numpy/distutils/msvc9compiler.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/distutils
copying numpy/distutils/npy_pkg_config.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/distutils
copying numpy/distutils/compat.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/distutils
copying numpy/distutils/misc_util.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/distutils
copying numpy/distutils/log.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/distutils
copying numpy/distutils/line_endings.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/distutils
copying numpy/distutils/lib2def.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/distutils
copying numpy/distutils/pathccompiler.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/distutils
copying numpy/distutils/system_info.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/distutils
copying numpy/distutils/__init__.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/distutils
copying numpy/distutils/core.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/distutils
copying numpy/distutils/__version__.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/distutils
copying numpy/distutils/exec_command.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/distutils
copying numpy/distutils/from_template.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/distutils
copying numpy/distutils/mingw32ccompiler.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/distutils
copying numpy/distutils/setup.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/distutils
copying numpy/distutils/extension.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/distutils
copying numpy/distutils/msvccompiler.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/distutils
copying numpy/distutils/intelccompiler.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/distutils
copying numpy/distutils/info.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/distutils
copying build/src.macosx-10.15-x86_64-3.9/numpy/distutils/__config__.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/distutils
creating build/lib.macosx-10.15-x86_64-3.9/numpy/distutils/command
copying numpy/distutils/command/build.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/distutils/command
copying numpy/distutils/command/config_compiler.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/distutils/command
copying numpy/distutils/command/build_ext.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/distutils/command
copying numpy/distutils/command/config.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/distutils/command
copying numpy/distutils/command/install_headers.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/distutils/command
copying numpy/distutils/command/build_py.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/distutils/command
copying numpy/distutils/command/build_src.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/distutils/command
copying numpy/distutils/command/__init__.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/distutils/command
copying numpy/distutils/command/sdist.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/distutils/command
copying numpy/distutils/command/build_scripts.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/distutils/command
copying numpy/distutils/command/bdist_rpm.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/distutils/command
copying numpy/distutils/command/install_clib.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/distutils/command
copying numpy/distutils/command/build_clib.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/distutils/command
copying numpy/distutils/command/autodist.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/distutils/command
copying numpy/distutils/command/egg_info.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/distutils/command
copying numpy/distutils/command/install.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/distutils/command
copying numpy/distutils/command/develop.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/distutils/command
copying numpy/distutils/command/install_data.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/distutils/command
creating build/lib.macosx-10.15-x86_64-3.9/numpy/distutils/fcompiler
copying numpy/distutils/fcompiler/gnu.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/distutils/fcompiler
copying numpy/distutils/fcompiler/compaq.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/distutils/fcompiler
copying numpy/distutils/fcompiler/intel.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/distutils/fcompiler
copying numpy/distutils/fcompiler/none.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/distutils/fcompiler
copying numpy/distutils/fcompiler/nag.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/distutils/fcompiler
copying numpy/distutils/fcompiler/pg.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/distutils/fcompiler
copying numpy/distutils/fcompiler/ibm.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/distutils/fcompiler
copying numpy/distutils/fcompiler/sun.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/distutils/fcompiler
copying numpy/distutils/fcompiler/lahey.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/distutils/fcompiler
copying numpy/distutils/fcompiler/__init__.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/distutils/fcompiler
copying numpy/distutils/fcompiler/g95.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/distutils/fcompiler
copying numpy/distutils/fcompiler/mips.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/distutils/fcompiler
copying numpy/distutils/fcompiler/hpux.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/distutils/fcompiler
copying numpy/distutils/fcompiler/environment.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/distutils/fcompiler
copying numpy/distutils/fcompiler/pathf95.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/distutils/fcompiler
copying numpy/distutils/fcompiler/absoft.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/distutils/fcompiler
copying numpy/distutils/fcompiler/vast.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/distutils/fcompiler
creating build/lib.macosx-10.15-x86_64-3.9/numpy/doc
copying numpy/doc/misc.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/doc
copying numpy/doc/internals.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/doc
copying numpy/doc/creation.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/doc
copying numpy/doc/constants.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/doc
copying numpy/doc/ufuncs.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/doc
copying numpy/doc/__init__.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/doc
copying numpy/doc/broadcasting.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/doc
copying numpy/doc/basics.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/doc
copying numpy/doc/subclassing.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/doc
copying numpy/doc/indexing.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/doc
copying numpy/doc/byteswapping.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/doc
copying numpy/doc/structured_arrays.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/doc
copying numpy/doc/glossary.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/doc
creating build/lib.macosx-10.15-x86_64-3.9/numpy/f2py
copying numpy/f2py/cfuncs.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/f2py
copying numpy/f2py/common_rules.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/f2py
copying numpy/f2py/crackfortran.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/f2py
copying numpy/f2py/cb_rules.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/f2py
copying numpy/f2py/__init__.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/f2py
copying numpy/f2py/rules.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/f2py
copying numpy/f2py/f2py2e.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/f2py
copying numpy/f2py/func2subr.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/f2py
copying numpy/f2py/__version__.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/f2py
copying numpy/f2py/diagnose.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/f2py
copying numpy/f2py/setup.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/f2py
copying numpy/f2py/capi_maps.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/f2py
copying numpy/f2py/f90mod_rules.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/f2py
copying numpy/f2py/f2py_testing.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/f2py
copying numpy/f2py/use_rules.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/f2py
copying numpy/f2py/info.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/f2py
copying numpy/f2py/auxfuncs.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/f2py
copying numpy/f2py/__main__.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/f2py
creating build/lib.macosx-10.15-x86_64-3.9/numpy/fft
copying numpy/fft/__init__.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/fft
copying numpy/fft/setup.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/fft
copying numpy/fft/helper.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/fft
copying numpy/fft/fftpack.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/fft
copying numpy/fft/info.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/fft
creating build/lib.macosx-10.15-x86_64-3.9/numpy/lib
copying numpy/lib/_iotools.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/lib
copying numpy/lib/mixins.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/lib
copying numpy/lib/nanfunctions.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/lib
copying numpy/lib/recfunctions.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/lib
copying numpy/lib/histograms.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/lib
copying numpy/lib/scimath.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/lib
copying numpy/lib/_version.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/lib
copying numpy/lib/user_array.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/lib
copying numpy/lib/__init__.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/lib
copying numpy/lib/format.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/lib
copying numpy/lib/twodim_base.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/lib
copying numpy/lib/financial.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/lib
copying numpy/lib/index_tricks.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/lib
copying numpy/lib/npyio.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/lib
copying numpy/lib/shape_base.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/lib
copying numpy/lib/setup.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/lib
copying numpy/lib/stride_tricks.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/lib
copying numpy/lib/utils.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/lib
copying numpy/lib/arrayterator.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/lib
copying numpy/lib/function_base.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/lib
copying numpy/lib/arraysetops.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/lib
copying numpy/lib/arraypad.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/lib
copying numpy/lib/type_check.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/lib
copying numpy/lib/info.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/lib
copying numpy/lib/polynomial.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/lib
copying numpy/lib/_datasource.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/lib
copying numpy/lib/ufunclike.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/lib
creating build/lib.macosx-10.15-x86_64-3.9/numpy/linalg
copying numpy/linalg/__init__.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/linalg
copying numpy/linalg/setup.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/linalg
copying numpy/linalg/linalg.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/linalg
copying numpy/linalg/info.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/linalg
creating build/lib.macosx-10.15-x86_64-3.9/numpy/ma
copying numpy/ma/extras.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/ma
copying numpy/ma/version.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/ma
copying numpy/ma/testutils.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/ma
copying numpy/ma/__init__.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/ma
copying numpy/ma/core.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/ma
copying numpy/ma/bench.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/ma
copying numpy/ma/setup.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/ma
copying numpy/ma/timer_comparison.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/ma
copying numpy/ma/mrecords.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/ma
creating build/lib.macosx-10.15-x86_64-3.9/numpy/matrixlib
copying numpy/matrixlib/__init__.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/matrixlib
copying numpy/matrixlib/setup.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/matrixlib
copying numpy/matrixlib/defmatrix.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/matrixlib
creating build/lib.macosx-10.15-x86_64-3.9/numpy/polynomial
copying numpy/polynomial/laguerre.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/polynomial
copying numpy/polynomial/_polybase.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/polynomial
copying numpy/polynomial/polyutils.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/polynomial
copying numpy/polynomial/__init__.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/polynomial
copying numpy/polynomial/setup.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/polynomial
copying numpy/polynomial/hermite_e.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/polynomial
copying numpy/polynomial/chebyshev.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/polynomial
copying numpy/polynomial/polynomial.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/polynomial
copying numpy/polynomial/legendre.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/polynomial
copying numpy/polynomial/hermite.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/polynomial
creating build/lib.macosx-10.15-x86_64-3.9/numpy/random
copying numpy/random/__init__.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/random
copying numpy/random/setup.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/random
copying numpy/random/info.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/random
creating build/lib.macosx-10.15-x86_64-3.9/numpy/testing
copying numpy/testing/nosetester.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/testing
copying numpy/testing/__init__.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/testing
copying numpy/testing/noseclasses.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/testing
copying numpy/testing/setup.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/testing
copying numpy/testing/utils.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/testing
copying numpy/testing/print_coercion_tables.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/testing
copying numpy/testing/decorators.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/testing
creating build/lib.macosx-10.15-x86_64-3.9/numpy/testing/_private
copying numpy/testing/_private/nosetester.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/testing/_private
copying numpy/testing/_private/__init__.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/testing/_private
copying numpy/testing/_private/noseclasses.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/testing/_private
copying numpy/testing/_private/utils.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/testing/_private
copying numpy/testing/_private/parameterized.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/testing/_private
copying numpy/testing/_private/decorators.py -> build/lib.macosx-10.15-x86_64-3.9/numpy/testing/_private
running build_clib
customize UnixCCompiler
customize UnixCCompiler using build_clib
building 'npymath' library
compiling C sources
C compiler: clang -Wno-unused-result -Wsign-compare -Wunreachable-code -fno-common -dynamic -DNDEBUG -g -fwrapv -O3 -Wall -isysroot /Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk -I/Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk/usr/include -I/Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk/System/Library/Frameworks/Tk.framework/Versions/8.5/Headers
creating build/temp.macosx-10.15-x86_64-3.9
creating build/temp.macosx-10.15-x86_64-3.9/numpy
creating build/temp.macosx-10.15-x86_64-3.9/numpy/core
creating build/temp.macosx-10.15-x86_64-3.9/numpy/core/src
creating build/temp.macosx-10.15-x86_64-3.9/numpy/core/src/npymath
creating build/temp.macosx-10.15-x86_64-3.9/build
creating build/temp.macosx-10.15-x86_64-3.9/build/src.macosx-10.15-x86_64-3.9
creating build/temp.macosx-10.15-x86_64-3.9/build/src.macosx-10.15-x86_64-3.9/numpy
creating build/temp.macosx-10.15-x86_64-3.9/build/src.macosx-10.15-x86_64-3.9/numpy/core
creating build/temp.macosx-10.15-x86_64-3.9/build/src.macosx-10.15-x86_64-3.9/numpy/core/src
creating build/temp.macosx-10.15-x86_64-3.9/build/src.macosx-10.15-x86_64-3.9/numpy/core/src/npymath
compile options: '-Ibuild/src.macosx-10.15-x86_64-3.9/numpy/core/src/npymath -Inumpy/core/include -Ibuild/src.macosx-10.15-x86_64-3.9/numpy/core/include/numpy -Inumpy/core/src/common -Inumpy/core/src -Inumpy/core -Inumpy/core/src/npymath -Inumpy/core/src/multiarray -Inumpy/core/src/umath -Inumpy/core/src/npysort -I/usr/local/include -I/usr/local/opt/[email protected]/include -I/usr/local/opt/sqlite/include -I/Users/destiny/Downloads/env/include -I/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9 -Ibuild/src.macosx-10.15-x86_64-3.9/numpy/core/src/common -Ibuild/src.macosx-10.15-x86_64-3.9/numpy/core/src/npymath -Ibuild/src.macosx-10.15-x86_64-3.9/numpy/core/src/common -Ibuild/src.macosx-10.15-x86_64-3.9/numpy/core/src/npymath -c'
clang: numpy/core/src/npymath/npy_math.c
clang: build/src.macosx-10.15-x86_64-3.9/numpy/core/src/npymath/npy_math_complex.c
clang: build/src.macosx-10.15-x86_64-3.9/numpy/core/src/npymath/ieee754.c
clang: numpy/core/src/npymath/halffloat.c
numpy/core/src/npymath/npy_math_complex.c.src:48:33: warning: unused variable 'tiny' [-Wunused-const-variable]
static const volatile npy_float tiny = 3.9443045e-31f;
^
numpy/core/src/npymath/npy_math_complex.c.src:67:25: warning: unused variable 'c_halff' [-Wunused-const-variable]
static const npy_cfloat c_halff = {0.5F, 0.0};
^
numpy/core/src/npymath/npy_math_complex.c.src:68:25: warning: unused variable 'c_if' [-Wunused-const-variable]
static const npy_cfloat c_if = {0.0, 1.0F};
^
numpy/core/src/npymath/npy_math_complex.c.src:69:25: warning: unused variable 'c_ihalff' [-Wunused-const-variable]
static const npy_cfloat c_ihalff = {0.0, 0.5F};
^
numpy/core/src/npymath/npy_math_complex.c.src:79:1: warning: unused function 'caddf' [-Wunused-function]
caddf(npy_cfloat a, npy_cfloat b)
^
numpy/core/src/npymath/npy_math_complex.c.src:87:1: warning: unused function 'csubf' [-Wunused-function]
csubf(npy_cfloat a, npy_cfloat b)
^
numpy/core/src/npymath/npy_math_complex.c.src:137:1: warning: unused function 'cnegf' [-Wunused-function]
cnegf(npy_cfloat a)
^
numpy/core/src/npymath/npy_math_complex.c.src:144:1: warning: unused function 'cmulif' [-Wunused-function]
cmulif(npy_cfloat a)
^
numpy/core/src/npymath/npy_math_complex.c.src:67:26: warning: unused variable 'c_half' [-Wunused-const-variable]
static const npy_cdouble c_half = {0.5, 0.0};
^
numpy/core/src/npymath/npy_math_complex.c.src:68:26: warning: unused variable 'c_i' [-Wunused-const-variable]
static const npy_cdouble c_i = {0.0, 1.0};
^
numpy/core/src/npymath/npy_math_complex.c.src:69:26: warning: unused variable 'c_ihalf' [-Wunused-const-variable]
static const npy_cdouble c_ihalf = {0.0, 0.5};
^
numpy/core/src/npymath/npy_math_complex.c.src:79:1: warning: unused function 'cadd' [-Wunused-function]
cadd(npy_cdouble a, npy_cdouble b)
^
numpy/core/src/npymath/npy_math_complex.c.src:87:1: warning: unused function 'csub' [-Wunused-function]
csub(npy_cdouble a, npy_cdouble b)
^
numpy/core/src/npymath/npy_math_complex.c.src:137:1: warning: unused function 'cneg' [-Wunused-function]
cneg(npy_cdouble a)
^
numpy/core/src/npymath/npy_math_complex.c.src:144:1: warning: unused function 'cmuli' [-Wunused-function]
cmuli(npy_cdouble a)
^
numpy/core/src/npymath/npy_math_complex.c.src:67:30: warning: unused variable 'c_halfl' [-Wunused-const-variable]
static const npy_clongdouble c_halfl = {0.5L, 0.0};
^
numpy/core/src/npymath/npy_math_complex.c.src:68:30: warning: unused variable 'c_il' [-Wunused-const-variable]
static const npy_clongdouble c_il = {0.0, 1.0L};
^
numpy/core/src/npymath/npy_math_complex.c.src:69:30: warning: unused variable 'c_ihalfl' [-Wunused-const-variable]
static const npy_clongdouble c_ihalfl = {0.0, 0.5L};
^
numpy/core/src/npymath/npy_math_complex.c.src:79:1: warning: unused function 'caddl' [-Wunused-function]
caddl(npy_clongdouble a, npy_clongdouble b)
^
numpy/core/src/npymath/npy_math_complex.c.src:87:1: warning: unused function 'csubl' [-Wunused-function]
csubl(npy_clongdouble a, npy_clongdouble b)
^
numpy/core/src/npymath/npy_math_complex.c.src:137:1: warning: unused function 'cnegl' [-Wunused-function]
cnegl(npy_clongdouble a)
^
numpy/core/src/npymath/npy_math_complex.c.src:144:1: warning: unused function 'cmulil' [-Wunused-function]
cmulil(npy_clongdouble a)
^
22 warnings generated.
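These unused-variable/function warnings are expected noise rather than errors: npy_math_complex.c.src is a template that gets expanded once per precision (float, double, long double), and any expanded helper that nothing references trips clang under -Wall. A minimal sketch of the same warning class, assuming clang with -Wall (the helper name is illustrative, not numpy's):

```c
/* unused_helper.c -- compile with: clang -Wall -c unused_helper.c
 * A static function with no callers reproduces the -Wunused-function
 * diagnostic seen above (like caddf/cadd/caddl); the object file is
 * still produced, so the build carries on. */
static int cadd_like_helper(int a, int b)
{
    return a + b;
}

int main(void)
{
    return 0;  /* helper intentionally left unreferenced */
}
```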
ar: adding 4 object files to build/temp.macosx-10.15-x86_64-3.9/libnpymath.a
ranlib:@ build/temp.macosx-10.15-x86_64-3.9/libnpymath.a
building 'npysort' library
compiling C sources
C compiler: clang -Wno-unused-result -Wsign-compare -Wunreachable-code -fno-common -dynamic -DNDEBUG -g -fwrapv -O3 -Wall -isysroot /Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk -I/Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk/usr/include -I/Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk/System/Library/Frameworks/Tk.framework/Versions/8.5/Headers
creating build/temp.macosx-10.15-x86_64-3.9/build/src.macosx-10.15-x86_64-3.9/numpy/core/src/npysort
compile options: '-Ibuild/src.macosx-10.15-x86_64-3.9/numpy/core/src/common -Inumpy/core/include -Ibuild/src.macosx-10.15-x86_64-3.9/numpy/core/include/numpy -Inumpy/core/src/common -Inumpy/core/src -Inumpy/core -Inumpy/core/src/npymath -Inumpy/core/src/multiarray -Inumpy/core/src/umath -Inumpy/core/src/npysort -I/usr/local/include -I/usr/local/opt/[email protected]/include -I/usr/local/opt/sqlite/include -I/Users/destiny/Downloads/env/include -I/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9 -Ibuild/src.macosx-10.15-x86_64-3.9/numpy/core/src/common -Ibuild/src.macosx-10.15-x86_64-3.9/numpy/core/src/npymath -Ibuild/src.macosx-10.15-x86_64-3.9/numpy/core/src/common -Ibuild/src.macosx-10.15-x86_64-3.9/numpy/core/src/npymath -c'
clang: build/src.macosx-10.15-x86_64-3.9/numpy/core/src/npysort/quicksort.c
clang: build/src.macosx-10.15-x86_64-3.9/numpy/core/src/npysort/mergesort.c
clang: build/src.macosx-10.15-x86_64-3.9/numpy/core/src/npysort/heapsort.c
clang: build/src.macosx-10.15-x86_64-3.9/numpy/core/src/npysort/selection.c
clang: build/src.macosx-10.15-x86_64-3.9/numpy/core/src/npysort/binsearch.c
numpy/core/src/npysort/selection.c.src:328:9: warning: code will never be executed [-Wunreachable-code]
npy_intp k;
^~~~~~~~~~~
numpy/core/src/npysort/selection.c.src:326:14: note: silence by adding parentheses to mark code as explicitly dead
else if (0 && kth == num - 1) {
^
/* DISABLES CODE */ ( )
[... the same warning/note pair is emitted 22 times in total ...]
22 warnings generated.
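Clang's note spells out the silencing it wants for the intentionally dead `0 && ...` branch: parenthesizing the constant marks the branch as explicitly disabled, so -Wunreachable-code stays quiet. A standalone sketch of that pattern, assuming clang with -Wunreachable-code (the variable names just mirror the snippet above):

```c
/* dead_branch.c -- compile with: clang -Wunreachable-code dead_branch.c */
#include <stdio.h>

static void pick(int kth, int num)
{
    if (kth == 0) {
        puts("common case");
    }
    /* Warns, exactly as in the log:
     *     else if (0 && kth == num - 1) { ... }
     * Quiet, per clang's fix-it note -- the parenthesized (0) marks
     * the branch as explicitly dead: */
    else if ((0) && kth == num - 1) {
        puts("never reached");
    }
}

int main(void)
{
    pick(0, 10);
    return 0;
}
```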
ar: adding 5 object files to build/temp.macosx-10.15-x86_64-3.9/libnpysort.a
ranlib:@ build/temp.macosx-10.15-x86_64-3.9/libnpysort.a
running build_ext
customize UnixCCompiler
customize UnixCCompiler using build_ext
building 'numpy.core._dummy' extension
compiling C sources
C compiler: clang -Wno-unused-result -Wsign-compare -Wunreachable-code -fno-common -dynamic -DNDEBUG -g -fwrapv -O3 -Wall -isysroot /Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk -I/Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk/usr/include -I/Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk/System/Library/Frameworks/Tk.framework/Versions/8.5/Headers
compile options: '-DNPY_INTERNAL_BUILD=1 -DHAVE_NPY_CONFIG_H=1 -D_FILE_OFFSET_BITS=64 -D_LARGEFILE_SOURCE=1 -D_LARGEFILE64_SOURCE=1 -Inumpy/core/include -Ibuild/src.macosx-10.15-x86_64-3.9/numpy/core/include/numpy -Inumpy/core/src/common -Inumpy/core/src -Inumpy/core -Inumpy/core/src/npymath -Inumpy/core/src/multiarray -Inumpy/core/src/umath -Inumpy/core/src/npysort -I/usr/local/include -I/usr/local/opt/[email protected]/include -I/usr/local/opt/sqlite/include -I/Users/destiny/Downloads/env/include -I/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9 -Ibuild/src.macosx-10.15-x86_64-3.9/numpy/core/src/common -Ibuild/src.macosx-10.15-x86_64-3.9/numpy/core/src/npymath -Ibuild/src.macosx-10.15-x86_64-3.9/numpy/core/src/common -Ibuild/src.macosx-10.15-x86_64-3.9/numpy/core/src/npymath -c'
clang: numpy/core/src/dummymodule.c
clang -bundle -undefined dynamic_lookup -isysroot /Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk build/temp.macosx-10.15-x86_64-3.9/numpy/core/src/dummymodule.o -L/usr/local/lib -L/usr/local/opt/[email protected]/lib -L/usr/local/opt/sqlite/lib -Lbuild/temp.macosx-10.15-x86_64-3.9 -o build/lib.macosx-10.15-x86_64-3.9/numpy/core/_dummy.cpython-39-darwin.so
building 'numpy.core._multiarray_tests' extension
compiling C sources
C compiler: clang -Wno-unused-result -Wsign-compare -Wunreachable-code -fno-common -dynamic -DNDEBUG -g -fwrapv -O3 -Wall -isysroot /Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk -I/Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk/usr/include -I/Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk/System/Library/Frameworks/Tk.framework/Versions/8.5/Headers
creating build/temp.macosx-10.15-x86_64-3.9/build/src.macosx-10.15-x86_64-3.9/numpy/core/src/multiarray
creating build/temp.macosx-10.15-x86_64-3.9/numpy/core/src/common
compile options: '-DNPY_INTERNAL_BUILD=1 -DHAVE_NPY_CONFIG_H=1 -D_FILE_OFFSET_BITS=64 -D_LARGEFILE_SOURCE=1 -D_LARGEFILE64_SOURCE=1 -Inumpy/core/include -Ibuild/src.macosx-10.15-x86_64-3.9/numpy/core/include/numpy -Inumpy/core/src/common -Inumpy/core/src -Inumpy/core -Inumpy/core/src/npymath -Inumpy/core/src/multiarray -Inumpy/core/src/umath -Inumpy/core/src/npysort -I/usr/local/include -I/usr/local/opt/[email protected]/include -I/usr/local/opt/sqlite/include -I/Users/destiny/Downloads/env/include -I/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9 -Ibuild/src.macosx-10.15-x86_64-3.9/numpy/core/src/common -Ibuild/src.macosx-10.15-x86_64-3.9/numpy/core/src/npymath -Ibuild/src.macosx-10.15-x86_64-3.9/numpy/core/src/common -Ibuild/src.macosx-10.15-x86_64-3.9/numpy/core/src/npymath -c'
clang: build/src.macosx-10.15-x86_64-3.9/numpy/core/src/multiarray/_multiarray_tests.c
clang: numpy/core/src/common/mem_overlap.c
clang -bundle -undefined dynamic_lookup -isysroot /Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk build/temp.macosx-10.15-x86_64-3.9/build/src.macosx-10.15-x86_64-3.9/numpy/core/src/multiarray/_multiarray_tests.o build/temp.macosx-10.15-x86_64-3.9/numpy/core/src/common/mem_overlap.o -L/usr/local/lib -L/usr/local/opt/[email protected]/lib -L/usr/local/opt/sqlite/lib -Lbuild/temp.macosx-10.15-x86_64-3.9 -lnpymath -o build/lib.macosx-10.15-x86_64-3.9/numpy/core/_multiarray_tests.cpython-39-darwin.so
building 'numpy.core._multiarray_umath' extension
compiling C sources
C compiler: clang -Wno-unused-result -Wsign-compare -Wunreachable-code -fno-common -dynamic -DNDEBUG -g -fwrapv -O3 -Wall -isysroot /Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk -I/Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk/usr/include -I/Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk/System/Library/Frameworks/Tk.framework/Versions/8.5/Headers
creating build/temp.macosx-10.15-x86_64-3.9/numpy/core/src/multiarray
creating build/temp.macosx-10.15-x86_64-3.9/numpy/core/src/umath
creating build/temp.macosx-10.15-x86_64-3.9/build/src.macosx-10.15-x86_64-3.9/numpy/core/src/umath
creating build/temp.macosx-10.15-x86_64-3.9/private
creating build/temp.macosx-10.15-x86_64-3.9/private/var
creating build/temp.macosx-10.15-x86_64-3.9/private/var/folders
creating build/temp.macosx-10.15-x86_64-3.9/private/var/folders/fz
creating build/temp.macosx-10.15-x86_64-3.9/private/var/folders/fz/0j719tys48x7jlnjnwc69smr0000gn
creating build/temp.macosx-10.15-x86_64-3.9/private/var/folders/fz/0j719tys48x7jlnjnwc69smr0000gn/T
creating build/temp.macosx-10.15-x86_64-3.9/private/var/folders/fz/0j719tys48x7jlnjnwc69smr0000gn/T/pip-install-ufzck51l
creating build/temp.macosx-10.15-x86_64-3.9/private/var/folders/fz/0j719tys48x7jlnjnwc69smr0000gn/T/pip-install-ufzck51l/numpy_b0e8a3953a1d4b46801f12bcea55536e
creating build/temp.macosx-10.15-x86_64-3.9/private/var/folders/fz/0j719tys48x7jlnjnwc69smr0000gn/T/pip-install-ufzck51l/numpy_b0e8a3953a1d4b46801f12bcea55536e/numpy
creating build/temp.macosx-10.15-x86_64-3.9/private/var/folders/fz/0j719tys48x7jlnjnwc69smr0000gn/T/pip-install-ufzck51l/numpy_b0e8a3953a1d4b46801f12bcea55536e/numpy/_build_utils
creating build/temp.macosx-10.15-x86_64-3.9/private/var/folders/fz/0j719tys48x7jlnjnwc69smr0000gn/T/pip-install-ufzck51l/numpy_b0e8a3953a1d4b46801f12bcea55536e/numpy/_build_utils/src
compile options: '-DNPY_INTERNAL_BUILD=1 -DHAVE_NPY_CONFIG_H=1 -D_FILE_OFFSET_BITS=64 -D_LARGEFILE_SOURCE=1 -D_LARGEFILE64_SOURCE=1 -DNO_ATLAS_INFO=3 -DHAVE_CBLAS -Ibuild/src.macosx-10.15-x86_64-3.9/numpy/core/src/umath -Ibuild/src.macosx-10.15-x86_64-3.9/numpy/core/src/npymath -Ibuild/src.macosx-10.15-x86_64-3.9/numpy/core/src/common -Inumpy/core/include -Ibuild/src.macosx-10.15-x86_64-3.9/numpy/core/include/numpy -Inumpy/core/src/common -Inumpy/core/src -Inumpy/core -Inumpy/core/src/npymath -Inumpy/core/src/multiarray -Inumpy/core/src/umath -Inumpy/core/src/npysort -I/usr/local/include -I/usr/local/opt/[email protected]/include -I/usr/local/opt/sqlite/include -I/Users/destiny/Downloads/env/include -I/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9 -Ibuild/src.macosx-10.15-x86_64-3.9/numpy/core/src/common -Ibuild/src.macosx-10.15-x86_64-3.9/numpy/core/src/npymath -Ibuild/src.macosx-10.15-x86_64-3.9/numpy/core/src/common -Ibuild/src.macosx-10.15-x86_64-3.9/numpy/core/src/npymath -c'
extra options: '-msse3 -I/System/Library/Frameworks/vecLib.framework/Headers'
clang: numpy/core/src/multiarray/alloc.c
clang: numpy/core/src/multiarray/calculation.c
clang: numpy/core/src/multiarray/array_assign_scalar.c
clang: numpy/core/src/multiarray/convert.c
clang: numpy/core/src/multiarray/ctors.c
clang: numpy/core/src/multiarray/datetime_busday.c
clang: numpy/core/src/multiarray/dragon4.c
clang: numpy/core/src/multiarray/flagsobject.c
numpy/core/src/multiarray/ctors.c:2261:36: warning: '_PyUnicode_get_wstr_length' is deprecated [-Wdeprecated-declarations]
if (!(PyUString_Check(name) && PyUString_GET_SIZE(name) == 0)) {
^
numpy/core/include/numpy/npy_3kcompat.h:110:28: note: expanded from macro 'PyUString_GET_SIZE'
#define PyUString_GET_SIZE PyUnicode_GET_SIZE
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:261:7: note: expanded from macro 'PyUnicode_GET_SIZE'
PyUnicode_WSTR_LENGTH(op) : \
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:451:35: note: expanded from macro 'PyUnicode_WSTR_LENGTH'
#define PyUnicode_WSTR_LENGTH(op) _PyUnicode_get_wstr_length((PyObject*)op)
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:445:1: note: '_PyUnicode_get_wstr_length' has been explicitly marked deprecated here
Py_DEPRECATED(3.3)
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'
#define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))
^
numpy/core/src/multiarray/ctors.c:2261:36: warning: 'PyUnicode_AsUnicode' is deprecated [-Wdeprecated-declarations]
if (!(PyUString_Check(name) && PyUString_GET_SIZE(name) == 0)) {
^
numpy/core/include/numpy/npy_3kcompat.h:110:28: note: expanded from macro 'PyUString_GET_SIZE'
#define PyUString_GET_SIZE PyUnicode_GET_SIZE
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:262:14: note: expanded from macro 'PyUnicode_GET_SIZE'
((void)PyUnicode_AsUnicode(_PyObject_CAST(op)),\
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:580:1: note: 'PyUnicode_AsUnicode' has been explicitly marked deprecated here
Py_DEPRECATED(3.3) PyAPI_FUNC(Py_UNICODE *) PyUnicode_AsUnicode(
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'
#define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))
^
numpy/core/src/multiarray/ctors.c:2261:36: warning: '_PyUnicode_get_wstr_length' is deprecated [-Wdeprecated-declarations]
if (!(PyUString_Check(name) && PyUString_GET_SIZE(name) == 0)) {
^
numpy/core/include/numpy/npy_3kcompat.h:110:28: note: expanded from macro 'PyUString_GET_SIZE'
#define PyUString_GET_SIZE PyUnicode_GET_SIZE
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:264:8: note: expanded from macro 'PyUnicode_GET_SIZE'
PyUnicode_WSTR_LENGTH(op)))
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:451:35: note: expanded from macro 'PyUnicode_WSTR_LENGTH'
#define PyUnicode_WSTR_LENGTH(op) _PyUnicode_get_wstr_length((PyObject*)op)
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:445:1: note: '_PyUnicode_get_wstr_length' has been explicitly marked deprecated here
Py_DEPRECATED(3.3)
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'
#define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))
^
clang: numpy/core/src/multiarray/arrayobject.c
clang: numpy/core/src/multiarray/array_assign_array.c
clang: numpy/core/src/multiarray/convert_datatype.c
clang: numpy/core/src/multiarray/getset.c
clang: numpy/core/src/multiarray/datetime_busdaycal.c
clang: numpy/core/src/multiarray/buffer.c
clang: numpy/core/src/multiarray/compiled_base.c
clang: numpy/core/src/multiarray/hashdescr.c
clang: numpy/core/src/multiarray/descriptor.c
numpy/core/src/multiarray/descriptor.c:453:13: warning: '_PyUnicode_get_wstr_length' is deprecated [-Wdeprecated-declarations]
if (PyUString_GET_SIZE(name) == 0) {
^
numpy/core/include/numpy/npy_3kcompat.h:110:28: note: expanded from macro 'PyUString_GET_SIZE'
#define PyUString_GET_SIZE PyUnicode_GET_SIZE
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:261:7: note: expanded from macro 'PyUnicode_GET_SIZE'
PyUnicode_WSTR_LENGTH(op) : \
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:451:35: note: expanded from macro 'PyUnicode_WSTR_LENGTH'
#define PyUnicode_WSTR_LENGTH(op) _PyUnicode_get_wstr_length((PyObject*)op)
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:445:1: note: '_PyUnicode_get_wstr_length' has been explicitly marked deprecated here
Py_DEPRECATED(3.3)
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'
#define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))
^
numpy/core/src/multiarray/descriptor.c:453:13: warning: 'PyUnicode_AsUnicode' is deprecated [-Wdeprecated-declarations]
if (PyUString_GET_SIZE(name) == 0) {
^
numpy/core/include/numpy/npy_3kcompat.h:110:28: note: expanded from macro 'PyUString_GET_SIZE'
#define PyUString_GET_SIZE PyUnicode_GET_SIZE
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:262:14: note: expanded from macro 'PyUnicode_GET_SIZE'
((void)PyUnicode_AsUnicode(_PyObject_CAST(op)),\
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:580:1: note: 'PyUnicode_AsUnicode' has been explicitly marked deprecated here
Py_DEPRECATED(3.3) PyAPI_FUNC(Py_UNICODE *) PyUnicode_AsUnicode(
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'
#define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))
^
numpy/core/src/multiarray/descriptor.c:453:13: warning: '_PyUnicode_get_wstr_length' is deprecated [-Wdeprecated-declarations]
if (PyUString_GET_SIZE(name) == 0) {
^
numpy/core/include/numpy/npy_3kcompat.h:110:28: note: expanded from macro 'PyUString_GET_SIZE'
#define PyUString_GET_SIZE PyUnicode_GET_SIZE
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:264:8: note: expanded from macro 'PyUnicode_GET_SIZE'
PyUnicode_WSTR_LENGTH(op)))
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:451:35: note: expanded from macro 'PyUnicode_WSTR_LENGTH'
#define PyUnicode_WSTR_LENGTH(op) _PyUnicode_get_wstr_length((PyObject*)op)
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:445:1: note: '_PyUnicode_get_wstr_length' has been explicitly marked deprecated here
Py_DEPRECATED(3.3)
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'
#define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))
^
numpy/core/src/multiarray/descriptor.c:460:48: warning: '_PyUnicode_get_wstr_length' is deprecated [-Wdeprecated-declarations]
else if (PyUString_Check(title) && PyUString_GET_SIZE(title) > 0) {
^
numpy/core/include/numpy/npy_3kcompat.h:110:28: note: expanded from macro 'PyUString_GET_SIZE'
#define PyUString_GET_SIZE PyUnicode_GET_SIZE
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:261:7: note: expanded from macro 'PyUnicode_GET_SIZE'
PyUnicode_WSTR_LENGTH(op) : \
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:451:35: note: expanded from macro 'PyUnicode_WSTR_LENGTH'
#define PyUnicode_WSTR_LENGTH(op) _PyUnicode_get_wstr_length((PyObject*)op)
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:445:1: note: '_PyUnicode_get_wstr_length' has been explicitly marked deprecated here
Py_DEPRECATED(3.3)
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'
#define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))
^
numpy/core/src/multiarray/descriptor.c:460:48: warning: 'PyUnicode_AsUnicode' is deprecated [-Wdeprecated-declarations]
else if (PyUString_Check(title) && PyUString_GET_SIZE(title) > 0) {
^
numpy/core/include/numpy/npy_3kcompat.h:110:28: note: expanded from macro 'PyUString_GET_SIZE'
#define PyUString_GET_SIZE PyUnicode_GET_SIZE
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:262:14: note: expanded from macro 'PyUnicode_GET_SIZE'
((void)PyUnicode_AsUnicode(_PyObject_CAST(op)),\
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:580:1: note: 'PyUnicode_AsUnicode' has been explicitly marked deprecated here
Py_DEPRECATED(3.3) PyAPI_FUNC(Py_UNICODE *) PyUnicode_AsUnicode(
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'
#define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))
^
numpy/core/src/multiarray/descriptor.c:460:48: warning: '_PyUnicode_get_wstr_length' is deprecated [-Wdeprecated-declarations]
else if (PyUString_Check(title) && PyUString_GET_SIZE(title) > 0) {
^
numpy/core/include/numpy/npy_3kcompat.h:110:28: note: expanded from macro 'PyUString_GET_SIZE'
#define PyUString_GET_SIZE PyUnicode_GET_SIZE
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:264:8: note: expanded from macro 'PyUnicode_GET_SIZE'
PyUnicode_WSTR_LENGTH(op)))
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:451:35: note: expanded from macro 'PyUnicode_WSTR_LENGTH'
#define PyUnicode_WSTR_LENGTH(op) _PyUnicode_get_wstr_length((PyObject*)op)
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:445:1: note: '_PyUnicode_get_wstr_length' has been explicitly marked deprecated here
Py_DEPRECATED(3.3)
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'
#define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))
^
clang: numpy/core/src/multiarray/conversion_utils.c
clang: numpy/core/src/multiarray/item_selection.c
clang: numpy/core/src/multiarray/dtype_transfer.c
clang: numpy/core/src/multiarray/mapping.c
clang: build/src.macosx-10.15-x86_64-3.9/numpy/core/src/multiarray/arraytypes.c
clang: build/src.macosx-10.15-x86_64-3.9/numpy/core/src/multiarray/nditer_templ.c
3 warnings generated.
clang: numpy/core/src/multiarray/datetime.c
numpy/core/src/multiarray/arraytypes.c.src:477:11: warning: 'PyUnicode_AsUnicode' is deprecated [-Wdeprecated-declarations]
ptr = PyUnicode_AS_UNICODE(temp);
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:279:7: note: expanded from macro 'PyUnicode_AS_UNICODE'
PyUnicode_AsUnicode(_PyObject_CAST(op)))
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:580:1: note: 'PyUnicode_AsUnicode' has been explicitly marked deprecated here
Py_DEPRECATED(3.3) PyAPI_FUNC(Py_UNICODE *) PyUnicode_AsUnicode(
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'
#define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))
^
numpy/core/src/multiarray/arraytypes.c.src:482:15: warning: '_PyUnicode_get_wstr_length' is deprecated [-Wdeprecated-declarations]
datalen = PyUnicode_GET_DATA_SIZE(temp);
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:268:6: note: expanded from macro 'PyUnicode_GET_DATA_SIZE'
(PyUnicode_GET_SIZE(op) * Py_UNICODE_SIZE)
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:261:7: note: expanded from macro 'PyUnicode_GET_SIZE'
PyUnicode_WSTR_LENGTH(op) : \
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:451:35: note: expanded from macro 'PyUnicode_WSTR_LENGTH'
#define PyUnicode_WSTR_LENGTH(op) _PyUnicode_get_wstr_length((PyObject*)op)
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:445:1: note: '_PyUnicode_get_wstr_length' has been explicitly marked deprecated here
Py_DEPRECATED(3.3)
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'
#define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))
^
numpy/core/src/multiarray/arraytypes.c.src:482:15: warning: 'PyUnicode_AsUnicode' is deprecated [-Wdeprecated-declarations]
datalen = PyUnicode_GET_DATA_SIZE(temp);
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:268:6: note: expanded from macro 'PyUnicode_GET_DATA_SIZE'
(PyUnicode_GET_SIZE(op) * Py_UNICODE_SIZE)
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:262:14: note: expanded from macro 'PyUnicode_GET_SIZE'
((void)PyUnicode_AsUnicode(_PyObject_CAST(op)),\
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:580:1: note: 'PyUnicode_AsUnicode' has been explicitly marked deprecated here
Py_DEPRECATED(3.3) PyAPI_FUNC(Py_UNICODE *) PyUnicode_AsUnicode(
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'
#define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))
^
numpy/core/src/multiarray/arraytypes.c.src:482:15: warning: '_PyUnicode_get_wstr_length' is deprecated [-Wdeprecated-declarations]
datalen = PyUnicode_GET_DATA_SIZE(temp);
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:268:6: note: expanded from macro 'PyUnicode_GET_DATA_SIZE'
(PyUnicode_GET_SIZE(op) * Py_UNICODE_SIZE)
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:264:8: note: expanded from macro 'PyUnicode_GET_SIZE'
PyUnicode_WSTR_LENGTH(op)))
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:451:35: note: expanded from macro 'PyUnicode_WSTR_LENGTH'
#define PyUnicode_WSTR_LENGTH(op) _PyUnicode_get_wstr_length((PyObject*)op)
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:445:1: note: '_PyUnicode_get_wstr_length' has been explicitly marked deprecated here
Py_DEPRECATED(3.3)
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'
#define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))
^
clang: numpy/core/src/multiarray/common.c
numpy/core/src/multiarray/common.c:187:28: warning: '_PyUnicode_get_wstr_length' is deprecated [-Wdeprecated-declarations]
itemsize = PyUnicode_GET_DATA_SIZE(temp);
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:268:6: note: expanded from macro 'PyUnicode_GET_DATA_SIZE'
(PyUnicode_GET_SIZE(op) * Py_UNICODE_SIZE)
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:261:7: note: expanded from macro 'PyUnicode_GET_SIZE'
PyUnicode_WSTR_LENGTH(op) : \
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:451:35: note: expanded from macro 'PyUnicode_WSTR_LENGTH'
#define PyUnicode_WSTR_LENGTH(op) _PyUnicode_get_wstr_length((PyObject*)op)
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:445:1: note: '_PyUnicode_get_wstr_length' has been explicitly marked deprecated here
Py_DEPRECATED(3.3)
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'
#define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))
^
numpy/core/src/multiarray/common.c:187:28: warning: 'PyUnicode_AsUnicode' is deprecated [-Wdeprecated-declarations]
itemsize = PyUnicode_GET_DATA_SIZE(temp);
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:268:6: note: expanded from macro 'PyUnicode_GET_DATA_SIZE'
(PyUnicode_GET_SIZE(op) * Py_UNICODE_SIZE)
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:262:14: note: expanded from macro 'PyUnicode_GET_SIZE'
((void)PyUnicode_AsUnicode(_PyObject_CAST(op)),\
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:580:1: note: 'PyUnicode_AsUnicode' has been explicitly marked deprecated here
Py_DEPRECATED(3.3) PyAPI_FUNC(Py_UNICODE *) PyUnicode_AsUnicode(
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'
#define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))
^
numpy/core/src/multiarray/common.c:187:28: warning: '_PyUnicode_get_wstr_length' is deprecated [-Wdeprecated-declarations]
itemsize = PyUnicode_GET_DATA_SIZE(temp);
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:268:6: note: expanded from macro 'PyUnicode_GET_DATA_SIZE'
(PyUnicode_GET_SIZE(op) * Py_UNICODE_SIZE)
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:264:8: note: expanded from macro 'PyUnicode_GET_SIZE'
PyUnicode_WSTR_LENGTH(op)))
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:451:35: note: expanded from macro 'PyUnicode_WSTR_LENGTH'
#define PyUnicode_WSTR_LENGTH(op) _PyUnicode_get_wstr_length((PyObject*)op)
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:445:1: note: '_PyUnicode_get_wstr_length' has been explicitly marked deprecated here
Py_DEPRECATED(3.3)
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'
#define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))
^
numpy/core/src/multiarray/common.c:239:28: warning: '_PyUnicode_get_wstr_length' is deprecated [-Wdeprecated-declarations]
itemsize = PyUnicode_GET_DATA_SIZE(temp);
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:268:6: note: expanded from macro 'PyUnicode_GET_DATA_SIZE'
(PyUnicode_GET_SIZE(op) * Py_UNICODE_SIZE)
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:261:7: note: expanded from macro 'PyUnicode_GET_SIZE'
PyUnicode_WSTR_LENGTH(op) : \
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:451:35: note: expanded from macro 'PyUnicode_WSTR_LENGTH'
#define PyUnicode_WSTR_LENGTH(op) _PyUnicode_get_wstr_length((PyObject*)op)
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:445:1: note: '_PyUnicode_get_wstr_length' has been explicitly marked deprecated here
Py_DEPRECATED(3.3)
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'
#define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))
^
numpy/core/src/multiarray/common.c:239:28: warning: 'PyUnicode_AsUnicode' is deprecated [-Wdeprecated-declarations]
itemsize = PyUnicode_GET_DATA_SIZE(temp);
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:268:6: note: expanded from macro 'PyUnicode_GET_DATA_SIZE'
(PyUnicode_GET_SIZE(op) * Py_UNICODE_SIZE)
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:262:14: note: expanded from macro 'PyUnicode_GET_SIZE'
((void)PyUnicode_AsUnicode(_PyObject_CAST(op)),\
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:580:1: note: 'PyUnicode_AsUnicode' has been explicitly marked deprecated here
Py_DEPRECATED(3.3) PyAPI_FUNC(Py_UNICODE *) PyUnicode_AsUnicode(
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'
#define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))
^
numpy/core/src/multiarray/common.c:239:28: warning: '_PyUnicode_get_wstr_length' is deprecated [-Wdeprecated-declarations]
itemsize = PyUnicode_GET_DATA_SIZE(temp);
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:268:6: note: expanded from macro 'PyUnicode_GET_DATA_SIZE'
(PyUnicode_GET_SIZE(op) * Py_UNICODE_SIZE)
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:264:8: note: expanded from macro 'PyUnicode_GET_SIZE'
PyUnicode_WSTR_LENGTH(op)))
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:451:35: note: expanded from macro 'PyUnicode_WSTR_LENGTH'
#define PyUnicode_WSTR_LENGTH(op) _PyUnicode_get_wstr_length((PyObject*)op)
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:445:1: note: '_PyUnicode_get_wstr_length' has been explicitly marked deprecated here
Py_DEPRECATED(3.3)
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'
#define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))
^
numpy/core/src/multiarray/common.c:282:24: warning: '_PyUnicode_get_wstr_length' is deprecated [-Wdeprecated-declarations]
int itemsize = PyUnicode_GET_DATA_SIZE(obj);
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:268:6: note: expanded from macro 'PyUnicode_GET_DATA_SIZE'
(PyUnicode_GET_SIZE(op) * Py_UNICODE_SIZE)
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:261:7: note: expanded from macro 'PyUnicode_GET_SIZE'
PyUnicode_WSTR_LENGTH(op) : \
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:451:35: note: expanded from macro 'PyUnicode_WSTR_LENGTH'
#define PyUnicode_WSTR_LENGTH(op) _PyUnicode_get_wstr_length((PyObject*)op)
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:445:1: note: '_PyUnicode_get_wstr_length' has been explicitly marked deprecated here
Py_DEPRECATED(3.3)
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'
#define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))
^
numpy/core/src/multiarray/common.c:282:24: warning: 'PyUnicode_AsUnicode' is deprecated [-Wdeprecated-declarations]
int itemsize = PyUnicode_GET_DATA_SIZE(obj);
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:268:6: note: expanded from macro 'PyUnicode_GET_DATA_SIZE'
(PyUnicode_GET_SIZE(op) * Py_UNICODE_SIZE)
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:262:14: note: expanded from macro 'PyUnicode_GET_SIZE'
((void)PyUnicode_AsUnicode(_PyObject_CAST(op)),\
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:580:1: note: 'PyUnicode_AsUnicode' has been explicitly marked deprecated here
Py_DEPRECATED(3.3) PyAPI_FUNC(Py_UNICODE *) PyUnicode_AsUnicode(
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'
#define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))
^
numpy/core/src/multiarray/common.c:282:24: warning: '_PyUnicode_get_wstr_length' is deprecated [-Wdeprecated-declarations]
int itemsize = PyUnicode_GET_DATA_SIZE(obj);
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:268:6: note: expanded from macro 'PyUnicode_GET_DATA_SIZE'
(PyUnicode_GET_SIZE(op) * Py_UNICODE_SIZE)
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:264:8: note: expanded from macro 'PyUnicode_GET_SIZE'
PyUnicode_WSTR_LENGTH(op)))
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:451:35: note: expanded from macro 'PyUnicode_WSTR_LENGTH'
#define PyUnicode_WSTR_LENGTH(op) _PyUnicode_get_wstr_length((PyObject*)op)
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:445:1: note: '_PyUnicode_get_wstr_length' has been explicitly marked deprecated here
Py_DEPRECATED(3.3)
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'
#define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))
^
6 warnings generated.
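All of the `PyUnicode_AsUnicode`, `PyUnicode_FromUnicode` and `_PyUnicode_get_wstr_length` warnings above (and the many repeats further down) come from the old wide-character (`Py_UNICODE`) string API that PEP 393 deprecated back in Python 3.3; they are noisy but harmless and are not what breaks the build. A minimal sketch of the modern accessors that replace them (the helper names here are hypothetical, not numpy functions):

```c
/* Sketch only: PEP 393 replacements for the deprecated Py_UNICODE
 * accessors flagged in the warnings above. Helper names are
 * hypothetical, not numpy functions. */
#include <Python.h>

Py_ssize_t modern_length(PyObject *s) {
    /* replaces PyUnicode_GET_SIZE / PyUnicode_WSTR_LENGTH */
    return PyUnicode_GET_LENGTH(s);
}

const char *modern_bytes(PyObject *s, Py_ssize_t *size) {
    /* replaces PyUnicode_AS_DATA; yields UTF-8 instead of Py_UNICODE */
    return PyUnicode_AsUTF8AndSize(s, size);
}
```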
clang: numpy/core/src/multiarray/nditer_pywrap.c
9 warnings generated.
clang: numpy/core/src/multiarray/sequence.c
clang: numpy/core/src/multiarray/shape.c
clang: build/src.macosx-10.15-x86_64-3.9/numpy/core/src/multiarray/einsum.c
clang: numpy/core/src/multiarray/methods.c
clang: numpy/core/src/multiarray/iterators.c
clang: numpy/core/src/multiarray/datetime_strings.c
clang: numpy/core/src/multiarray/number.c
clang: numpy/core/src/multiarray/scalarapi.c
clang: build/src.macosx-10.15-x86_64-3.9/numpy/core/src/multiarray/scalartypes.c
numpy/core/src/multiarray/scalarapi.c:74:28: warning: 'PyUnicode_AsUnicode' is deprecated [-Wdeprecated-declarations]
return (void *)PyUnicode_AS_DATA(scalar);
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:283:21: note: expanded from macro 'PyUnicode_AS_DATA'
((const char *)(PyUnicode_AS_UNICODE(op)))
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:279:7: note: expanded from macro 'PyUnicode_AS_UNICODE'
PyUnicode_AsUnicode(_PyObject_CAST(op)))
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:580:1: note: 'PyUnicode_AsUnicode' has been explicitly marked deprecated here
Py_DEPRECATED(3.3) PyAPI_FUNC(Py_UNICODE *) PyUnicode_AsUnicode(
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'
#define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))
^
numpy/core/src/multiarray/scalarapi.c:135:28: warning: 'PyUnicode_AsUnicode' is deprecated [-Wdeprecated-declarations]
return (void *)PyUnicode_AS_DATA(scalar);
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:283:21: note: expanded from macro 'PyUnicode_AS_DATA'
((const char *)(PyUnicode_AS_UNICODE(op)))
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:279:7: note: expanded from macro 'PyUnicode_AS_UNICODE'
PyUnicode_AsUnicode(_PyObject_CAST(op)))
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:580:1: note: 'PyUnicode_AsUnicode' has been explicitly marked deprecated here
Py_DEPRECATED(3.3) PyAPI_FUNC(Py_UNICODE *) PyUnicode_AsUnicode(
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'
#define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))
^
numpy/core/src/multiarray/scalarapi.c:568:29: warning: '_PyUnicode_get_wstr_length' is deprecated [-Wdeprecated-declarations]
descr->elsize = PyUnicode_GET_DATA_SIZE(sc);
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:268:6: note: expanded from macro 'PyUnicode_GET_DATA_SIZE'
(PyUnicode_GET_SIZE(op) * Py_UNICODE_SIZE)
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:261:7: note: expanded from macro 'PyUnicode_GET_SIZE'
PyUnicode_WSTR_LENGTH(op) : \
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:451:35: note: expanded from macro 'PyUnicode_WSTR_LENGTH'
#define PyUnicode_WSTR_LENGTH(op) _PyUnicode_get_wstr_length((PyObject*)op)
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:445:1: note: '_PyUnicode_get_wstr_length' has been explicitly marked deprecated here
Py_DEPRECATED(3.3)
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'
#define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))
^
numpy/core/src/multiarray/scalarapi.c:568:29: warning: 'PyUnicode_AsUnicode' is deprecated [-Wdeprecated-declarations]
descr->elsize = PyUnicode_GET_DATA_SIZE(sc);
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:268:6: note: expanded from macro 'PyUnicode_GET_DATA_SIZE'
(PyUnicode_GET_SIZE(op) * Py_UNICODE_SIZE)
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:262:14: note: expanded from macro 'PyUnicode_GET_SIZE'
((void)PyUnicode_AsUnicode(_PyObject_CAST(op)),\
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:580:1: note: 'PyUnicode_AsUnicode' has been explicitly marked deprecated here
Py_DEPRECATED(3.3) PyAPI_FUNC(Py_UNICODE *) PyUnicode_AsUnicode(
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'
#define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))
^
numpy/core/src/multiarray/scalarapi.c:568:29: warning: '_PyUnicode_get_wstr_length' is deprecated [-Wdeprecated-declarations]
descr->elsize = PyUnicode_GET_DATA_SIZE(sc);
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:268:6: note: expanded from macro 'PyUnicode_GET_DATA_SIZE'
(PyUnicode_GET_SIZE(op) * Py_UNICODE_SIZE)
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:264:8: note: expanded from macro 'PyUnicode_GET_SIZE'
PyUnicode_WSTR_LENGTH(op)))
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:451:35: note: expanded from macro 'PyUnicode_WSTR_LENGTH'
#define PyUnicode_WSTR_LENGTH(op) _PyUnicode_get_wstr_length((PyObject*)op)
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:445:1: note: '_PyUnicode_get_wstr_length' has been explicitly marked deprecated here
Py_DEPRECATED(3.3)
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'
#define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))
^
numpy/core/src/multiarray/scalartypes.c.src:475:17: warning: 'PyUnicode_AsUnicode' is deprecated [-Wdeprecated-declarations]
ip = dptr = PyUnicode_AS_UNICODE(self);
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:279:7: note: expanded from macro 'PyUnicode_AS_UNICODE'
PyUnicode_AsUnicode(_PyObject_CAST(op)))
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:580:1: note: 'PyUnicode_AsUnicode' has been explicitly marked deprecated here
Py_DEPRECATED(3.3) PyAPI_FUNC(Py_UNICODE *) PyUnicode_AsUnicode(
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'
#define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))
^
numpy/core/src/multiarray/scalartypes.c.src:476:11: warning: '_PyUnicode_get_wstr_length' is deprecated [-Wdeprecated-declarations]
len = PyUnicode_GET_SIZE(self);
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:261:7: note: expanded from macro 'PyUnicode_GET_SIZE'
PyUnicode_WSTR_LENGTH(op) : \
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:451:35: note: expanded from macro 'PyUnicode_WSTR_LENGTH'
#define PyUnicode_WSTR_LENGTH(op) _PyUnicode_get_wstr_length((PyObject*)op)
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:445:1: note: '_PyUnicode_get_wstr_length' has been explicitly marked deprecated here
Py_DEPRECATED(3.3)
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'
#define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))
^
numpy/core/src/multiarray/scalartypes.c.src:476:11: warning: 'PyUnicode_AsUnicode' is deprecated [-Wdeprecated-declarations]
len = PyUnicode_GET_SIZE(self);
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:262:14: note: expanded from macro 'PyUnicode_GET_SIZE'
((void)PyUnicode_AsUnicode(_PyObject_CAST(op)),\
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:580:1: note: 'PyUnicode_AsUnicode' has been explicitly marked deprecated here
Py_DEPRECATED(3.3) PyAPI_FUNC(Py_UNICODE *) PyUnicode_AsUnicode(
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'
#define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))
^
numpy/core/src/multiarray/scalartypes.c.src:476:11: warning: '_PyUnicode_get_wstr_length' is deprecated [-Wdeprecated-declarations]
len = PyUnicode_GET_SIZE(self);
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:264:8: note: expanded from macro 'PyUnicode_GET_SIZE'
PyUnicode_WSTR_LENGTH(op)))
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:451:35: note: expanded from macro 'PyUnicode_WSTR_LENGTH'
#define PyUnicode_WSTR_LENGTH(op) _PyUnicode_get_wstr_length((PyObject*)op)
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:445:1: note: '_PyUnicode_get_wstr_length' has been explicitly marked deprecated here
Py_DEPRECATED(3.3)
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'
#define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))
^
numpy/core/src/multiarray/scalartypes.c.src:481:11: warning: 'PyUnicode_FromUnicode' is deprecated [-Wdeprecated-declarations]
new = PyUnicode_FromUnicode(ip, len);
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:551:1: note: 'PyUnicode_FromUnicode' has been explicitly marked deprecated here
Py_DEPRECATED(3.3) PyAPI_FUNC(PyObject*) PyUnicode_FromUnicode(
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'
#define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))
^
[... 63 lines elided: the scalartypes.c.src:475-481 'PyUnicode_AsUnicode' / '_PyUnicode_get_wstr_length' / 'PyUnicode_FromUnicode' warning block above is repeated verbatim a second time ...]
numpy/core/src/multiarray/scalartypes.c.src:1849:18: warning: 'PyUnicode_AsUnicode' is deprecated [-Wdeprecated-declarations]
buffer = PyUnicode_AS_DATA(self);
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:283:21: note: expanded from macro 'PyUnicode_AS_DATA'
((const char *)(PyUnicode_AS_UNICODE(op)))
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:279:7: note: expanded from macro 'PyUnicode_AS_UNICODE'
PyUnicode_AsUnicode(_PyObject_CAST(op)))
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:580:1: note: 'PyUnicode_AsUnicode' has been explicitly marked deprecated here
Py_DEPRECATED(3.3) PyAPI_FUNC(Py_UNICODE *) PyUnicode_AsUnicode(
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'
#define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))
^
numpy/core/src/multiarray/scalartypes.c.src:1850:18: warning: '_PyUnicode_get_wstr_length' is deprecated [-Wdeprecated-declarations]
buflen = PyUnicode_GET_DATA_SIZE(self);
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:268:6: note: expanded from macro 'PyUnicode_GET_DATA_SIZE'
(PyUnicode_GET_SIZE(op) * Py_UNICODE_SIZE)
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:261:7: note: expanded from macro 'PyUnicode_GET_SIZE'
PyUnicode_WSTR_LENGTH(op) : \
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:451:35: note: expanded from macro 'PyUnicode_WSTR_LENGTH'
#define PyUnicode_WSTR_LENGTH(op) _PyUnicode_get_wstr_length((PyObject*)op)
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:445:1: note: '_PyUnicode_get_wstr_length' has been explicitly marked deprecated here
Py_DEPRECATED(3.3)
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'
#define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))
^
numpy/core/src/multiarray/scalartypes.c.src:1850:18: warning: 'PyUnicode_AsUnicode' is deprecated [-Wdeprecated-declarations]
buflen = PyUnicode_GET_DATA_SIZE(self);
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:268:6: note: expanded from macro 'PyUnicode_GET_DATA_SIZE'
(PyUnicode_GET_SIZE(op) * Py_UNICODE_SIZE)
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:262:14: note: expanded from macro 'PyUnicode_GET_SIZE'
((void)PyUnicode_AsUnicode(_PyObject_CAST(op)),\
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:580:1: note: 'PyUnicode_AsUnicode' has been explicitly marked deprecated here
Py_DEPRECATED(3.3) PyAPI_FUNC(Py_UNICODE *) PyUnicode_AsUnicode(
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'
#define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))
^
numpy/core/src/multiarray/scalartypes.c.src:1850:18: warning: '_PyUnicode_get_wstr_length' is deprecated [-Wdeprecated-declarations]
buflen = PyUnicode_GET_DATA_SIZE(self);
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:268:6: note: expanded from macro 'PyUnicode_GET_DATA_SIZE'
(PyUnicode_GET_SIZE(op) * Py_UNICODE_SIZE)
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:264:8: note: expanded from macro 'PyUnicode_GET_SIZE'
PyUnicode_WSTR_LENGTH(op)))
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:451:35: note: expanded from macro 'PyUnicode_WSTR_LENGTH'
#define PyUnicode_WSTR_LENGTH(op) _PyUnicode_get_wstr_length((PyObject*)op)
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:445:1: note: '_PyUnicode_get_wstr_length' has been explicitly marked deprecated here
Py_DEPRECATED(3.3)
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'
#define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))
^
5 warnings generated.
clang: numpy/core/src/multiarray/typeinfo.c
clang: numpy/core/src/multiarray/refcount.c
clang: numpy/core/src/multiarray/usertypes.c
clang: numpy/core/src/multiarray/multiarraymodule.c
clang: build/src.macosx-10.15-x86_64-3.9/numpy/core/src/multiarray/lowlevel_strided_loops.c
clang: numpy/core/src/multiarray/vdot.c
clang: numpy/core/src/umath/umathmodule.c
clang: build/src.macosx-10.15-x86_64-3.9/numpy/core/src/umath/matmul.c
clang: numpy/core/src/umath/reduction.c
clang: build/src.macosx-10.15-x86_64-3.9/numpy/core/src/umath/loops.c
clang: numpy/core/src/multiarray/nditer_api.c
14 warnings generated.
clang: numpy/core/src/multiarray/strfuncs.c
numpy/core/src/umath/loops.c.src:655:18: warning: 'PyEval_CallObjectWithKeywords' is deprecated [-Wdeprecated-declarations]
result = PyEval_CallObject(tocall, arglist);
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/ceval.h:24:5: note: expanded from macro 'PyEval_CallObject'
PyEval_CallObjectWithKeywords(callable, arg, (PyObject *)NULL)
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/ceval.h:17:1: note: 'PyEval_CallObjectWithKeywords' has been explicitly marked deprecated here
Py_DEPRECATED(3.9) PyAPI_FUNC(PyObject *) PyEval_CallObjectWithKeywords(
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'
#define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))
^
numpy/core/src/multiarray/strfuncs.c:178:13: warning: 'PyEval_CallObjectWithKeywords' is deprecated [-Wdeprecated-declarations]
s = PyEval_CallObject(PyArray_ReprFunction, arglist);
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/ceval.h:24:5: note: expanded from macro 'PyEval_CallObject'
PyEval_CallObjectWithKeywords(callable, arg, (PyObject *)NULL)
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/ceval.h:17:1: note: 'PyEval_CallObjectWithKeywords' has been explicitly marked deprecated here
Py_DEPRECATED(3.9) PyAPI_FUNC(PyObject *) PyEval_CallObjectWithKeywords(
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'
#define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))
^
numpy/core/src/multiarray/strfuncs.c:195:13: warning: 'PyEval_CallObjectWithKeywords' is deprecated [-Wdeprecated-declarations]
s = PyEval_CallObject(PyArray_StrFunction, arglist);
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/ceval.h:24:5: note: expanded from macro 'PyEval_CallObject'
PyEval_CallObjectWithKeywords(callable, arg, (PyObject *)NULL)
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/ceval.h:17:1: note: 'PyEval_CallObjectWithKeywords' has been explicitly marked deprecated here
Py_DEPRECATED(3.9) PyAPI_FUNC(PyObject *) PyEval_CallObjectWithKeywords(
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'
#define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))
^
2 warnings generated.
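`PyEval_CallObject` expands to `PyEval_CallObjectWithKeywords`, which CPython 3.9 deprecated in favour of the plain object-call API; the drop-in replacement is `PyObject_CallObject`, roughly:

```c
/* Sketch: the non-deprecated equivalent of the PyEval_CallObject
 * calls flagged in strfuncs.c and loops.c above. */
#include <Python.h>

PyObject *call_it(PyObject *callable, PyObject *args) {
    /* same semantics as PyEval_CallObject(callable, args) */
    return PyObject_CallObject(callable, args);
}
```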
clang: numpy/core/src/multiarray/temp_elide.c
clang: numpy/core/src/umath/cpuid.c
clang: build/src.macosx-10.15-x86_64-3.9/numpy/core/src/umath/scalarmath.c
clang: numpy/core/src/umath/ufunc_object.c
numpy/core/src/umath/scalarmath.c.src:1449:1: warning: unused function 'byte_long' [-Wunused-function]
byte_long(PyObject *obj)
^
numpy/core/src/umath/scalarmath.c.src:1449:1: warning: unused function 'ubyte_long' [-Wunused-function]
ubyte_long(PyObject *obj)
^
numpy/core/src/umath/scalarmath.c.src:1449:1: warning: unused function 'short_long' [-Wunused-function]
short_long(PyObject *obj)
^
numpy/core/src/umath/scalarmath.c.src:1449:1: warning: unused function 'ushort_long' [-Wunused-function]
ushort_long(PyObject *obj)
^
numpy/core/src/umath/scalarmath.c.src:1449:1: warning: unused function 'int_long' [-Wunused-function]
int_long(PyObject *obj)
^
numpy/core/src/umath/scalarmath.c.src:1449:1: warning: unused function 'uint_long' [-Wunused-function]
uint_long(PyObject *obj)
^
numpy/core/src/umath/scalarmath.c.src:1449:1: warning: unused function 'long_long' [-Wunused-function]
long_long(PyObject *obj)
^
numpy/core/src/umath/scalarmath.c.src:1449:1: warning: unused function 'ulong_long' [-Wunused-function]
ulong_long(PyObject *obj)
^
numpy/core/src/umath/scalarmath.c.src:1449:1: warning: unused function 'longlong_long' [-Wunused-function]
longlong_long(PyObject *obj)
^
numpy/core/src/umath/scalarmath.c.src:1449:1: warning: unused function 'ulonglong_long' [-Wunused-function]
ulonglong_long(PyObject *obj)
^
numpy/core/src/umath/scalarmath.c.src:1449:1: warning: unused function 'half_long' [-Wunused-function]
half_long(PyObject *obj)
^
numpy/core/src/umath/scalarmath.c.src:1449:1: warning: unused function 'float_long' [-Wunused-function]
float_long(PyObject *obj)
^
numpy/core/src/umath/scalarmath.c.src:1449:1: warning: unused function 'double_long' [-Wunused-function]
double_long(PyObject *obj)
^
numpy/core/src/umath/scalarmath.c.src:1449:1: warning: unused function 'longdouble_long' [-Wunused-function]
longdouble_long(PyObject *obj)
^
numpy/core/src/umath/scalarmath.c.src:1449:1: warning: unused function 'cfloat_long' [-Wunused-function]
cfloat_long(PyObject *obj)
^
numpy/core/src/umath/scalarmath.c.src:1449:1: warning: unused function 'cdouble_long' [-Wunused-function]
cdouble_long(PyObject *obj)
^
numpy/core/src/umath/scalarmath.c.src:1449:1: warning: unused function 'clongdouble_long' [-Wunused-function]
clongdouble_long(PyObject *obj)
^
clang: numpy/core/src/multiarray/nditer_constr.c
numpy/core/src/umath/ufunc_object.c:657:19: warning: comparison of integers of different signs: 'int' and 'size_t' (aka 'unsigned long') [-Wsign-compare]
for (i = 0; i < len; i++) {
~ ^ ~~~
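This `-Wsign-compare` warning is the usual pattern of a signed `int` loop index compared against an unsigned `size_t` length; declaring the index with the same unsigned type silences it, for example:

```c
/* Sketch: match the loop index type to the unsigned bound. */
#include <stddef.h>

void walk(const char *buf, size_t len) {
    for (size_t i = 0; i < len; i++) {
        (void)buf[i]; /* ... process one element ... */
    }
}
```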
clang: numpy/core/src/umath/override.c
clang: numpy/core/src/npymath/npy_math.c
clang: build/src.macosx-10.15-x86_64-3.9/numpy/core/src/npymath/ieee754.c
numpy/core/src/umath/loops.c.src:2527:22: warning: code will never be executed [-Wunreachable-code]
npy_intp n = dimensions[0];
^~~~~~~~~~
numpy/core/src/umath/loops.c.src:2526:29: note: silence by adding parentheses to mark code as explicitly dead
if (IS_BINARY_REDUCE && 0) {
^
/* DISABLES CODE */ ( )
numpy/core/src/umath/loops.c.src:2527:22: warning: code will never be executed [-Wunreachable-code]
npy_intp n = dimensions[0];
^~~~~~~~~~
numpy/core/src/umath/loops.c.src:2526:29: note: silence by adding parentheses to mark code as explicitly dead
if (IS_BINARY_REDUCE && 0) {
^
/* DISABLES CODE */ ( )
numpy/core/src/umath/loops.c.src:2527:22: warning: code will never be executed [-Wunreachable-code]
npy_intp n = dimensions[0];
^~~~~~~~~~
numpy/core/src/umath/loops.c.src:2526:29: note: silence by adding parentheses to mark code as explicitly dead
if (IS_BINARY_REDUCE && 0) {
^
/* DISABLES CODE */ ( )
clang: build/src.macosx-10.15-x86_64-3.9/numpy/core/src/npymath/npy_math_complex.c
numpy/core/src/npymath/npy_math_complex.c.src:48:33: warning: unused variable 'tiny' [-Wunused-const-variable]
static const volatile npy_float tiny = 3.9443045e-31f;
^
numpy/core/src/npymath/npy_math_complex.c.src:67:25: warning: unused variable 'c_halff' [-Wunused-const-variable]
static const npy_cfloat c_halff = {0.5F, 0.0};
^
numpy/core/src/npymath/npy_math_complex.c.src:68:25: warning: unused variable 'c_if' [-Wunused-const-variable]
static const npy_cfloat c_if = {0.0, 1.0F};
^
numpy/core/src/npymath/npy_math_complex.c.src:69:25: warning: unused variable 'c_ihalff' [-Wunused-const-variable]
static const npy_cfloat c_ihalff = {0.0, 0.5F};
^
numpy/core/src/npymath/npy_math_complex.c.src:79:1: warning: unused function 'caddf' [-Wunused-function]
caddf(npy_cfloat a, npy_cfloat b)
^
numpy/core/src/npymath/npy_math_complex.c.src:87:1: warning: unused function 'csubf' [-Wunused-function]
csubf(npy_cfloat a, npy_cfloat b)
^
numpy/core/src/npymath/npy_math_complex.c.src:137:1: warning: unused function 'cnegf' [-Wunused-function]
cnegf(npy_cfloat a)
^
numpy/core/src/npymath/npy_math_complex.c.src:144:1: warning: unused function 'cmulif' [-Wunused-function]
cmulif(npy_cfloat a)
^
numpy/core/src/npymath/npy_math_complex.c.src:67:26: warning: unused variable 'c_half' [-Wunused-const-variable]
static const npy_cdouble c_half = {0.5, 0.0};
^
numpy/core/src/npymath/npy_math_complex.c.src:68:26: warning: unused variable 'c_i' [-Wunused-const-variable]
static const npy_cdouble c_i = {0.0, 1.0};
^
numpy/core/src/npymath/npy_math_complex.c.src:69:26: warning: unused variable 'c_ihalf' [-Wunused-const-variable]
static const npy_cdouble c_ihalf = {0.0, 0.5};
^
numpy/core/src/npymath/npy_math_complex.c.src:79:1: warning: unused function 'cadd' [-Wunused-function]
cadd(npy_cdouble a, npy_cdouble b)
^
numpy/core/src/npymath/npy_math_complex.c.src:87:1: warning: unused function 'csub' [-Wunused-function]
csub(npy_cdouble a, npy_cdouble b)
^
numpy/core/src/npymath/npy_math_complex.c.src:137:1: warning: unused function 'cneg' [-Wunused-function]
cneg(npy_cdouble a)
^
numpy/core/src/npymath/npy_math_complex.c.src:144:1: warning: unused function 'cmuli' [-Wunused-function]
cmuli(npy_cdouble a)
^
numpy/core/src/npymath/npy_math_complex.c.src:67:30: warning: unused variable 'c_halfl' [-Wunused-const-variable]
static const npy_clongdouble c_halfl = {0.5L, 0.0};
^
numpy/core/src/npymath/npy_math_complex.c.src:68:30: warning: unused variable 'c_il' [-Wunused-const-variable]
static const npy_clongdouble c_il = {0.0, 1.0L};
^
numpy/core/src/npymath/npy_math_complex.c.src:69:30: warning: unused variable 'c_ihalfl' [-Wunused-const-variable]
static const npy_clongdouble c_ihalfl = {0.0, 0.5L};
^
numpy/core/src/npymath/npy_math_complex.c.src:79:1: warning: unused function 'caddl' [-Wunused-function]
caddl(npy_clongdouble a, npy_clongdouble b)
^
numpy/core/src/npymath/npy_math_complex.c.src:87:1: warning: unused function 'csubl' [-Wunused-function]
csubl(npy_clongdouble a, npy_clongdouble b)
^
numpy/core/src/npymath/npy_math_complex.c.src:137:1: warning: unused function 'cnegl' [-Wunused-function]
cnegl(npy_clongdouble a)
^
numpy/core/src/npymath/npy_math_complex.c.src:144:1: warning: unused function 'cmulil' [-Wunused-function]
cmulil(npy_clongdouble a)
^
22 warnings generated.
clang: numpy/core/src/common/mem_overlap.c
clang: numpy/core/src/npymath/halffloat.c
clang: numpy/core/src/common/array_assign.c
clang: numpy/core/src/common/ufunc_override.c
clang: numpy/core/src/common/npy_longdouble.c
clang: numpy/core/src/common/numpyos.c
clang: numpy/core/src/common/ucsnarrow.c
1 warning generated.
clang: numpy/core/src/umath/extobj.c
numpy/core/src/common/ucsnarrow.c:139:34: warning: 'PyUnicode_FromUnicode' is deprecated [-Wdeprecated-declarations]
ret = (PyUnicodeObject *)PyUnicode_FromUnicode((Py_UNICODE*)buf,
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:551:1: note: 'PyUnicode_FromUnicode' has been explicitly marked deprecated here
Py_DEPRECATED(3.3) PyAPI_FUNC(PyObject*) PyUnicode_FromUnicode(
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'
#define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))
^
1 warning generated.
clang: numpy/core/src/common/python_xerbla.c
clang: numpy/core/src/common/cblasfuncs.c
clang: /private/var/folders/fz/0j719tys48x7jlnjnwc69smr0000gn/T/pip-install-ufzck51l/numpy_b0e8a3953a1d4b46801f12bcea55536e/numpy/_build_utils/src/apple_sgemv_fix.c
In file included from /private/var/folders/fz/0j719tys48x7jlnjnwc69smr0000gn/T/pip-install-ufzck51l/numpy_b0e8a3953a1d4b46801f12bcea55536e/numpy/_build_utils/src/apple_sgemv_fix.c:26:
In file included from numpy/core/include/numpy/arrayobject.h:4:
In file included from numpy/core/include/numpy/ndarrayobject.h:21:
build/src.macosx-10.15-x86_64-3.9/numpy/core/include/numpy/__multiarray_api.h:1463:1: warning: unused function '_import_array' [-Wunused-function]
_import_array(void)
^
1 warning generated.
17 warnings generated.
clang: numpy/core/src/umath/ufunc_type_resolution.c
4 warnings generated.
4 warnings generated.
clang -bundle -undefined dynamic_lookup -isysroot /Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk build/temp.macosx-10.15-x86_64-3.9/numpy/core/src/multiarray/alloc.o build/temp.macosx-10.15-x86_64-3.9/numpy/core/src/multiarray/arrayobject.o build/temp.macosx-10.15-x86_64-3.9/build/src.macosx-10.15-x86_64-3.9/numpy/core/src/multiarray/arraytypes.o build/temp.macosx-10.15-x86_64-3.9/numpy/core/src/multiarray/array_assign_scalar.o build/temp.macosx-10.15-x86_64-3.9/numpy/core/src/multiarray/array_assign_array.o build/temp.macosx-10.15-x86_64-3.9/numpy/core/src/multiarray/buffer.o build/temp.macosx-10.15-x86_64-3.9/numpy/core/src/multiarray/calculation.o build/temp.macosx-10.15-x86_64-3.9/numpy/core/src/multiarray/compiled_base.o build/temp.macosx-10.15-x86_64-3.9/numpy/core/src/multiarray/common.o build/temp.macosx-10.15-x86_64-3.9/numpy/core/src/multiarray/convert.o build/temp.macosx-10.15-x86_64-3.9/numpy/core/src/multiarray/convert_datatype.o build/temp.macosx-10.15-x86_64-3.9/numpy/core/src/multiarray/conversion_utils.o build/temp.macosx-10.15-x86_64-3.9/numpy/core/src/multiarray/ctors.o build/temp.macosx-10.15-x86_64-3.9/numpy/core/src/multiarray/datetime.o build/temp.macosx-10.15-x86_64-3.9/numpy/core/src/multiarray/datetime_strings.o build/temp.macosx-10.15-x86_64-3.9/numpy/core/src/multiarray/datetime_busday.o build/temp.macosx-10.15-x86_64-3.9/numpy/core/src/multiarray/datetime_busdaycal.o build/temp.macosx-10.15-x86_64-3.9/numpy/core/src/multiarray/descriptor.o build/temp.macosx-10.15-x86_64-3.9/numpy/core/src/multiarray/dragon4.o build/temp.macosx-10.15-x86_64-3.9/numpy/core/src/multiarray/dtype_transfer.o build/temp.macosx-10.15-x86_64-3.9/build/src.macosx-10.15-x86_64-3.9/numpy/core/src/multiarray/einsum.o build/temp.macosx-10.15-x86_64-3.9/numpy/core/src/multiarray/flagsobject.o build/temp.macosx-10.15-x86_64-3.9/numpy/core/src/multiarray/getset.o build/temp.macosx-10.15-x86_64-3.9/numpy/core/src/multiarray/hashdescr.o build/temp.macosx-10.15-x86_64-3.9/numpy/core/src/multiarray/item_selection.o build/temp.macosx-10.15-x86_64-3.9/numpy/core/src/multiarray/iterators.o build/temp.macosx-10.15-x86_64-3.9/build/src.macosx-10.15-x86_64-3.9/numpy/core/src/multiarray/lowlevel_strided_loops.o build/temp.macosx-10.15-x86_64-3.9/numpy/core/src/multiarray/mapping.o build/temp.macosx-10.15-x86_64-3.9/numpy/core/src/multiarray/methods.o build/temp.macosx-10.15-x86_64-3.9/numpy/core/src/multiarray/multiarraymodule.o build/temp.macosx-10.15-x86_64-3.9/build/src.macosx-10.15-x86_64-3.9/numpy/core/src/multiarray/nditer_templ.o build/temp.macosx-10.15-x86_64-3.9/numpy/core/src/multiarray/nditer_api.o build/temp.macosx-10.15-x86_64-3.9/numpy/core/src/multiarray/nditer_constr.o build/temp.macosx-10.15-x86_64-3.9/numpy/core/src/multiarray/nditer_pywrap.o build/temp.macosx-10.15-x86_64-3.9/numpy/core/src/multiarray/number.o build/temp.macosx-10.15-x86_64-3.9/numpy/core/src/multiarray/refcount.o build/temp.macosx-10.15-x86_64-3.9/numpy/core/src/multiarray/sequence.o build/temp.macosx-10.15-x86_64-3.9/numpy/core/src/multiarray/shape.o build/temp.macosx-10.15-x86_64-3.9/numpy/core/src/multiarray/scalarapi.o build/temp.macosx-10.15-x86_64-3.9/build/src.macosx-10.15-x86_64-3.9/numpy/core/src/multiarray/scalartypes.o build/temp.macosx-10.15-x86_64-3.9/numpy/core/src/multiarray/strfuncs.o build/temp.macosx-10.15-x86_64-3.9/numpy/core/src/multiarray/temp_elide.o build/temp.macosx-10.15-x86_64-3.9/numpy/core/src/multiarray/typeinfo.o build/temp.macosx-10.15-x86_64-3.9/numpy/core/src/multiarray/usertypes.o build/temp.macosx-10.15-x86_64-3.9/numpy/core/src/multiarray/vdot.o build/temp.macosx-10.15-x86_64-3.9/numpy/core/src/umath/umathmodule.o build/temp.macosx-10.15-x86_64-3.9/numpy/core/src/umath/reduction.o build/temp.macosx-10.15-x86_64-3.9/build/src.macosx-10.15-x86_64-3.9/numpy/core/src/umath/loops.o build/temp.macosx-10.15-x86_64-3.9/build/src.macosx-10.15-x86_64-3.9/numpy/core/src/umath/matmul.o build/temp.macosx-10.15-x86_64-3.9/numpy/core/src/umath/ufunc_object.o build/temp.macosx-10.15-x86_64-3.9/numpy/core/src/umath/extobj.o build/temp.macosx-10.15-x86_64-3.9/numpy/core/src/umath/cpuid.o build/temp.macosx-10.15-x86_64-3.9/build/src.macosx-10.15-x86_64-3.9/numpy/core/src/umath/scalarmath.o build/temp.macosx-10.15-x86_64-3.9/numpy/core/src/umath/ufunc_type_resolution.o build/temp.macosx-10.15-x86_64-3.9/numpy/core/src/umath/override.o build/temp.macosx-10.15-x86_64-3.9/numpy/core/src/npymath/npy_math.o build/temp.macosx-10.15-x86_64-3.9/build/src.macosx-10.15-x86_64-3.9/numpy/core/src/npymath/ieee754.o build/temp.macosx-10.15-x86_64-3.9/build/src.macosx-10.15-x86_64-3.9/numpy/core/src/npymath/npy_math_complex.o build/temp.macosx-10.15-x86_64-3.9/numpy/core/src/npymath/halffloat.o build/temp.macosx-10.15-x86_64-3.9/numpy/core/src/common/array_assign.o build/temp.macosx-10.15-x86_64-3.9/numpy/core/src/common/mem_overlap.o build/temp.macosx-10.15-x86_64-3.9/numpy/core/src/common/npy_longdouble.o build/temp.macosx-10.15-x86_64-3.9/numpy/core/src/common/ucsnarrow.o build/temp.macosx-10.15-x86_64-3.9/numpy/core/src/common/ufunc_override.o build/temp.macosx-10.15-x86_64-3.9/numpy/core/src/common/numpyos.o build/temp.macosx-10.15-x86_64-3.9/numpy/core/src/common/cblasfuncs.o build/temp.macosx-10.15-x86_64-3.9/numpy/core/src/common/python_xerbla.o build/temp.macosx-10.15-x86_64-3.9/private/var/folders/fz/0j719tys48x7jlnjnwc69smr0000gn/T/pip-install-ufzck51l/numpy_b0e8a3953a1d4b46801f12bcea55536e/numpy/_build_utils/src/apple_sgemv_fix.o -L/usr/local/lib -L/usr/local/opt/[email protected]/lib -L/usr/local/opt/sqlite/lib -Lbuild/temp.macosx-10.15-x86_64-3.9 -lnpymath -lnpysort -o build/lib.macosx-10.15-x86_64-3.9/numpy/core/_multiarray_umath.cpython-39-darwin.so -Wl,-framework -Wl,Accelerate
building 'numpy.core._umath_tests' extension
compiling C sources
C compiler: clang -Wno-unused-result -Wsign-compare -Wunreachable-code -fno-common -dynamic -DNDEBUG -g -fwrapv -O3 -Wall -isysroot /Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk -I/Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk/usr/include -I/Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk/System/Library/Frameworks/Tk.framework/Versions/8.5/Headers
compile options: '-DNPY_INTERNAL_BUILD=1 -DHAVE_NPY_CONFIG_H=1 -D_FILE_OFFSET_BITS=64 -D_LARGEFILE_SOURCE=1 -D_LARGEFILE64_SOURCE=1 -Inumpy/core/include -Ibuild/src.macosx-10.15-x86_64-3.9/numpy/core/include/numpy -Inumpy/core/src/common -Inumpy/core/src -Inumpy/core -Inumpy/core/src/npymath -Inumpy/core/src/multiarray -Inumpy/core/src/umath -Inumpy/core/src/npysort -I/usr/local/include -I/usr/local/opt/[email protected]/include -I/usr/local/opt/sqlite/include -I/Users/destiny/Downloads/env/include -I/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9 -Ibuild/src.macosx-10.15-x86_64-3.9/numpy/core/src/common -Ibuild/src.macosx-10.15-x86_64-3.9/numpy/core/src/npymath -Ibuild/src.macosx-10.15-x86_64-3.9/numpy/core/src/common -Ibuild/src.macosx-10.15-x86_64-3.9/numpy/core/src/npymath -c'
clang: build/src.macosx-10.15-x86_64-3.9/numpy/core/src/umath/_umath_tests.c
clang -bundle -undefined dynamic_lookup -isysroot /Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk build/temp.macosx-10.15-x86_64-3.9/build/src.macosx-10.15-x86_64-3.9/numpy/core/src/umath/_umath_tests.o -L/usr/local/lib -L/usr/local/opt/[email protected]/lib -L/usr/local/opt/sqlite/lib -Lbuild/temp.macosx-10.15-x86_64-3.9 -o build/lib.macosx-10.15-x86_64-3.9/numpy/core/_umath_tests.cpython-39-darwin.so
building 'numpy.core._rational_tests' extension
compiling C sources
C compiler: clang -Wno-unused-result -Wsign-compare -Wunreachable-code -fno-common -dynamic -DNDEBUG -g -fwrapv -O3 -Wall -isysroot /Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk -I/Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk/usr/include -I/Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk/System/Library/Frameworks/Tk.framework/Versions/8.5/Headers
compile options: '-DNPY_INTERNAL_BUILD=1 -DHAVE_NPY_CONFIG_H=1 -D_FILE_OFFSET_BITS=64 -D_LARGEFILE_SOURCE=1 -D_LARGEFILE64_SOURCE=1 -Inumpy/core/include -Ibuild/src.macosx-10.15-x86_64-3.9/numpy/core/include/numpy -Inumpy/core/src/common -Inumpy/core/src -Inumpy/core -Inumpy/core/src/npymath -Inumpy/core/src/multiarray -Inumpy/core/src/umath -Inumpy/core/src/npysort -I/usr/local/include -I/usr/local/opt/[email protected]/include -I/usr/local/opt/sqlite/include -I/Users/destiny/Downloads/env/include -I/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9 -Ibuild/src.macosx-10.15-x86_64-3.9/numpy/core/src/common -Ibuild/src.macosx-10.15-x86_64-3.9/numpy/core/src/npymath -Ibuild/src.macosx-10.15-x86_64-3.9/numpy/core/src/common -Ibuild/src.macosx-10.15-x86_64-3.9/numpy/core/src/npymath -c'
clang: build/src.macosx-10.15-x86_64-3.9/numpy/core/src/umath/_rational_tests.c
clang -bundle -undefined dynamic_lookup -isysroot /Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk build/temp.macosx-10.15-x86_64-3.9/build/src.macosx-10.15-x86_64-3.9/numpy/core/src/umath/_rational_tests.o -L/usr/local/lib -L/usr/local/opt/[email protected]/lib -L/usr/local/opt/sqlite/lib -Lbuild/temp.macosx-10.15-x86_64-3.9 -o build/lib.macosx-10.15-x86_64-3.9/numpy/core/_rational_tests.cpython-39-darwin.so
building 'numpy.core._struct_ufunc_tests' extension
compiling C sources
C compiler: clang -Wno-unused-result -Wsign-compare -Wunreachable-code -fno-common -dynamic -DNDEBUG -g -fwrapv -O3 -Wall -isysroot /Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk -I/Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk/usr/include -I/Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk/System/Library/Frameworks/Tk.framework/Versions/8.5/Headers
compile options: '-DNPY_INTERNAL_BUILD=1 -DHAVE_NPY_CONFIG_H=1 -D_FILE_OFFSET_BITS=64 -D_LARGEFILE_SOURCE=1 -D_LARGEFILE64_SOURCE=1 -Inumpy/core/include -Ibuild/src.macosx-10.15-x86_64-3.9/numpy/core/include/numpy -Inumpy/core/src/common -Inumpy/core/src -Inumpy/core -Inumpy/core/src/npymath -Inumpy/core/src/multiarray -Inumpy/core/src/umath -Inumpy/core/src/npysort -I/usr/local/include -I/usr/local/opt/[email protected]/include -I/usr/local/opt/sqlite/include -I/Users/destiny/Downloads/env/include -I/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9 -Ibuild/src.macosx-10.15-x86_64-3.9/numpy/core/src/common -Ibuild/src.macosx-10.15-x86_64-3.9/numpy/core/src/npymath -Ibuild/src.macosx-10.15-x86_64-3.9/numpy/core/src/common -Ibuild/src.macosx-10.15-x86_64-3.9/numpy/core/src/npymath -c'
clang: build/src.macosx-10.15-x86_64-3.9/numpy/core/src/umath/_struct_ufunc_tests.c
clang -bundle -undefined dynamic_lookup -isysroot /Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk build/temp.macosx-10.15-x86_64-3.9/build/src.macosx-10.15-x86_64-3.9/numpy/core/src/umath/_struct_ufunc_tests.o -L/usr/local/lib -L/usr/local/opt/[email protected]/lib -L/usr/local/opt/sqlite/lib -Lbuild/temp.macosx-10.15-x86_64-3.9 -o build/lib.macosx-10.15-x86_64-3.9/numpy/core/_struct_ufunc_tests.cpython-39-darwin.so
building 'numpy.core._operand_flag_tests' extension
compiling C sources
C compiler: clang -Wno-unused-result -Wsign-compare -Wunreachable-code -fno-common -dynamic -DNDEBUG -g -fwrapv -O3 -Wall -isysroot /Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk -I/Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk/usr/include -I/Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk/System/Library/Frameworks/Tk.framework/Versions/8.5/Headers
compile options: '-DNPY_INTERNAL_BUILD=1 -DHAVE_NPY_CONFIG_H=1 -D_FILE_OFFSET_BITS=64 -D_LARGEFILE_SOURCE=1 -D_LARGEFILE64_SOURCE=1 -Inumpy/core/include -Ibuild/src.macosx-10.15-x86_64-3.9/numpy/core/include/numpy -Inumpy/core/src/common -Inumpy/core/src -Inumpy/core -Inumpy/core/src/npymath -Inumpy/core/src/multiarray -Inumpy/core/src/umath -Inumpy/core/src/npysort -I/usr/local/include -I/usr/local/opt/[email protected]/include -I/usr/local/opt/sqlite/include -I/Users/destiny/Downloads/env/include -I/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9 -Ibuild/src.macosx-10.15-x86_64-3.9/numpy/core/src/common -Ibuild/src.macosx-10.15-x86_64-3.9/numpy/core/src/npymath -Ibuild/src.macosx-10.15-x86_64-3.9/numpy/core/src/common -Ibuild/src.macosx-10.15-x86_64-3.9/numpy/core/src/npymath -c'
clang: build/src.macosx-10.15-x86_64-3.9/numpy/core/src/umath/_operand_flag_tests.c
clang -bundle -undefined dynamic_lookup -isysroot /Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk build/temp.macosx-10.15-x86_64-3.9/build/src.macosx-10.15-x86_64-3.9/numpy/core/src/umath/_operand_flag_tests.o -L/usr/local/lib -L/usr/local/opt/[email protected]/lib -L/usr/local/opt/sqlite/lib -Lbuild/temp.macosx-10.15-x86_64-3.9 -o build/lib.macosx-10.15-x86_64-3.9/numpy/core/_operand_flag_tests.cpython-39-darwin.so
building 'numpy.fft.fftpack_lite' extension
compiling C sources
C compiler: clang -Wno-unused-result -Wsign-compare -Wunreachable-code -fno-common -dynamic -DNDEBUG -g -fwrapv -O3 -Wall -isysroot /Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk -I/Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk/usr/include -I/Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk/System/Library/Frameworks/Tk.framework/Versions/8.5/Headers
creating build/temp.macosx-10.15-x86_64-3.9/numpy/fft
compile options: '-Inumpy/core/include -Ibuild/src.macosx-10.15-x86_64-3.9/numpy/core/include/numpy -Inumpy/core/src/common -Inumpy/core/src -Inumpy/core -Inumpy/core/src/npymath -Inumpy/core/src/multiarray -Inumpy/core/src/umath -Inumpy/core/src/npysort -I/usr/local/include -I/usr/local/opt/[email protected]/include -I/usr/local/opt/sqlite/include -I/Users/destiny/Downloads/env/include -I/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9 -Ibuild/src.macosx-10.15-x86_64-3.9/numpy/core/src/common -Ibuild/src.macosx-10.15-x86_64-3.9/numpy/core/src/npymath -Ibuild/src.macosx-10.15-x86_64-3.9/numpy/core/src/common -Ibuild/src.macosx-10.15-x86_64-3.9/numpy/core/src/npymath -c'
clang: numpy/fft/fftpack_litemodule.c
clang: numpy/fft/fftpack.c
clang -bundle -undefined dynamic_lookup -isysroot /Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk build/temp.macosx-10.15-x86_64-3.9/numpy/fft/fftpack_litemodule.o build/temp.macosx-10.15-x86_64-3.9/numpy/fft/fftpack.o -L/usr/local/lib -L/usr/local/opt/[email protected]/lib -L/usr/local/opt/sqlite/lib -Lbuild/temp.macosx-10.15-x86_64-3.9 -o build/lib.macosx-10.15-x86_64-3.9/numpy/fft/fftpack_lite.cpython-39-darwin.so
building 'numpy.linalg.lapack_lite' extension
compiling C sources
C compiler: clang -Wno-unused-result -Wsign-compare -Wunreachable-code -fno-common -dynamic -DNDEBUG -g -fwrapv -O3 -Wall -isysroot /Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk -I/Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk/usr/include -I/Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk/System/Library/Frameworks/Tk.framework/Versions/8.5/Headers
creating build/temp.macosx-10.15-x86_64-3.9/numpy/linalg
creating build/temp.macosx-10.15-x86_64-3.9/numpy/linalg/lapack_lite
compile options: '-DNO_ATLAS_INFO=3 -DHAVE_CBLAS -Inumpy/core/include -Ibuild/src.macosx-10.15-x86_64-3.9/numpy/core/include/numpy -Inumpy/core/src/common -Inumpy/core/src -Inumpy/core -Inumpy/core/src/npymath -Inumpy/core/src/multiarray -Inumpy/core/src/umath -Inumpy/core/src/npysort -I/usr/local/include -I/usr/local/opt/[email protected]/include -I/usr/local/opt/sqlite/include -I/Users/destiny/Downloads/env/include -I/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9 -Ibuild/src.macosx-10.15-x86_64-3.9/numpy/core/src/common -Ibuild/src.macosx-10.15-x86_64-3.9/numpy/core/src/npymath -Ibuild/src.macosx-10.15-x86_64-3.9/numpy/core/src/common -Ibuild/src.macosx-10.15-x86_64-3.9/numpy/core/src/npymath -c'
extra options: '-msse3 -I/System/Library/Frameworks/vecLib.framework/Headers'
clang: numpy/linalg/lapack_litemodule.c
clang: numpy/linalg/lapack_lite/python_xerbla.c
clang -bundle -undefined dynamic_lookup -isysroot /Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk build/temp.macosx-10.15-x86_64-3.9/numpy/linalg/lapack_litemodule.o build/temp.macosx-10.15-x86_64-3.9/numpy/linalg/lapack_lite/python_xerbla.o -L/usr/local/lib -L/usr/local/opt/[email protected]/lib -L/usr/local/opt/sqlite/lib -Lbuild/temp.macosx-10.15-x86_64-3.9 -o build/lib.macosx-10.15-x86_64-3.9/numpy/linalg/lapack_lite.cpython-39-darwin.so -Wl,-framework -Wl,Accelerate
building 'numpy.linalg._umath_linalg' extension
compiling C sources
C compiler: clang -Wno-unused-result -Wsign-compare -Wunreachable-code -fno-common -dynamic -DNDEBUG -g -fwrapv -O3 -Wall -isysroot /Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk -I/Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk/usr/include -I/Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk/System/Library/Frameworks/Tk.framework/Versions/8.5/Headers
creating build/temp.macosx-10.15-x86_64-3.9/build/src.macosx-10.15-x86_64-3.9/numpy/linalg
compile options: '-DNO_ATLAS_INFO=3 -DHAVE_CBLAS -Inumpy/core/include -Ibuild/src.macosx-10.15-x86_64-3.9/numpy/core/include/numpy -Inumpy/core/src/common -Inumpy/core/src -Inumpy/core -Inumpy/core/src/npymath -Inumpy/core/src/multiarray -Inumpy/core/src/umath -Inumpy/core/src/npysort -I/usr/local/include -I/usr/local/opt/[email protected]/include -I/usr/local/opt/sqlite/include -I/Users/destiny/Downloads/env/include -I/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9 -Ibuild/src.macosx-10.15-x86_64-3.9/numpy/core/src/common -Ibuild/src.macosx-10.15-x86_64-3.9/numpy/core/src/npymath -Ibuild/src.macosx-10.15-x86_64-3.9/numpy/core/src/common -Ibuild/src.macosx-10.15-x86_64-3.9/numpy/core/src/npymath -c'
extra options: '-msse3 -I/System/Library/Frameworks/vecLib.framework/Headers'
clang: build/src.macosx-10.15-x86_64-3.9/numpy/linalg/umath_linalg.c
numpy/linalg/umath_linalg.c.src:735:32: warning: unknown warning group '-Wmaybe-uninitialized', ignored [-Wunknown-warning-option]
#pragma GCC diagnostic ignored "-Wmaybe-uninitialized"
^
numpy/linalg/umath_linalg.c.src:541:1: warning: unused function 'dump_ufunc_object' [-Wunused-function]
dump_ufunc_object(PyUFuncObject* ufunc)
^
numpy/linalg/umath_linalg.c.src:566:1: warning: unused function 'dump_linearize_data' [-Wunused-function]
dump_linearize_data(const char* name, const LINEARIZE_DATA_t* params)
^
numpy/linalg/umath_linalg.c.src:602:1: warning: unused function 'dump_FLOAT_matrix' [-Wunused-function]
dump_FLOAT_matrix(const char* name,
^
numpy/linalg/umath_linalg.c.src:602:1: warning: unused function 'dump_DOUBLE_matrix' [-Wunused-function]
dump_DOUBLE_matrix(const char* name,
^
numpy/linalg/umath_linalg.c.src:602:1: warning: unused function 'dump_CFLOAT_matrix' [-Wunused-function]
dump_CFLOAT_matrix(const char* name,
^
numpy/linalg/umath_linalg.c.src:602:1: warning: unused function 'dump_CDOUBLE_matrix' [-Wunused-function]
dump_CDOUBLE_matrix(const char* name,
^
numpy/linalg/umath_linalg.c.src:865:1: warning: unused function 'zero_FLOAT_matrix' [-Wunused-function]
zero_FLOAT_matrix(void *dst_in, const LINEARIZE_DATA_t* data)
^
numpy/linalg/umath_linalg.c.src:865:1: warning: unused function 'zero_DOUBLE_matrix' [-Wunused-function]
zero_DOUBLE_matrix(void *dst_in, const LINEARIZE_DATA_t* data)
^
numpy/linalg/umath_linalg.c.src:865:1: warning: unused function 'zero_CFLOAT_matrix' [-Wunused-function]
zero_CFLOAT_matrix(void *dst_in, const LINEARIZE_DATA_t* data)
^
numpy/linalg/umath_linalg.c.src:865:1: warning: unused function 'zero_CDOUBLE_matrix' [-Wunused-function]
zero_CDOUBLE_matrix(void *dst_in, const LINEARIZE_DATA_t* data)
^
numpy/linalg/umath_linalg.c.src:1862:1: warning: unused function 'dump_geev_params' [-Wunused-function]
dump_geev_params(const char *name, GEEV_PARAMS_t* params)
^
numpy/linalg/umath_linalg.c.src:2132:1: warning: unused function 'init_cgeev' [-Wunused-function]
init_cgeev(GEEV_PARAMS_t* params,
^
numpy/linalg/umath_linalg.c.src:2213:1: warning: unused function 'process_cgeev_results' [-Wunused-function]
process_cgeev_results(GEEV_PARAMS_t *NPY_UNUSED(params))
^
numpy/linalg/umath_linalg.c.src:2376:1: warning: unused function 'dump_gesdd_params' [-Wunused-function]
dump_gesdd_params(const char *name,
^
numpy/linalg/umath_linalg.c.src:2864:1: warning: unused function 'dump_gelsd_params' [-Wunused-function]
dump_gelsd_params(const char *name,
^
16 warnings generated.
clang -bundle -undefined dynamic_lookup -isysroot /Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk build/temp.macosx-10.15-x86_64-3.9/build/src.macosx-10.15-x86_64-3.9/numpy/linalg/umath_linalg.o build/temp.macosx-10.15-x86_64-3.9/numpy/linalg/lapack_lite/python_xerbla.o -L/usr/local/lib -L/usr/local/opt/[email protected]/lib -L/usr/local/opt/sqlite/lib -Lbuild/temp.macosx-10.15-x86_64-3.9 -lnpymath -o build/lib.macosx-10.15-x86_64-3.9/numpy/linalg/_umath_linalg.cpython-39-darwin.so -Wl,-framework -Wl,Accelerate
building 'numpy.random.mtrand' extension
compiling C sources
C compiler: clang -Wno-unused-result -Wsign-compare -Wunreachable-code -fno-common -dynamic -DNDEBUG -g -fwrapv -O3 -Wall -isysroot /Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk -I/Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk/usr/include -I/Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk/System/Library/Frameworks/Tk.framework/Versions/8.5/Headers
creating build/temp.macosx-10.15-x86_64-3.9/numpy/random
creating build/temp.macosx-10.15-x86_64-3.9/numpy/random/mtrand
compile options: '-D_FILE_OFFSET_BITS=64 -D_LARGEFILE_SOURCE=1 -D_LARGEFILE64_SOURCE=1 -Inumpy/core/include -Ibuild/src.macosx-10.15-x86_64-3.9/numpy/core/include/numpy -Inumpy/core/src/common -Inumpy/core/src -Inumpy/core -Inumpy/core/src/npymath -Inumpy/core/src/multiarray -Inumpy/core/src/umath -Inumpy/core/src/npysort -I/usr/local/include -I/usr/local/opt/[email protected]/include -I/usr/local/opt/sqlite/include -I/Users/destiny/Downloads/env/include -I/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9 -Ibuild/src.macosx-10.15-x86_64-3.9/numpy/core/src/common -Ibuild/src.macosx-10.15-x86_64-3.9/numpy/core/src/npymath -Ibuild/src.macosx-10.15-x86_64-3.9/numpy/core/src/common -Ibuild/src.macosx-10.15-x86_64-3.9/numpy/core/src/npymath -c'
clang: numpy/random/mtrand/mtrand.c
clang: numpy/random/mtrand/initarray.c
clang: numpy/random/mtrand/randomkit.c
clang: numpy/random/mtrand/distributions.c
numpy/random/mtrand/mtrand.c:40400:34: error: no member named 'tp_print' in 'struct _typeobject'
__pyx_type_6mtrand_RandomState.tp_print = 0;
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ ^
numpy/random/mtrand/mtrand.c:42673:22: warning: '_PyUnicode_get_wstr_length' is deprecated [-Wdeprecated-declarations]
(PyUnicode_GET_SIZE(**name) != PyUnicode_GET_SIZE(key)) ? 1 :
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:261:7: note: expanded from macro 'PyUnicode_GET_SIZE'
PyUnicode_WSTR_LENGTH(op) : \
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:451:35: note: expanded from macro 'PyUnicode_WSTR_LENGTH'
#define PyUnicode_WSTR_LENGTH(op) _PyUnicode_get_wstr_length((PyObject*)op)
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:445:1: note: '_PyUnicode_get_wstr_length' has been explicitly marked deprecated here
Py_DEPRECATED(3.3)
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'
#define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))
^
numpy/random/mtrand/mtrand.c:42673:22: warning: 'PyUnicode_AsUnicode' is deprecated [-Wdeprecated-declarations]
(PyUnicode_GET_SIZE(**name) != PyUnicode_GET_SIZE(key)) ? 1 :
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:262:14: note: expanded from macro 'PyUnicode_GET_SIZE'
((void)PyUnicode_AsUnicode(_PyObject_CAST(op)),\
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:580:1: note: 'PyUnicode_AsUnicode' has been explicitly marked deprecated here
Py_DEPRECATED(3.3) PyAPI_FUNC(Py_UNICODE *) PyUnicode_AsUnicode(
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'
#define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))
^
numpy/random/mtrand/mtrand.c:42673:22: warning: '_PyUnicode_get_wstr_length' is deprecated [-Wdeprecated-declarations]
(PyUnicode_GET_SIZE(**name) != PyUnicode_GET_SIZE(key)) ? 1 :
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:264:8: note: expanded from macro 'PyUnicode_GET_SIZE'
PyUnicode_WSTR_LENGTH(op)))
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:451:35: note: expanded from macro 'PyUnicode_WSTR_LENGTH'
#define PyUnicode_WSTR_LENGTH(op) _PyUnicode_get_wstr_length((PyObject*)op)
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:445:1: note: '_PyUnicode_get_wstr_length' has been explicitly marked deprecated here
Py_DEPRECATED(3.3)
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'
#define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))
^
numpy/random/mtrand/mtrand.c:42673:52: warning: '_PyUnicode_get_wstr_length' is deprecated [-Wdeprecated-declarations]
(PyUnicode_GET_SIZE(**name) != PyUnicode_GET_SIZE(key)) ? 1 :
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:261:7: note: expanded from macro 'PyUnicode_GET_SIZE'
PyUnicode_WSTR_LENGTH(op) : \
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:451:35: note: expanded from macro 'PyUnicode_WSTR_LENGTH'
#define PyUnicode_WSTR_LENGTH(op) _PyUnicode_get_wstr_length((PyObject*)op)
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:445:1: note: '_PyUnicode_get_wstr_length' has been explicitly marked deprecated here
Py_DEPRECATED(3.3)
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'
#define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))
^
numpy/random/mtrand/mtrand.c:42673:52: warning: 'PyUnicode_AsUnicode' is deprecated [-Wdeprecated-declarations]
(PyUnicode_GET_SIZE(**name) != PyUnicode_GET_SIZE(key)) ? 1 :
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:262:14: note: expanded from macro 'PyUnicode_GET_SIZE'
((void)PyUnicode_AsUnicode(_PyObject_CAST(op)),\
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:580:1: note: 'PyUnicode_AsUnicode' has been explicitly marked deprecated here
Py_DEPRECATED(3.3) PyAPI_FUNC(Py_UNICODE *) PyUnicode_AsUnicode(
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'
#define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))
^
numpy/random/mtrand/mtrand.c:42673:52: warning: '_PyUnicode_get_wstr_length' is deprecated [-Wdeprecated-declarations]
(PyUnicode_GET_SIZE(**name) != PyUnicode_GET_SIZE(key)) ? 1 :
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:264:8: note: expanded from macro 'PyUnicode_GET_SIZE'
PyUnicode_WSTR_LENGTH(op)))
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:451:35: note: expanded from macro 'PyUnicode_WSTR_LENGTH'
#define PyUnicode_WSTR_LENGTH(op) _PyUnicode_get_wstr_length((PyObject*)op)
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:445:1: note: '_PyUnicode_get_wstr_length' has been explicitly marked deprecated here
Py_DEPRECATED(3.3)
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'
#define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))
^
numpy/random/mtrand/mtrand.c:42689:26: warning: '_PyUnicode_get_wstr_length' is deprecated [-Wdeprecated-declarations]
(PyUnicode_GET_SIZE(**argname) != PyUnicode_GET_SIZE(key)) ? 1 :
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:261:7: note: expanded from macro 'PyUnicode_GET_SIZE'
PyUnicode_WSTR_LENGTH(op) : \
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:451:35: note: expanded from macro 'PyUnicode_WSTR_LENGTH'
#define PyUnicode_WSTR_LENGTH(op) _PyUnicode_get_wstr_length((PyObject*)op)
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:445:1: note: '_PyUnicode_get_wstr_length' has been explicitly marked deprecated here
Py_DEPRECATED(3.3)
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'
#define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))
^
numpy/random/mtrand/mtrand.c:42689:26: warning: 'PyUnicode_AsUnicode' is deprecated [-Wdeprecated-declarations]
(PyUnicode_GET_SIZE(**argname) != PyUnicode_GET_SIZE(key)) ? 1 :
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:262:14: note: expanded from macro 'PyUnicode_GET_SIZE'
((void)PyUnicode_AsUnicode(_PyObject_CAST(op)),\
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:580:1: note: 'PyUnicode_AsUnicode' has been explicitly marked deprecated here
Py_DEPRECATED(3.3) PyAPI_FUNC(Py_UNICODE *) PyUnicode_AsUnicode(
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'
#define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))
^
numpy/random/mtrand/mtrand.c:42689:26: warning: '_PyUnicode_get_wstr_length' is deprecated [-Wdeprecated-declarations]
(PyUnicode_GET_SIZE(**argname) != PyUnicode_GET_SIZE(key)) ? 1 :
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:264:8: note: expanded from macro 'PyUnicode_GET_SIZE'
PyUnicode_WSTR_LENGTH(op)))
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:451:35: note: expanded from macro 'PyUnicode_WSTR_LENGTH'
#define PyUnicode_WSTR_LENGTH(op) _PyUnicode_get_wstr_length((PyObject*)op)
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:445:1: note: '_PyUnicode_get_wstr_length' has been explicitly marked deprecated here
Py_DEPRECATED(3.3)
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'
#define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))
^
numpy/random/mtrand/mtrand.c:42689:59: warning: '_PyUnicode_get_wstr_length' is deprecated [-Wdeprecated-declarations]
(PyUnicode_GET_SIZE(**argname) != PyUnicode_GET_SIZE(key)) ? 1 :
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:261:7: note: expanded from macro 'PyUnicode_GET_SIZE'
PyUnicode_WSTR_LENGTH(op) : \
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:451:35: note: expanded from macro 'PyUnicode_WSTR_LENGTH'
#define PyUnicode_WSTR_LENGTH(op) _PyUnicode_get_wstr_length((PyObject*)op)
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:445:1: note: '_PyUnicode_get_wstr_length' has been explicitly marked deprecated here
Py_DEPRECATED(3.3)
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'
#define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))
^
numpy/random/mtrand/mtrand.c:42689:59: warning: 'PyUnicode_AsUnicode' is deprecated [-Wdeprecated-declarations]
(PyUnicode_GET_SIZE(**argname) != PyUnicode_GET_SIZE(key)) ? 1 :
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:262:14: note: expanded from macro 'PyUnicode_GET_SIZE'
((void)PyUnicode_AsUnicode(_PyObject_CAST(op)),\
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:580:1: note: 'PyUnicode_AsUnicode' has been explicitly marked deprecated here
Py_DEPRECATED(3.3) PyAPI_FUNC(Py_UNICODE *) PyUnicode_AsUnicode(
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'
#define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))
^
numpy/random/mtrand/mtrand.c:42689:59: warning: '_PyUnicode_get_wstr_length' is deprecated [-Wdeprecated-declarations]
(PyUnicode_GET_SIZE(**argname) != PyUnicode_GET_SIZE(key)) ? 1 :
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:264:8: note: expanded from macro 'PyUnicode_GET_SIZE'
PyUnicode_WSTR_LENGTH(op)))
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:451:35: note: expanded from macro 'PyUnicode_WSTR_LENGTH'
#define PyUnicode_WSTR_LENGTH(op) _PyUnicode_get_wstr_length((PyObject*)op)
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/cpython/unicodeobject.h:445:1: note: '_PyUnicode_get_wstr_length' has been explicitly marked deprecated here
Py_DEPRECATED(3.3)
^
/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9/pyport.h:508:54: note: expanded from macro 'Py_DEPRECATED'
#define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))
^
12 warnings and 1 error generated.
error: Command "clang -Wno-unused-result -Wsign-compare -Wunreachable-code -fno-common -dynamic -DNDEBUG -g -fwrapv -O3 -Wall -isysroot /Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk -I/Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk/usr/include -I/Library/Developer/CommandLineTools/SDKs/MacOSX10.15.sdk/System/Library/Frameworks/Tk.framework/Versions/8.5/Headers -D_FILE_OFFSET_BITS=64 -D_LARGEFILE_SOURCE=1 -D_LARGEFILE64_SOURCE=1 -Inumpy/core/include -Ibuild/src.macosx-10.15-x86_64-3.9/numpy/core/include/numpy -Inumpy/core/src/common -Inumpy/core/src -Inumpy/core -Inumpy/core/src/npymath -Inumpy/core/src/multiarray -Inumpy/core/src/umath -Inumpy/core/src/npysort -I/usr/local/include -I/usr/local/opt/[email protected]/include -I/usr/local/opt/sqlite/include -I/Users/destiny/Downloads/env/include -I/usr/local/Cellar/[email protected]/3.9.0_1/Frameworks/Python.framework/Versions/3.9/include/python3.9 -Ibuild/src.macosx-10.15-x86_64-3.9/numpy/core/src/common -Ibuild/src.macosx-10.15-x86_64-3.9/numpy/core/src/npymath -Ibuild/src.macosx-10.15-x86_64-3.9/numpy/core/src/common -Ibuild/src.macosx-10.15-x86_64-3.9/numpy/core/src/npymath -c numpy/random/mtrand/mtrand.c -o build/temp.macosx-10.15-x86_64-3.9/numpy/random/mtrand/mtrand.o -MMD -MF build/temp.macosx-10.15-x86_64-3.9/numpy/random/mtrand/mtrand.o.d" failed with exit status 1 | {
"+1": 1,
"-1": 0,
"confused": 0,
"eyes": 0,
"heart": 0,
"hooray": 0,
"laugh": 0,
"rocket": 0,
"total_count": 1,
"url": "https://api.github.com/repos/huggingface/datasets/issues/1696/reactions"
} | https://api.github.com/repos/huggingface/datasets/issues/1696/timeline | null | completed | false |
https://api.github.com/repos/huggingface/datasets/issues/1695 | https://api.github.com/repos/huggingface/datasets | https://api.github.com/repos/huggingface/datasets/issues/1695/labels{/name} | https://api.github.com/repos/huggingface/datasets/issues/1695/comments | https://api.github.com/repos/huggingface/datasets/issues/1695/events | https://github.com/huggingface/datasets/pull/1695 | 780,971,987 | MDExOlB1bGxSZXF1ZXN0NTUwNzc1OTU4 | 1,695 | fix ner_tag bugs in thainer | {
"avatar_url": "https://avatars.githubusercontent.com/u/15519308?v=4",
"events_url": "https://api.github.com/users/cstorm125/events{/privacy}",
"followers_url": "https://api.github.com/users/cstorm125/followers",
"following_url": "https://api.github.com/users/cstorm125/following{/other_user}",
"gists_url": "https://api.github.com/users/cstorm125/gists{/gist_id}",
"gravatar_id": "",
"html_url": "https://github.com/cstorm125",
"id": 15519308,
"login": "cstorm125",
"node_id": "MDQ6VXNlcjE1NTE5MzA4",
"organizations_url": "https://api.github.com/users/cstorm125/orgs",
"received_events_url": "https://api.github.com/users/cstorm125/received_events",
"repos_url": "https://api.github.com/users/cstorm125/repos",
"site_admin": false,
"starred_url": "https://api.github.com/users/cstorm125/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/cstorm125/subscriptions",
"type": "User",
"url": "https://api.github.com/users/cstorm125"
} | [] | closed | false | null | [] | null | 1 | "2021-01-07T02:12:33Z" | "2021-01-07T14:43:45Z" | "2021-01-07T14:43:28Z" | CONTRIBUTOR | null | 0 | {
"diff_url": "https://github.com/huggingface/datasets/pull/1695.diff",
"html_url": "https://github.com/huggingface/datasets/pull/1695",
"merged_at": "2021-01-07T14:43:28Z",
"patch_url": "https://github.com/huggingface/datasets/pull/1695.patch",
"url": "https://api.github.com/repos/huggingface/datasets/pulls/1695"
} | Fix a bug that results in `ner_tag` always being equal to 'O'. | {
"+1": 0,
"-1": 0,
"confused": 0,
"eyes": 0,
"heart": 0,
"hooray": 0,
"laugh": 0,
"rocket": 0,
"total_count": 0,
"url": "https://api.github.com/repos/huggingface/datasets/issues/1695/reactions"
} | https://api.github.com/repos/huggingface/datasets/issues/1695/timeline | null | null | true |
https://api.github.com/repos/huggingface/datasets/issues/1694 | https://api.github.com/repos/huggingface/datasets | https://api.github.com/repos/huggingface/datasets/issues/1694/labels{/name} | https://api.github.com/repos/huggingface/datasets/issues/1694/comments | https://api.github.com/repos/huggingface/datasets/issues/1694/events | https://github.com/huggingface/datasets/pull/1694 | 780,429,080 | MDExOlB1bGxSZXF1ZXN0NTUwMzI0Mjcx | 1,694 | Add OSCAR | {
"avatar_url": "https://avatars.githubusercontent.com/u/42851186?v=4",
"events_url": "https://api.github.com/users/lhoestq/events{/privacy}",
"followers_url": "https://api.github.com/users/lhoestq/followers",
"following_url": "https://api.github.com/users/lhoestq/following{/other_user}",
"gists_url": "https://api.github.com/users/lhoestq/gists{/gist_id}",
"gravatar_id": "",
"html_url": "https://github.com/lhoestq",
"id": 42851186,
"login": "lhoestq",
"node_id": "MDQ6VXNlcjQyODUxMTg2",
"organizations_url": "https://api.github.com/users/lhoestq/orgs",
"received_events_url": "https://api.github.com/users/lhoestq/received_events",
"repos_url": "https://api.github.com/users/lhoestq/repos",
"site_admin": false,
"starred_url": "https://api.github.com/users/lhoestq/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/lhoestq/subscriptions",
"type": "User",
"url": "https://api.github.com/users/lhoestq"
} | [] | closed | false | null | [] | null | 10 | "2021-01-06T10:21:08Z" | "2021-01-25T09:10:33Z" | "2021-01-25T09:10:32Z" | MEMBER | null | 0 | {
"diff_url": "https://github.com/huggingface/datasets/pull/1694.diff",
"html_url": "https://github.com/huggingface/datasets/pull/1694",
"merged_at": "2021-01-25T09:10:32Z",
"patch_url": "https://github.com/huggingface/datasets/pull/1694.patch",
"url": "https://api.github.com/repos/huggingface/datasets/pulls/1694"
} | Continuation of #348
The files have been moved to S3 and only the unshuffled version is available.
Both original and deduplicated versions of each language are available.
Example of usage:
```python
from datasets import load_dataset
oscar_dedup_en = load_dataset("oscar", "unshuffled_deduplicated_en", split="train")
oscar_orig_fr = load_dataset("oscar", "unshuffled_original_fr", split="train")
```
cc @pjox @jonatasgrosman
-------------
To make the metadata generation work in parallel, I made a few changes to the `datasets-cli test` command to add the `num_proc` and `proc_rank` arguments. This way you can run multiple processes for the metadata computation, as in the sketch after the command below.
```
datasets-cli test ./datasets/oscar --save_infos --all_configs --num_proc 4 --proc_rank 0 --clear_cache --cache_dir tmp0
```
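For illustration, a minimal Python sketch (hypothetical usage, not part of this PR) that launches one such process per rank:
```python
import subprocess

# Launch 4 metadata-computation processes, one per rank, using the
# num_proc/proc_rank arguments described above (paths are illustrative).
procs = [
    subprocess.Popen([
        "datasets-cli", "test", "./datasets/oscar",
        "--save_infos", "--all_configs",
        "--num_proc", "4", "--proc_rank", str(rank),
        "--clear_cache", "--cache_dir", f"tmp{rank}",
    ])
    for rank in range(4)
]
for proc in procs:
    proc.wait()
```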
-------------
ToDo: add the dummy_data | {
"+1": 0,
"-1": 0,
"confused": 0,
"eyes": 0,
"heart": 0,
"hooray": 2,
"laugh": 0,
"rocket": 0,
"total_count": 2,
"url": "https://api.github.com/repos/huggingface/datasets/issues/1694/reactions"
} | https://api.github.com/repos/huggingface/datasets/issues/1694/timeline | null | null | true |
https://api.github.com/repos/huggingface/datasets/issues/1693 | https://api.github.com/repos/huggingface/datasets | https://api.github.com/repos/huggingface/datasets/issues/1693/labels{/name} | https://api.github.com/repos/huggingface/datasets/issues/1693/comments | https://api.github.com/repos/huggingface/datasets/issues/1693/events | https://github.com/huggingface/datasets/pull/1693 | 780,268,595 | MDExOlB1bGxSZXF1ZXN0NTUwMTc3MDEx | 1,693 | Fix reuters metadata parsing errors | {
"avatar_url": "https://avatars.githubusercontent.com/u/2238344?v=4",
"events_url": "https://api.github.com/users/jbragg/events{/privacy}",
"followers_url": "https://api.github.com/users/jbragg/followers",
"following_url": "https://api.github.com/users/jbragg/following{/other_user}",
"gists_url": "https://api.github.com/users/jbragg/gists{/gist_id}",
"gravatar_id": "",
"html_url": "https://github.com/jbragg",
"id": 2238344,
"login": "jbragg",
"node_id": "MDQ6VXNlcjIyMzgzNDQ=",
"organizations_url": "https://api.github.com/users/jbragg/orgs",
"received_events_url": "https://api.github.com/users/jbragg/received_events",
"repos_url": "https://api.github.com/users/jbragg/repos",
"site_admin": false,
"starred_url": "https://api.github.com/users/jbragg/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/jbragg/subscriptions",
"type": "User",
"url": "https://api.github.com/users/jbragg"
} | [] | closed | false | null | [] | null | 0 | "2021-01-06T08:26:03Z" | "2021-01-07T23:53:47Z" | "2021-01-07T14:01:22Z" | CONTRIBUTOR | null | 0 | {
"diff_url": "https://github.com/huggingface/datasets/pull/1693.diff",
"html_url": "https://github.com/huggingface/datasets/pull/1693",
"merged_at": "2021-01-07T14:01:22Z",
"patch_url": "https://github.com/huggingface/datasets/pull/1693.patch",
"url": "https://api.github.com/repos/huggingface/datasets/pulls/1693"
} | The parsing was missing the last entry in each metadata category. | {
"+1": 0,
"-1": 0,
"confused": 0,
"eyes": 0,
"heart": 0,
"hooray": 0,
"laugh": 0,
"rocket": 0,
"total_count": 0,
"url": "https://api.github.com/repos/huggingface/datasets/issues/1693/reactions"
} | https://api.github.com/repos/huggingface/datasets/issues/1693/timeline | null | null | true |
https://api.github.com/repos/huggingface/datasets/issues/1691 | https://api.github.com/repos/huggingface/datasets | https://api.github.com/repos/huggingface/datasets/issues/1691/labels{/name} | https://api.github.com/repos/huggingface/datasets/issues/1691/comments | https://api.github.com/repos/huggingface/datasets/issues/1691/events | https://github.com/huggingface/datasets/pull/1691 | 779,882,271 | MDExOlB1bGxSZXF1ZXN0NTQ5ODE3NTM0 | 1,691 | Updated HuggingFace Datasets README (fix typos) | {
"avatar_url": "https://avatars.githubusercontent.com/u/19637339?v=4",
"events_url": "https://api.github.com/users/8bitmp3/events{/privacy}",
"followers_url": "https://api.github.com/users/8bitmp3/followers",
"following_url": "https://api.github.com/users/8bitmp3/following{/other_user}",
"gists_url": "https://api.github.com/users/8bitmp3/gists{/gist_id}",
"gravatar_id": "",
"html_url": "https://github.com/8bitmp3",
"id": 19637339,
"login": "8bitmp3",
"node_id": "MDQ6VXNlcjE5NjM3MzM5",
"organizations_url": "https://api.github.com/users/8bitmp3/orgs",
"received_events_url": "https://api.github.com/users/8bitmp3/received_events",
"repos_url": "https://api.github.com/users/8bitmp3/repos",
"site_admin": false,
"starred_url": "https://api.github.com/users/8bitmp3/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/8bitmp3/subscriptions",
"type": "User",
"url": "https://api.github.com/users/8bitmp3"
} | [] | closed | false | null | [] | null | 0 | "2021-01-06T02:14:38Z" | "2021-01-16T23:30:47Z" | "2021-01-07T10:06:32Z" | CONTRIBUTOR | null | 0 | {
"diff_url": "https://github.com/huggingface/datasets/pull/1691.diff",
"html_url": "https://github.com/huggingface/datasets/pull/1691",
"merged_at": "2021-01-07T10:06:32Z",
"patch_url": "https://github.com/huggingface/datasets/pull/1691.patch",
"url": "https://api.github.com/repos/huggingface/datasets/pulls/1691"
} | Awesome work on 🤗 Datasets. I found a couple of small typos in the README. Hope this helps.

| {
"+1": 0,
"-1": 0,
"confused": 0,
"eyes": 0,
"heart": 1,
"hooray": 0,
"laugh": 0,
"rocket": 0,
"total_count": 1,
"url": "https://api.github.com/repos/huggingface/datasets/issues/1691/reactions"
} | https://api.github.com/repos/huggingface/datasets/issues/1691/timeline | null | null | true |
https://api.github.com/repos/huggingface/datasets/issues/1690 | https://api.github.com/repos/huggingface/datasets | https://api.github.com/repos/huggingface/datasets/issues/1690/labels{/name} | https://api.github.com/repos/huggingface/datasets/issues/1690/comments | https://api.github.com/repos/huggingface/datasets/issues/1690/events | https://github.com/huggingface/datasets/pull/1690 | 779,441,631 | MDExOlB1bGxSZXF1ZXN0NTQ5NDEwOTgw | 1,690 | Fast start up | {
"avatar_url": "https://avatars.githubusercontent.com/u/42851186?v=4",
"events_url": "https://api.github.com/users/lhoestq/events{/privacy}",
"followers_url": "https://api.github.com/users/lhoestq/followers",
"following_url": "https://api.github.com/users/lhoestq/following{/other_user}",
"gists_url": "https://api.github.com/users/lhoestq/gists{/gist_id}",
"gravatar_id": "",
"html_url": "https://github.com/lhoestq",
"id": 42851186,
"login": "lhoestq",
"node_id": "MDQ6VXNlcjQyODUxMTg2",
"organizations_url": "https://api.github.com/users/lhoestq/orgs",
"received_events_url": "https://api.github.com/users/lhoestq/received_events",
"repos_url": "https://api.github.com/users/lhoestq/repos",
"site_admin": false,
"starred_url": "https://api.github.com/users/lhoestq/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/lhoestq/subscriptions",
"type": "User",
"url": "https://api.github.com/users/lhoestq"
} | [] | closed | false | null | [] | null | 0 | "2021-01-05T19:07:53Z" | "2021-01-06T14:20:59Z" | "2021-01-06T14:20:58Z" | MEMBER | null | 0 | {
"diff_url": "https://github.com/huggingface/datasets/pull/1690.diff",
"html_url": "https://github.com/huggingface/datasets/pull/1690",
"merged_at": "2021-01-06T14:20:58Z",
"patch_url": "https://github.com/huggingface/datasets/pull/1690.patch",
"url": "https://api.github.com/repos/huggingface/datasets/pulls/1690"
} | Currently, if optional dependencies such as tensorflow, torch, apache_beam, faiss, and elasticsearch are installed, `import datasets` takes a long time, since it imports all of these heavy dependencies.
To give `datasets` a fast start-up, I changed this so that they are no longer imported when `datasets` itself is imported. On my machine this brought the import time of `datasets` down from 5 sec to 0.5 sec, which is enjoyable.
To check whether optional dependencies are available without importing them, I'm using `importlib_metadata`, which is part of the standard library in Python >= 3.8 and has a backport for older versions. The difference with `importlib` is that it also makes it possible to get the versions of the libraries without importing them.
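For illustration, a minimal sketch of this pattern (an assumption about the approach, not necessarily the exact code in this PR), using `torch` as the example package:
```python
# Check availability and version of an optional dependency without importing it.
import importlib.metadata as importlib_metadata  # the `importlib_metadata` backport fills in on Python < 3.8

try:
    _torch_version = importlib_metadata.version("torch")  # reads package metadata only
    _torch_available = True
except importlib_metadata.PackageNotFoundError:
    _torch_version = None
    _torch_available = False

print(_torch_available, _torch_version)
```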
I added this dependency in `setup.py`. | {
"+1": 0,
"-1": 0,
"confused": 0,
"eyes": 0,
"heart": 0,
"hooray": 3,
"laugh": 0,
"rocket": 0,
"total_count": 3,
"url": "https://api.github.com/repos/huggingface/datasets/issues/1690/reactions"
} | https://api.github.com/repos/huggingface/datasets/issues/1690/timeline | null | null | true |
https://api.github.com/repos/huggingface/datasets/issues/1689 | https://api.github.com/repos/huggingface/datasets | https://api.github.com/repos/huggingface/datasets/issues/1689/labels{/name} | https://api.github.com/repos/huggingface/datasets/issues/1689/comments | https://api.github.com/repos/huggingface/datasets/issues/1689/events | https://github.com/huggingface/datasets/pull/1689 | 779,107,313 | MDExOlB1bGxSZXF1ZXN0NTQ5MTEwMDgw | 1,689 | Fix ade_corpus_v2 config names | {
"avatar_url": "https://avatars.githubusercontent.com/u/42851186?v=4",
"events_url": "https://api.github.com/users/lhoestq/events{/privacy}",
"followers_url": "https://api.github.com/users/lhoestq/followers",
"following_url": "https://api.github.com/users/lhoestq/following{/other_user}",
"gists_url": "https://api.github.com/users/lhoestq/gists{/gist_id}",
"gravatar_id": "",
"html_url": "https://github.com/lhoestq",
"id": 42851186,
"login": "lhoestq",
"node_id": "MDQ6VXNlcjQyODUxMTg2",
"organizations_url": "https://api.github.com/users/lhoestq/orgs",
"received_events_url": "https://api.github.com/users/lhoestq/received_events",
"repos_url": "https://api.github.com/users/lhoestq/repos",
"site_admin": false,
"starred_url": "https://api.github.com/users/lhoestq/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/lhoestq/subscriptions",
"type": "User",
"url": "https://api.github.com/users/lhoestq"
} | [] | closed | false | null | [] | null | 0 | "2021-01-05T14:33:28Z" | "2021-01-05T14:55:09Z" | "2021-01-05T14:55:08Z" | MEMBER | null | 0 | {
"diff_url": "https://github.com/huggingface/datasets/pull/1689.diff",
"html_url": "https://github.com/huggingface/datasets/pull/1689",
"merged_at": "2021-01-05T14:55:08Z",
"patch_url": "https://github.com/huggingface/datasets/pull/1689.patch",
"url": "https://api.github.com/repos/huggingface/datasets/pulls/1689"
} | There are currently some typos in the config names of the `ade_corpus_v2` dataset; I fixed them:
- Ade_corpos_v2_classificaion -> Ade_corpus_v2_classification
- Ade_corpos_v2_drug_ade_relation -> Ade_corpus_v2_drug_ade_relation
- Ade_corpos_v2_drug_dosage_relation -> Ade_corpus_v2_drug_dosage_relation | {
"+1": 0,
"-1": 0,
"confused": 0,
"eyes": 0,
"heart": 0,
"hooray": 0,
"laugh": 0,
"rocket": 0,
"total_count": 0,
"url": "https://api.github.com/repos/huggingface/datasets/issues/1689/reactions"
} | https://api.github.com/repos/huggingface/datasets/issues/1689/timeline | null | null | true |
https://api.github.com/repos/huggingface/datasets/issues/1688 | https://api.github.com/repos/huggingface/datasets | https://api.github.com/repos/huggingface/datasets/issues/1688/labels{/name} | https://api.github.com/repos/huggingface/datasets/issues/1688/comments | https://api.github.com/repos/huggingface/datasets/issues/1688/events | https://github.com/huggingface/datasets/pull/1688 | 779,029,685 | MDExOlB1bGxSZXF1ZXN0NTQ5MDM5ODg0 | 1,688 | Fix DaNE last example | {
"avatar_url": "https://avatars.githubusercontent.com/u/42851186?v=4",
"events_url": "https://api.github.com/users/lhoestq/events{/privacy}",
"followers_url": "https://api.github.com/users/lhoestq/followers",
"following_url": "https://api.github.com/users/lhoestq/following{/other_user}",
"gists_url": "https://api.github.com/users/lhoestq/gists{/gist_id}",
"gravatar_id": "",
"html_url": "https://github.com/lhoestq",
"id": 42851186,
"login": "lhoestq",
"node_id": "MDQ6VXNlcjQyODUxMTg2",
"organizations_url": "https://api.github.com/users/lhoestq/orgs",
"received_events_url": "https://api.github.com/users/lhoestq/received_events",
"repos_url": "https://api.github.com/users/lhoestq/repos",
"site_admin": false,
"starred_url": "https://api.github.com/users/lhoestq/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/lhoestq/subscriptions",
"type": "User",
"url": "https://api.github.com/users/lhoestq"
} | [] | closed | false | null | [] | null | 0 | "2021-01-05T13:29:37Z" | "2021-01-05T14:00:15Z" | "2021-01-05T14:00:13Z" | MEMBER | null | 0 | {
"diff_url": "https://github.com/huggingface/datasets/pull/1688.diff",
"html_url": "https://github.com/huggingface/datasets/pull/1688",
"merged_at": "2021-01-05T14:00:13Z",
"patch_url": "https://github.com/huggingface/datasets/pull/1688.patch",
"url": "https://api.github.com/repos/huggingface/datasets/pulls/1688"
} | The last example from the DaNE dataset is empty.
Fix #1686 | {
"+1": 0,
"-1": 0,
"confused": 0,
"eyes": 0,
"heart": 0,
"hooray": 0,
"laugh": 0,
"rocket": 0,
"total_count": 0,
"url": "https://api.github.com/repos/huggingface/datasets/issues/1688/reactions"
} | https://api.github.com/repos/huggingface/datasets/issues/1688/timeline | null | null | true |
https://api.github.com/repos/huggingface/datasets/issues/1687 | https://api.github.com/repos/huggingface/datasets | https://api.github.com/repos/huggingface/datasets/issues/1687/labels{/name} | https://api.github.com/repos/huggingface/datasets/issues/1687/comments | https://api.github.com/repos/huggingface/datasets/issues/1687/events | https://github.com/huggingface/datasets/issues/1687 | 779,004,894 | MDU6SXNzdWU3NzkwMDQ4OTQ= | 1,687 | Question: Shouldn't .info be a part of DatasetDict? | {
"avatar_url": "https://avatars.githubusercontent.com/u/23721977?v=4",
"events_url": "https://api.github.com/users/KennethEnevoldsen/events{/privacy}",
"followers_url": "https://api.github.com/users/KennethEnevoldsen/followers",
"following_url": "https://api.github.com/users/KennethEnevoldsen/following{/other_user}",
"gists_url": "https://api.github.com/users/KennethEnevoldsen/gists{/gist_id}",
"gravatar_id": "",
"html_url": "https://github.com/KennethEnevoldsen",
"id": 23721977,
"login": "KennethEnevoldsen",
"node_id": "MDQ6VXNlcjIzNzIxOTc3",
"organizations_url": "https://api.github.com/users/KennethEnevoldsen/orgs",
"received_events_url": "https://api.github.com/users/KennethEnevoldsen/received_events",
"repos_url": "https://api.github.com/users/KennethEnevoldsen/repos",
"site_admin": false,
"starred_url": "https://api.github.com/users/KennethEnevoldsen/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/KennethEnevoldsen/subscriptions",
"type": "User",
"url": "https://api.github.com/users/KennethEnevoldsen"
} | [] | open | false | null | [] | null | 2 | "2021-01-05T13:08:41Z" | "2021-01-07T10:18:06Z" | null | CONTRIBUTOR | null | null | null | Currently, only `Dataset` contains the .info or .features, but as many datasets contains standard splits (train, test) and thus the underlying information is the same (or at least should be) across the datasets.
For instance:
```
>>> ds = datasets.load_dataset("conll2002", "es")
>>> ds.info
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
AttributeError: 'DatasetDict' object has no attribute 'info'
```
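In the meantime, a minimal workaround sketch (an assumption, not an official API) is to read the attributes from one of the splits, since they should share the same metadata:
```python
from datasets import load_dataset

ds = load_dataset("conll2002", "es")
# DatasetDict has no .info, but each split does; splits of the same dataset
# are expected to share features, so reading one split is usually enough.
info = ds["train"].info
features = ds["train"].features
```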
I could imagine that this wouldn't work for dataset dicts which hold entirely different datasets (multimodal datasets), but it seems odd that splits of the same dataset are treated the same way as what are essentially different datasets.
Intuitively, it would also make sense that if a dataset is supplied via `load_dataset`, it has a common `.info` which covers the entire dataset.
It is entirely possible that I am missing another perspective | {
"+1": 0,
"-1": 0,
"confused": 0,
"eyes": 0,
"heart": 0,
"hooray": 0,
"laugh": 0,
"rocket": 0,
"total_count": 0,
"url": "https://api.github.com/repos/huggingface/datasets/issues/1687/reactions"
} | https://api.github.com/repos/huggingface/datasets/issues/1687/timeline | null | null | false |
https://api.github.com/repos/huggingface/datasets/issues/1686 | https://api.github.com/repos/huggingface/datasets | https://api.github.com/repos/huggingface/datasets/issues/1686/labels{/name} | https://api.github.com/repos/huggingface/datasets/issues/1686/comments | https://api.github.com/repos/huggingface/datasets/issues/1686/events | https://github.com/huggingface/datasets/issues/1686 | 778,921,684 | MDU6SXNzdWU3Nzg5MjE2ODQ= | 1,686 | Dataset Error: DaNE contains empty samples at the end | {
"avatar_url": "https://avatars.githubusercontent.com/u/23721977?v=4",
"events_url": "https://api.github.com/users/KennethEnevoldsen/events{/privacy}",
"followers_url": "https://api.github.com/users/KennethEnevoldsen/followers",
"following_url": "https://api.github.com/users/KennethEnevoldsen/following{/other_user}",
"gists_url": "https://api.github.com/users/KennethEnevoldsen/gists{/gist_id}",
"gravatar_id": "",
"html_url": "https://github.com/KennethEnevoldsen",
"id": 23721977,
"login": "KennethEnevoldsen",
"node_id": "MDQ6VXNlcjIzNzIxOTc3",
"organizations_url": "https://api.github.com/users/KennethEnevoldsen/orgs",
"received_events_url": "https://api.github.com/users/KennethEnevoldsen/received_events",
"repos_url": "https://api.github.com/users/KennethEnevoldsen/repos",
"site_admin": false,
"starred_url": "https://api.github.com/users/KennethEnevoldsen/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/KennethEnevoldsen/subscriptions",
"type": "User",
"url": "https://api.github.com/users/KennethEnevoldsen"
} | [] | closed | false | null | [] | null | 3 | "2021-01-05T11:54:26Z" | "2021-01-05T14:01:09Z" | "2021-01-05T14:00:13Z" | CONTRIBUTOR | null | null | null | The dataset DaNE, contains empty samples at the end. It is naturally easy to remove using a filter but should probably not be there, to begin with as it can cause errors.
```python
>>> import datasets
[...]
>>> dataset = datasets.load_dataset("dane")
[...]
>>> dataset["test"][-1]
{'dep_ids': [], 'dep_labels': [], 'lemmas': [], 'morph_tags': [], 'ner_tags': [], 'pos_tags': [], 'sent_id': '', 'text': '', 'tok_ids': [], 'tokens': []}
>>> dataset["train"][-1]
{'dep_ids': [], 'dep_labels': [], 'lemmas': [], 'morph_tags': [], 'ner_tags': [], 'pos_tags': [], 'sent_id': '', 'text': '', 'tok_ids': [], 'tokens': []}
```
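For reference, a minimal sketch of the filter mentioned above (assuming an empty `text` field identifies the empty samples):
```python
import datasets

dataset = datasets.load_dataset("dane")
# Drop the empty trailing samples from every split.
dataset = dataset.filter(lambda example: len(example["text"]) > 0)
```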
Best,
Kenneth | {
"+1": 0,
"-1": 0,
"confused": 0,
"eyes": 0,
"heart": 0,
"hooray": 0,
"laugh": 0,
"rocket": 0,
"total_count": 0,
"url": "https://api.github.com/repos/huggingface/datasets/issues/1686/reactions"
} | https://api.github.com/repos/huggingface/datasets/issues/1686/timeline | null | completed | false |
https://api.github.com/repos/huggingface/datasets/issues/1685 | https://api.github.com/repos/huggingface/datasets | https://api.github.com/repos/huggingface/datasets/issues/1685/labels{/name} | https://api.github.com/repos/huggingface/datasets/issues/1685/comments | https://api.github.com/repos/huggingface/datasets/issues/1685/events | https://github.com/huggingface/datasets/pull/1685 | 778,914,431 | MDExOlB1bGxSZXF1ZXN0NTQ4OTM1MzY2 | 1,685 | Update README.md of covid-tweets-japanese | {
"avatar_url": "https://avatars.githubusercontent.com/u/2755894?v=4",
"events_url": "https://api.github.com/users/forest1988/events{/privacy}",
"followers_url": "https://api.github.com/users/forest1988/followers",
"following_url": "https://api.github.com/users/forest1988/following{/other_user}",
"gists_url": "https://api.github.com/users/forest1988/gists{/gist_id}",
"gravatar_id": "",
"html_url": "https://github.com/forest1988",
"id": 2755894,
"login": "forest1988",
"node_id": "MDQ6VXNlcjI3NTU4OTQ=",
"organizations_url": "https://api.github.com/users/forest1988/orgs",
"received_events_url": "https://api.github.com/users/forest1988/received_events",
"repos_url": "https://api.github.com/users/forest1988/repos",
"site_admin": false,
"starred_url": "https://api.github.com/users/forest1988/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/forest1988/subscriptions",
"type": "User",
"url": "https://api.github.com/users/forest1988"
} | [] | closed | false | null | [] | null | 1 | "2021-01-05T11:47:27Z" | "2021-01-06T10:27:12Z" | "2021-01-06T09:31:10Z" | CONTRIBUTOR | null | 0 | {
"diff_url": "https://github.com/huggingface/datasets/pull/1685.diff",
"html_url": "https://github.com/huggingface/datasets/pull/1685",
"merged_at": "2021-01-06T09:31:10Z",
"patch_url": "https://github.com/huggingface/datasets/pull/1685.patch",
"url": "https://api.github.com/repos/huggingface/datasets/pulls/1685"
} | Update README.md of covid-tweets-japanese added by PR https://github.com/huggingface/datasets/pull/1367 and https://github.com/huggingface/datasets/pull/1402.
- Update "Data Splits" to be more precise that no information is provided for now.
- old: [More Information Needed]
- new: No information about data splits is provided for now.
- The automatic generation of links did not seem to work properly, so I added a space before and after the URL to make the links work correctly. | {
"+1": 0,
"-1": 0,
"confused": 0,
"eyes": 0,
"heart": 0,
"hooray": 0,
"laugh": 0,
"rocket": 0,
"total_count": 0,
"url": "https://api.github.com/repos/huggingface/datasets/issues/1685/reactions"
} | https://api.github.com/repos/huggingface/datasets/issues/1685/timeline | null | null | true |
https://api.github.com/repos/huggingface/datasets/issues/1684 | https://api.github.com/repos/huggingface/datasets | https://api.github.com/repos/huggingface/datasets/issues/1684/labels{/name} | https://api.github.com/repos/huggingface/datasets/issues/1684/comments | https://api.github.com/repos/huggingface/datasets/issues/1684/events | https://github.com/huggingface/datasets/pull/1684 | 778,356,196 | MDExOlB1bGxSZXF1ZXN0NTQ4NDU3NDY1 | 1,684 | Add CANER Corpus | {
"avatar_url": "https://avatars.githubusercontent.com/u/35491698?v=4",
"events_url": "https://api.github.com/users/KMFODA/events{/privacy}",
"followers_url": "https://api.github.com/users/KMFODA/followers",
"following_url": "https://api.github.com/users/KMFODA/following{/other_user}",
"gists_url": "https://api.github.com/users/KMFODA/gists{/gist_id}",
"gravatar_id": "",
"html_url": "https://github.com/KMFODA",
"id": 35491698,
"login": "KMFODA",
"node_id": "MDQ6VXNlcjM1NDkxNjk4",
"organizations_url": "https://api.github.com/users/KMFODA/orgs",
"received_events_url": "https://api.github.com/users/KMFODA/received_events",
"repos_url": "https://api.github.com/users/KMFODA/repos",
"site_admin": false,
"starred_url": "https://api.github.com/users/KMFODA/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/KMFODA/subscriptions",
"type": "User",
"url": "https://api.github.com/users/KMFODA"
} | [] | closed | false | null | [] | null | 0 | "2021-01-04T20:49:11Z" | "2021-01-25T09:09:20Z" | "2021-01-25T09:09:20Z" | CONTRIBUTOR | null | 0 | {
"diff_url": "https://github.com/huggingface/datasets/pull/1684.diff",
"html_url": "https://github.com/huggingface/datasets/pull/1684",
"merged_at": "2021-01-25T09:09:20Z",
"patch_url": "https://github.com/huggingface/datasets/pull/1684.patch",
"url": "https://api.github.com/repos/huggingface/datasets/pulls/1684"
} | What does this PR do?
Adds the following dataset:
https://github.com/RamziSalah/Classical-Arabic-Named-Entity-Recognition-Corpus
Who can review?
@lhoestq | {
"+1": 0,
"-1": 0,
"confused": 0,
"eyes": 0,
"heart": 0,
"hooray": 0,
"laugh": 0,
"rocket": 0,
"total_count": 0,
"url": "https://api.github.com/repos/huggingface/datasets/issues/1684/reactions"
} | https://api.github.com/repos/huggingface/datasets/issues/1684/timeline | null | null | true |
https://api.github.com/repos/huggingface/datasets/issues/1683 | https://api.github.com/repos/huggingface/datasets | https://api.github.com/repos/huggingface/datasets/issues/1683/labels{/name} | https://api.github.com/repos/huggingface/datasets/issues/1683/comments | https://api.github.com/repos/huggingface/datasets/issues/1683/events | https://github.com/huggingface/datasets/issues/1683 | 778,287,612 | MDU6SXNzdWU3NzgyODc2MTI= | 1,683 | `ArrowInvalid` occurs while running `Dataset.map()` function for DPRContext | {
"avatar_url": "https://avatars.githubusercontent.com/u/6608232?v=4",
"events_url": "https://api.github.com/users/abarbosa94/events{/privacy}",
"followers_url": "https://api.github.com/users/abarbosa94/followers",
"following_url": "https://api.github.com/users/abarbosa94/following{/other_user}",
"gists_url": "https://api.github.com/users/abarbosa94/gists{/gist_id}",
"gravatar_id": "",
"html_url": "https://github.com/abarbosa94",
"id": 6608232,
"login": "abarbosa94",
"node_id": "MDQ6VXNlcjY2MDgyMzI=",
"organizations_url": "https://api.github.com/users/abarbosa94/orgs",
"received_events_url": "https://api.github.com/users/abarbosa94/received_events",
"repos_url": "https://api.github.com/users/abarbosa94/repos",
"site_admin": false,
"starred_url": "https://api.github.com/users/abarbosa94/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/abarbosa94/subscriptions",
"type": "User",
"url": "https://api.github.com/users/abarbosa94"
} | [] | closed | false | null | [] | null | 2 | "2021-01-04T18:47:53Z" | "2021-01-04T19:04:45Z" | "2021-01-04T19:04:45Z" | CONTRIBUTOR | null | null | null | It seems to fail the final batch ):
steps to reproduce:
```
from datasets import load_dataset
from elasticsearch import Elasticsearch
import torch
from transformers import file_utils, set_seed
from transformers import DPRContextEncoder, DPRContextEncoderTokenizerFast

MAX_SEQ_LENGTH = 256
ctx_encoder = DPRContextEncoder.from_pretrained("facebook/dpr-ctx_encoder-single-nq-base", cache_dir="../datasets/")
ctx_tokenizer = DPRContextEncoderTokenizerFast.from_pretrained(
    "facebook/dpr-ctx_encoder-single-nq-base",
    cache_dir="../datasets/"
)
dataset = load_dataset('text',
                       data_files='data/raw/ARC_Corpus.txt',
                       cache_dir='../datasets')
torch.set_grad_enabled(False)
ds_with_embeddings = dataset.map(
    lambda example: {
        'embeddings': ctx_encoder(
            **ctx_tokenizer(
                example["text"],
                padding='max_length',
                truncation=True,
                max_length=MAX_SEQ_LENGTH,
                return_tensors="pt"
            )
        )[0][0].numpy(),
    },
    batched=True,
    load_from_cache_file=False,
    batch_size=1000
)
```
ARC Corpus can be obtained from [here](https://ai2-datasets.s3-us-west-2.amazonaws.com/arc/ARC-V1-Feb2018.zip)
And then the error:
```
---------------------------------------------------------------------------
ArrowInvalid Traceback (most recent call last)
<ipython-input-13-67d139bb2ed3> in <module>
14 batched=True,
15 load_from_cache_file=False,
---> 16 batch_size=1000
17 )
~/.cache/pypoetry/virtualenvs/masters-utTTC0p8-py3.7/lib/python3.7/site-packages/datasets/dataset_dict.py in map(self, function, with_indices, input_columns, batched, batch_size, remove_columns, keep_in_memory, load_from_cache_file, cache_file_names, writer_batch_size, features, disable_nullable, fn_kwargs, num_proc)
301 num_proc=num_proc,
302 )
--> 303 for k, dataset in self.items()
304 }
305 )
~/.cache/pypoetry/virtualenvs/masters-utTTC0p8-py3.7/lib/python3.7/site-packages/datasets/dataset_dict.py in <dictcomp>(.0)
301 num_proc=num_proc,
302 )
--> 303 for k, dataset in self.items()
304 }
305 )
~/.cache/pypoetry/virtualenvs/masters-utTTC0p8-py3.7/lib/python3.7/site-packages/datasets/arrow_dataset.py in map(self, function, with_indices, input_columns, batched, batch_size, drop_last_batch, remove_columns, keep_in_memory, load_from_cache_file, cache_file_name, writer_batch_size, features, disable_nullable, fn_kwargs, num_proc, suffix_template, new_fingerprint)
1257 fn_kwargs=fn_kwargs,
1258 new_fingerprint=new_fingerprint,
-> 1259 update_data=update_data,
1260 )
1261 else:
~/.cache/pypoetry/virtualenvs/masters-utTTC0p8-py3.7/lib/python3.7/site-packages/datasets/arrow_dataset.py in wrapper(*args, **kwargs)
155 }
156 # apply actual function
--> 157 out: Union["Dataset", "DatasetDict"] = func(self, *args, **kwargs)
158 datasets: List["Dataset"] = list(out.values()) if isinstance(out, dict) else [out]
159 # re-apply format to the output
~/.cache/pypoetry/virtualenvs/masters-utTTC0p8-py3.7/lib/python3.7/site-packages/datasets/fingerprint.py in wrapper(*args, **kwargs)
161 # Call actual function
162
--> 163 out = func(self, *args, **kwargs)
164
165 # Update fingerprint of in-place transforms + update in-place history of transforms
~/.cache/pypoetry/virtualenvs/masters-utTTC0p8-py3.7/lib/python3.7/site-packages/datasets/arrow_dataset.py in _map_single(self, function, with_indices, input_columns, batched, batch_size, drop_last_batch, remove_columns, keep_in_memory, load_from_cache_file, cache_file_name, writer_batch_size, features, disable_nullable, fn_kwargs, new_fingerprint, rank, offset, update_data)
1526 if update_data:
1527 batch = cast_to_python_objects(batch)
-> 1528 writer.write_batch(batch)
1529 if update_data:
1530 writer.finalize() # close_stream=bool(buf_writer is None)) # We only close if we are writing in a file
~/.cache/pypoetry/virtualenvs/masters-utTTC0p8-py3.7/lib/python3.7/site-packages/datasets/arrow_writer.py in write_batch(self, batch_examples, writer_batch_size)
276 typed_sequence = TypedSequence(batch_examples[col], type=col_type, try_type=col_try_type)
277 typed_sequence_examples[col] = typed_sequence
--> 278 pa_table = pa.Table.from_pydict(typed_sequence_examples)
279 self.write_table(pa_table)
280
~/.cache/pypoetry/virtualenvs/masters-utTTC0p8-py3.7/lib/python3.7/site-packages/pyarrow/table.pxi in pyarrow.lib.Table.from_pydict()
~/.cache/pypoetry/virtualenvs/masters-utTTC0p8-py3.7/lib/python3.7/site-packages/pyarrow/table.pxi in pyarrow.lib.Table.from_arrays()
~/.cache/pypoetry/virtualenvs/masters-utTTC0p8-py3.7/lib/python3.7/site-packages/pyarrow/table.pxi in pyarrow.lib.Table.validate()
~/.cache/pypoetry/virtualenvs/masters-utTTC0p8-py3.7/lib/python3.7/site-packages/pyarrow/error.pxi in pyarrow.lib.check_status()
ArrowInvalid: Column 1 named text expected length 768 but got length 1000
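# Editor's note (an assumption, not part of the original report): `[0][0]`
# picks a single 768-dim embedding instead of one embedding per example, so
# the 'embeddings' column ends up with length 768 while 'text' keeps the
# batch length 1000; returning `(...)[0].numpy()` would align the columns.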
``` | {
"+1": 0,
"-1": 0,
"confused": 0,
"eyes": 0,
"heart": 0,
"hooray": 0,
"laugh": 0,
"rocket": 0,
"total_count": 0,
"url": "https://api.github.com/repos/huggingface/datasets/issues/1683/reactions"
} | https://api.github.com/repos/huggingface/datasets/issues/1683/timeline | null | completed | false |
https://api.github.com/repos/huggingface/datasets/issues/1682 | https://api.github.com/repos/huggingface/datasets | https://api.github.com/repos/huggingface/datasets/issues/1682/labels{/name} | https://api.github.com/repos/huggingface/datasets/issues/1682/comments | https://api.github.com/repos/huggingface/datasets/issues/1682/events | https://github.com/huggingface/datasets/pull/1682 | 778,268,156 | MDExOlB1bGxSZXF1ZXN0NTQ4Mzg1NTk1 | 1,682 | Don't use xlrd for xlsx files | {
"avatar_url": "https://avatars.githubusercontent.com/u/42851186?v=4",
"events_url": "https://api.github.com/users/lhoestq/events{/privacy}",
"followers_url": "https://api.github.com/users/lhoestq/followers",
"following_url": "https://api.github.com/users/lhoestq/following{/other_user}",
"gists_url": "https://api.github.com/users/lhoestq/gists{/gist_id}",
"gravatar_id": "",
"html_url": "https://github.com/lhoestq",
"id": 42851186,
"login": "lhoestq",
"node_id": "MDQ6VXNlcjQyODUxMTg2",
"organizations_url": "https://api.github.com/users/lhoestq/orgs",
"received_events_url": "https://api.github.com/users/lhoestq/received_events",
"repos_url": "https://api.github.com/users/lhoestq/repos",
"site_admin": false,
"starred_url": "https://api.github.com/users/lhoestq/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/lhoestq/subscriptions",
"type": "User",
"url": "https://api.github.com/users/lhoestq"
} | [] | closed | false | null | [] | null | 0 | "2021-01-04T18:11:50Z" | "2021-01-04T18:13:14Z" | "2021-01-04T18:13:13Z" | MEMBER | null | 0 | {
"diff_url": "https://github.com/huggingface/datasets/pull/1682.diff",
"html_url": "https://github.com/huggingface/datasets/pull/1682",
"merged_at": "2021-01-04T18:13:13Z",
"patch_url": "https://github.com/huggingface/datasets/pull/1682.patch",
"url": "https://api.github.com/repos/huggingface/datasets/pulls/1682"
} | Since the latest release of `xlrd` (2.0), support for xlsx files has been dropped.
Therefore we needed to use something else.
A good alternative is `openpyxl`, which also has an integration with pandas, so we can still call `pd.read_excel`.
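As a sketch of the idea (the file name is a placeholder, not taken from this PR), pandas can be pointed at `openpyxl` explicitly:
```python
import openpyxl  # noqa: F401  # kept visible so users see the required dependency
import pandas as pd

# openpyxl replaces the xlsx support that xlrd 2.0 dropped; passing the
# engine explicitly makes the dependency requirement obvious.
df = pd.read_excel("data.xlsx", engine="openpyxl")
```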
I left the unused import of `openpyxl` in the dataset scripts to show users that this is a required dependency to use the scripts.
I tested the different datasets using `datasets-cli test` and the tests are successful (no missing examples). | {
"+1": 0,
"-1": 0,
"confused": 0,
"eyes": 0,
"heart": 0,
"hooray": 0,
"laugh": 0,
"rocket": 0,
"total_count": 0,
"url": "https://api.github.com/repos/huggingface/datasets/issues/1682/reactions"
} | https://api.github.com/repos/huggingface/datasets/issues/1682/timeline | null | null | true |
https://api.github.com/repos/huggingface/datasets/issues/1681 | https://api.github.com/repos/huggingface/datasets | https://api.github.com/repos/huggingface/datasets/issues/1681/labels{/name} | https://api.github.com/repos/huggingface/datasets/issues/1681/comments | https://api.github.com/repos/huggingface/datasets/issues/1681/events | https://github.com/huggingface/datasets/issues/1681 | 777,644,163 | MDU6SXNzdWU3Nzc2NDQxNjM= | 1,681 | Dataset "dane" missing | {
"avatar_url": "https://avatars.githubusercontent.com/u/23721977?v=4",
"events_url": "https://api.github.com/users/KennethEnevoldsen/events{/privacy}",
"followers_url": "https://api.github.com/users/KennethEnevoldsen/followers",
"following_url": "https://api.github.com/users/KennethEnevoldsen/following{/other_user}",
"gists_url": "https://api.github.com/users/KennethEnevoldsen/gists{/gist_id}",
"gravatar_id": "",
"html_url": "https://github.com/KennethEnevoldsen",
"id": 23721977,
"login": "KennethEnevoldsen",
"node_id": "MDQ6VXNlcjIzNzIxOTc3",
"organizations_url": "https://api.github.com/users/KennethEnevoldsen/orgs",
"received_events_url": "https://api.github.com/users/KennethEnevoldsen/received_events",
"repos_url": "https://api.github.com/users/KennethEnevoldsen/repos",
"site_admin": false,
"starred_url": "https://api.github.com/users/KennethEnevoldsen/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/KennethEnevoldsen/subscriptions",
"type": "User",
"url": "https://api.github.com/users/KennethEnevoldsen"
} | [] | closed | false | null | [] | null | 3 | "2021-01-03T14:03:03Z" | "2021-01-05T08:35:35Z" | "2021-01-05T08:35:13Z" | CONTRIBUTOR | null | null | null | the `dane` dataset appear to be missing in the latest version (1.1.3).
```python
>>> import datasets
>>> datasets.__version__
'1.1.3'
>>> "dane" in datasets.list_datasets()
True
```
As we can see, it should be present, but it doesn't seem to be findable when using `load_dataset`.
```python
>>> datasets.load_dataset("dane")
Traceback (most recent call last):
File "/home/kenneth/.Envs/EDP/lib/python3.8/site-packages/datasets/load.py", line 267, in prepare_module
local_path = cached_path(file_path, download_config=download_config)
File "/home/kenneth/.Envs/EDP/lib/python3.8/site-packages/datasets/utils/file_utils.py", line 300, in cached_path
output_path = get_from_cache(
File "/home/kenneth/.Envs/EDP/lib/python3.8/site-packages/datasets/utils/file_utils.py", line 486, in get_from_cache
raise FileNotFoundError("Couldn't find file at {}".format(url))
FileNotFoundError: Couldn't find file at https://raw.githubusercontent.com/huggingface/datasets/1.1.3/datasets/dane/dane.py
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/kenneth/.Envs/EDP/lib/python3.8/site-packages/datasets/load.py", line 278, in prepare_module
local_path = cached_path(file_path, download_config=download_config)
File "/home/kenneth/.Envs/EDP/lib/python3.8/site-packages/datasets/utils/file_utils.py", line 300, in cached_path
output_path = get_from_cache(
File "/home/kenneth/.Envs/EDP/lib/python3.8/site-packages/datasets/utils/file_utils.py", line 486, in get_from_cache
raise FileNotFoundError("Couldn't find file at {}".format(url))
FileNotFoundError: Couldn't find file at https://s3.amazonaws.com/datasets.huggingface.co/datasets/datasets/dane/dane.py
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/home/kenneth/.Envs/EDP/lib/python3.8/site-packages/datasets/load.py", line 588, in load_dataset
module_path, hash = prepare_module(
File "/home/kenneth/.Envs/EDP/lib/python3.8/site-packages/datasets/load.py", line 280, in prepare_module
raise FileNotFoundError(
FileNotFoundError: Couldn't find file locally at dane/dane.py, or remotely at https://raw.githubusercontent.com/huggingface/datasets/1.1.3/datasets/dane/dane.py or https://s3.amazonaws.com/datasets.huggingface.co/datasets/datasets/dane/dane.py
```
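A possible workaround until a release that includes the script is out (a sketch; it assumes the `script_version` argument, which fetches the loading script from the given git revision):
```python
import datasets

# Pull the `dane` loading script from the master branch instead of the 1.1.3 release tag.
dane = datasets.load_dataset("dane", script_version="master")
```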
This issue might be relevant to @ophelielacroix from the Alexandra Institute, who created the data. | {
"+1": 0,
"-1": 0,
"confused": 0,
"eyes": 0,
"heart": 0,
"hooray": 0,
"laugh": 0,
"rocket": 0,
"total_count": 0,
"url": "https://api.github.com/repos/huggingface/datasets/issues/1681/reactions"
} | https://api.github.com/repos/huggingface/datasets/issues/1681/timeline | null | completed | false |
https://api.github.com/repos/huggingface/datasets/issues/1680 | https://api.github.com/repos/huggingface/datasets | https://api.github.com/repos/huggingface/datasets/issues/1680/labels{/name} | https://api.github.com/repos/huggingface/datasets/issues/1680/comments | https://api.github.com/repos/huggingface/datasets/issues/1680/events | https://github.com/huggingface/datasets/pull/1680 | 777,623,053 | MDExOlB1bGxSZXF1ZXN0NTQ3ODY4MjEw | 1,680 | added TurkishProductReviews dataset | {
"avatar_url": "https://avatars.githubusercontent.com/u/41359672?v=4",
"events_url": "https://api.github.com/users/basakbuluz/events{/privacy}",
"followers_url": "https://api.github.com/users/basakbuluz/followers",
"following_url": "https://api.github.com/users/basakbuluz/following{/other_user}",
"gists_url": "https://api.github.com/users/basakbuluz/gists{/gist_id}",
"gravatar_id": "",
"html_url": "https://github.com/basakbuluz",
"id": 41359672,
"login": "basakbuluz",
"node_id": "MDQ6VXNlcjQxMzU5Njcy",
"organizations_url": "https://api.github.com/users/basakbuluz/orgs",
"received_events_url": "https://api.github.com/users/basakbuluz/received_events",
"repos_url": "https://api.github.com/users/basakbuluz/repos",
"site_admin": false,
"starred_url": "https://api.github.com/users/basakbuluz/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/basakbuluz/subscriptions",
"type": "User",
"url": "https://api.github.com/users/basakbuluz"
} | [] | closed | false | null | [] | null | 2 | "2021-01-03T11:52:59Z" | "2021-01-04T18:15:35Z" | "2021-01-04T18:15:35Z" | CONTRIBUTOR | null | 0 | {
"diff_url": "https://github.com/huggingface/datasets/pull/1680.diff",
"html_url": "https://github.com/huggingface/datasets/pull/1680",
"merged_at": "2021-01-04T18:15:35Z",
"patch_url": "https://github.com/huggingface/datasets/pull/1680.patch",
"url": "https://api.github.com/repos/huggingface/datasets/pulls/1680"
} | This PR added the **Turkish Product Reviews** dataset, which contains 235,165 product reviews collected online: 220,284 positive and 14,881 negative.
- **Repository:** [turkish-text-data](https://github.com/fthbrmnby/turkish-text-data)
- **Point of Contact:** Fatih Barmanbay - @fthbrmnby | {
"+1": 0,
"-1": 0,
"confused": 0,
"eyes": 0,
"heart": 0,
"hooray": 0,
"laugh": 0,
"rocket": 0,
"total_count": 0,
"url": "https://api.github.com/repos/huggingface/datasets/issues/1680/reactions"
} | https://api.github.com/repos/huggingface/datasets/issues/1680/timeline | null | null | true |
https://api.github.com/repos/huggingface/datasets/issues/1679 | https://api.github.com/repos/huggingface/datasets | https://api.github.com/repos/huggingface/datasets/issues/1679/labels{/name} | https://api.github.com/repos/huggingface/datasets/issues/1679/comments | https://api.github.com/repos/huggingface/datasets/issues/1679/events | https://github.com/huggingface/datasets/issues/1679 | 777,587,792 | MDU6SXNzdWU3Nzc1ODc3OTI= | 1,679 | Can't import cc100 dataset | {
"avatar_url": "https://avatars.githubusercontent.com/u/14968123?v=4",
"events_url": "https://api.github.com/users/alighofrani95/events{/privacy}",
"followers_url": "https://api.github.com/users/alighofrani95/followers",
"following_url": "https://api.github.com/users/alighofrani95/following{/other_user}",
"gists_url": "https://api.github.com/users/alighofrani95/gists{/gist_id}",
"gravatar_id": "",
"html_url": "https://github.com/alighofrani95",
"id": 14968123,
"login": "alighofrani95",
"node_id": "MDQ6VXNlcjE0OTY4MTIz",
"organizations_url": "https://api.github.com/users/alighofrani95/orgs",
"received_events_url": "https://api.github.com/users/alighofrani95/received_events",
"repos_url": "https://api.github.com/users/alighofrani95/repos",
"site_admin": false,
"starred_url": "https://api.github.com/users/alighofrani95/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/alighofrani95/subscriptions",
"type": "User",
"url": "https://api.github.com/users/alighofrani95"
} | [] | closed | false | null | [] | null | 1 | "2021-01-03T07:12:56Z" | "2022-10-05T12:42:25Z" | "2022-10-05T12:42:25Z" | NONE | null | null | null | There is an issue when importing the cc100 dataset.
```
from datasets import load_dataset
dataset = load_dataset("cc100")
```
FileNotFoundError: Couldn't find file at https://raw.githubusercontent.com/huggingface/datasets/1.1.3/datasets/cc100/cc100.py
During handling of the above exception, another exception occurred:
FileNotFoundError Traceback (most recent call last)
FileNotFoundError: Couldn't find file at https://s3.amazonaws.com/datasets.huggingface.co/datasets/datasets/cc100/cc100.py
During handling of the above exception, another exception occurred:
FileNotFoundError Traceback (most recent call last)
/usr/local/lib/python3.6/dist-packages/datasets/load.py in prepare_module(path, script_version, download_config, download_mode, dataset, force_local_path, **download_kwargs)
280 raise FileNotFoundError(
281 "Couldn't find file locally at {}, or remotely at {} or {}".format(
--> 282 combined_path, github_file_path, file_path
283 )
284 )
FileNotFoundError: Couldn't find file locally at cc100/cc100.py, or remotely at https://raw.githubusercontent.com/huggingface/datasets/1.1.3/datasets/cc100/cc100.py or https://s3.amazonaws.com/datasets.huggingface.co/datasets/datasets/cc100/cc100.py | {
"+1": 0,
"-1": 0,
"confused": 0,
"eyes": 0,
"heart": 0,
"hooray": 0,
"laugh": 0,
"rocket": 0,
"total_count": 0,
"url": "https://api.github.com/repos/huggingface/datasets/issues/1679/reactions"
} | https://api.github.com/repos/huggingface/datasets/issues/1679/timeline | null | completed | false |
https://api.github.com/repos/huggingface/datasets/issues/1678 | https://api.github.com/repos/huggingface/datasets | https://api.github.com/repos/huggingface/datasets/issues/1678/labels{/name} | https://api.github.com/repos/huggingface/datasets/issues/1678/comments | https://api.github.com/repos/huggingface/datasets/issues/1678/events | https://github.com/huggingface/datasets/pull/1678 | 777,567,920 | MDExOlB1bGxSZXF1ZXN0NTQ3ODI4MTMy | 1,678 | Switchboard Dialog Act Corpus added under `datasets/swda` | {
"avatar_url": "https://avatars.githubusercontent.com/u/22454783?v=4",
"events_url": "https://api.github.com/users/gmihaila/events{/privacy}",
"followers_url": "https://api.github.com/users/gmihaila/followers",
"following_url": "https://api.github.com/users/gmihaila/following{/other_user}",
"gists_url": "https://api.github.com/users/gmihaila/gists{/gist_id}",
"gravatar_id": "",
"html_url": "https://github.com/gmihaila",
"id": 22454783,
"login": "gmihaila",
"node_id": "MDQ6VXNlcjIyNDU0Nzgz",
"organizations_url": "https://api.github.com/users/gmihaila/orgs",
"received_events_url": "https://api.github.com/users/gmihaila/received_events",
"repos_url": "https://api.github.com/users/gmihaila/repos",
"site_admin": false,
"starred_url": "https://api.github.com/users/gmihaila/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/gmihaila/subscriptions",
"type": "User",
"url": "https://api.github.com/users/gmihaila"
} | [] | closed | false | null | [] | null | 8 | "2021-01-03T03:53:41Z" | "2021-01-08T18:09:21Z" | "2021-01-05T10:06:35Z" | CONTRIBUTOR | null | 0 | {
"diff_url": "https://github.com/huggingface/datasets/pull/1678.diff",
"html_url": "https://github.com/huggingface/datasets/pull/1678",
"merged_at": "2021-01-05T10:06:35Z",
"patch_url": "https://github.com/huggingface/datasets/pull/1678.patch",
"url": "https://api.github.com/repos/huggingface/datasets/pulls/1678"
} | Switchboard Dialog Act Corpus
Intro:
The Switchboard Dialog Act Corpus (SwDA) extends the Switchboard-1 Telephone Speech Corpus, Release 2,
with turn/utterance-level dialog-act tags. The tags summarize syntactic, semantic, and pragmatic information
about the associated turn. The SwDA project was undertaken at UC Boulder in the late 1990s.
Details:
[homepage](http://compprag.christopherpotts.net/swda.html)
[repo](https://github.com/NathanDuran/Switchboard-Corpus/raw/master/swda_data/)
I believe this is an important dataset to have, since no dataset related to dialogue acts has been added yet.
I didn't find any required formatting for the pull request; I hope all this information is enough.
For any support please contact me. | {
"+1": 0,
"-1": 0,
"confused": 0,
"eyes": 0,
"heart": 0,
"hooray": 0,
"laugh": 0,
"rocket": 0,
"total_count": 0,
"url": "https://api.github.com/repos/huggingface/datasets/issues/1678/reactions"
} | https://api.github.com/repos/huggingface/datasets/issues/1678/timeline | null | null | true |
https://api.github.com/repos/huggingface/datasets/issues/1677 | https://api.github.com/repos/huggingface/datasets | https://api.github.com/repos/huggingface/datasets/issues/1677/labels{/name} | https://api.github.com/repos/huggingface/datasets/issues/1677/comments | https://api.github.com/repos/huggingface/datasets/issues/1677/events | https://github.com/huggingface/datasets/pull/1677 | 777,553,383 | MDExOlB1bGxSZXF1ZXN0NTQ3ODE3ODI1 | 1,677 | Switchboard Dialog Act Corpus added under `datasets/swda` | {
"avatar_url": "https://avatars.githubusercontent.com/u/22454783?v=4",
"events_url": "https://api.github.com/users/gmihaila/events{/privacy}",
"followers_url": "https://api.github.com/users/gmihaila/followers",
"following_url": "https://api.github.com/users/gmihaila/following{/other_user}",
"gists_url": "https://api.github.com/users/gmihaila/gists{/gist_id}",
"gravatar_id": "",
"html_url": "https://github.com/gmihaila",
"id": 22454783,
"login": "gmihaila",
"node_id": "MDQ6VXNlcjIyNDU0Nzgz",
"organizations_url": "https://api.github.com/users/gmihaila/orgs",
"received_events_url": "https://api.github.com/users/gmihaila/received_events",
"repos_url": "https://api.github.com/users/gmihaila/repos",
"site_admin": false,
"starred_url": "https://api.github.com/users/gmihaila/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/gmihaila/subscriptions",
"type": "User",
"url": "https://api.github.com/users/gmihaila"
} | [] | closed | false | null | [] | null | 1 | "2021-01-03T01:16:42Z" | "2021-01-03T02:55:57Z" | "2021-01-03T02:55:56Z" | CONTRIBUTOR | null | 0 | {
"diff_url": "https://github.com/huggingface/datasets/pull/1677.diff",
"html_url": "https://github.com/huggingface/datasets/pull/1677",
"merged_at": null,
"patch_url": "https://github.com/huggingface/datasets/pull/1677.patch",
"url": "https://api.github.com/repos/huggingface/datasets/pulls/1677"
} | Pleased to announce that I added my first dataset, the **Switchboard Dialog Act Corpus**.
I think this is an important dataset to add, since it is the only one related to dialogue act classification.
Hope the pull request is OK; I wasn't able to find any special formatting for the pull request form.
The Switchboard Dialog Act Corpus (SwDA) extends the Switchboard-1 Telephone Speech Corpus, Release 2,
with turn/utterance-level dialog-act tags. The tags summarize syntactic, semantic, and pragmatic information
about the associated turn. The SwDA project was undertaken at UC Boulder in the late 1990s.
[webpage](http://compprag.christopherpotts.net/swda.html)
[repo](https://github.com/NathanDuran/Switchboard-Corpus/raw/master/swda_data/)
Please contact me for any support!
All tests passed and followed all steps in the contribution guide!
| {
"+1": 0,
"-1": 0,
"confused": 0,
"eyes": 0,
"heart": 0,
"hooray": 0,
"laugh": 0,
"rocket": 0,
"total_count": 0,
"url": "https://api.github.com/repos/huggingface/datasets/issues/1677/reactions"
} | https://api.github.com/repos/huggingface/datasets/issues/1677/timeline | null | null | true |
https://api.github.com/repos/huggingface/datasets/issues/1676 | https://api.github.com/repos/huggingface/datasets | https://api.github.com/repos/huggingface/datasets/issues/1676/labels{/name} | https://api.github.com/repos/huggingface/datasets/issues/1676/comments | https://api.github.com/repos/huggingface/datasets/issues/1676/events | https://github.com/huggingface/datasets/pull/1676 | 777,477,645 | MDExOlB1bGxSZXF1ZXN0NTQ3NzY1OTY3 | 1,676 | new version of Ted Talks IWSLT (WIT3) | {
"avatar_url": "https://avatars.githubusercontent.com/u/9033954?v=4",
"events_url": "https://api.github.com/users/skyprince999/events{/privacy}",
"followers_url": "https://api.github.com/users/skyprince999/followers",
"following_url": "https://api.github.com/users/skyprince999/following{/other_user}",
"gists_url": "https://api.github.com/users/skyprince999/gists{/gist_id}",
"gravatar_id": "",
"html_url": "https://github.com/skyprince999",
"id": 9033954,
"login": "skyprince999",
"node_id": "MDQ6VXNlcjkwMzM5NTQ=",
"organizations_url": "https://api.github.com/users/skyprince999/orgs",
"received_events_url": "https://api.github.com/users/skyprince999/received_events",
"repos_url": "https://api.github.com/users/skyprince999/repos",
"site_admin": false,
"starred_url": "https://api.github.com/users/skyprince999/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/skyprince999/subscriptions",
"type": "User",
"url": "https://api.github.com/users/skyprince999"
} | [] | closed | false | null | [] | null | 3 | "2021-01-02T15:30:03Z" | "2021-01-14T10:10:19Z" | "2021-01-14T10:10:19Z" | CONTRIBUTOR | null | 0 | {
"diff_url": "https://github.com/huggingface/datasets/pull/1676.diff",
"html_url": "https://github.com/huggingface/datasets/pull/1676",
"merged_at": "2021-01-14T10:10:19Z",
"patch_url": "https://github.com/huggingface/datasets/pull/1676.patch",
"url": "https://api.github.com/repos/huggingface/datasets/pulls/1676"
} | In the previous iteration (#1608) I had used language pairs, which created 21,582 configs (109*108)!
Now, the TED talks in _each language_ form a separate config, so it's much cleaner with _just 109 configs_ (one for each language). Dummy files were created manually.
Locally I was able to pass `python datasets-cli test datasets/......`, which created the `dataset_info.json` file. The test for the dummy files also passed. However, I couldn't figure out how to specify the local data folder for the real dataset; a sketch of one possible invocation follows below.
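For what it's worth, a sketch of how the real-data test might be pointed at a manually downloaded folder. Both the `--data_dir` flag and the placeholder folder names here are assumptions on my part:
```
python datasets-cli test datasets/<dataset_folder> --save_infos --all_configs \
    --data_dir /path/to/manually/downloaded/data
```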
**Note: that this requires manual download of the dataset.**
**Note2: The high number of _Files changed (112)_ is because of the large number of dummy files/configs!** | {
"+1": 0,
"-1": 0,
"confused": 0,
"eyes": 0,
"heart": 0,
"hooray": 0,
"laugh": 0,
"rocket": 0,
"total_count": 0,
"url": "https://api.github.com/repos/huggingface/datasets/issues/1676/reactions"
} | https://api.github.com/repos/huggingface/datasets/issues/1676/timeline | null | null | true |
https://api.github.com/repos/huggingface/datasets/issues/1675 | https://api.github.com/repos/huggingface/datasets | https://api.github.com/repos/huggingface/datasets/issues/1675/labels{/name} | https://api.github.com/repos/huggingface/datasets/issues/1675/comments | https://api.github.com/repos/huggingface/datasets/issues/1675/events | https://github.com/huggingface/datasets/issues/1675 | 777,367,320 | MDU6SXNzdWU3NzczNjczMjA= | 1,675 | Add the 800GB Pile dataset? | {
"avatar_url": "https://avatars.githubusercontent.com/u/26859204?v=4",
"events_url": "https://api.github.com/users/lewtun/events{/privacy}",
"followers_url": "https://api.github.com/users/lewtun/followers",
"following_url": "https://api.github.com/users/lewtun/following{/other_user}",
"gists_url": "https://api.github.com/users/lewtun/gists{/gist_id}",
"gravatar_id": "",
"html_url": "https://github.com/lewtun",
"id": 26859204,
"login": "lewtun",
"node_id": "MDQ6VXNlcjI2ODU5MjA0",
"organizations_url": "https://api.github.com/users/lewtun/orgs",
"received_events_url": "https://api.github.com/users/lewtun/received_events",
"repos_url": "https://api.github.com/users/lewtun/repos",
"site_admin": false,
"starred_url": "https://api.github.com/users/lewtun/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/lewtun/subscriptions",
"type": "User",
"url": "https://api.github.com/users/lewtun"
} | [
{
"color": "e99695",
"default": false,
"description": "Requesting to add a new dataset",
"id": 2067376369,
"name": "dataset request",
"node_id": "MDU6TGFiZWwyMDY3Mzc2MzY5",
"url": "https://api.github.com/repos/huggingface/datasets/labels/dataset%20request"
}
] | closed | false | {
"avatar_url": "https://avatars.githubusercontent.com/u/8515462?v=4",
"events_url": "https://api.github.com/users/albertvillanova/events{/privacy}",
"followers_url": "https://api.github.com/users/albertvillanova/followers",
"following_url": "https://api.github.com/users/albertvillanova/following{/other_user}",
"gists_url": "https://api.github.com/users/albertvillanova/gists{/gist_id}",
"gravatar_id": "",
"html_url": "https://github.com/albertvillanova",
"id": 8515462,
"login": "albertvillanova",
"node_id": "MDQ6VXNlcjg1MTU0NjI=",
"organizations_url": "https://api.github.com/users/albertvillanova/orgs",
"received_events_url": "https://api.github.com/users/albertvillanova/received_events",
"repos_url": "https://api.github.com/users/albertvillanova/repos",
"site_admin": false,
"starred_url": "https://api.github.com/users/albertvillanova/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/albertvillanova/subscriptions",
"type": "User",
"url": "https://api.github.com/users/albertvillanova"
} | [
{
"avatar_url": "https://avatars.githubusercontent.com/u/8515462?v=4",
"events_url": "https://api.github.com/users/albertvillanova/events{/privacy}",
"followers_url": "https://api.github.com/users/albertvillanova/followers",
"following_url": "https://api.github.com/users/albertvillanova/following{/other_user}",
"gists_url": "https://api.github.com/users/albertvillanova/gists{/gist_id}",
"gravatar_id": "",
"html_url": "https://github.com/albertvillanova",
"id": 8515462,
"login": "albertvillanova",
"node_id": "MDQ6VXNlcjg1MTU0NjI=",
"organizations_url": "https://api.github.com/users/albertvillanova/orgs",
"received_events_url": "https://api.github.com/users/albertvillanova/received_events",
"repos_url": "https://api.github.com/users/albertvillanova/repos",
"site_admin": false,
"starred_url": "https://api.github.com/users/albertvillanova/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/albertvillanova/subscriptions",
"type": "User",
"url": "https://api.github.com/users/albertvillanova"
}
] | null | 7 | "2021-01-01T22:58:12Z" | "2021-12-01T15:29:07Z" | "2021-12-01T15:29:07Z" | MEMBER | null | null | null | ## Adding a Dataset
- **Name:** The Pile
- **Description:** The Pile is a 825 GiB diverse, open source language modelling data set that consists of 22 smaller, high-quality datasets combined together. See [here](https://twitter.com/nabla_theta/status/1345130408170541056?s=20) for the Twitter announcement
- **Paper:** https://pile.eleuther.ai/paper.pdf
- **Data:** https://pile.eleuther.ai/
- **Motivation:** Enables hardcore (GPT-3 scale!) language modelling
## Remarks
Given the extreme size of this dataset, I'm not sure how feasible this will be to include in `datasets` 🤯 . I'm also unsure how many `datasets` users are pretraining LMs, so the usage of this dataset may not warrant the effort to integrate it.
| {
"+1": 5,
"-1": 0,
"confused": 1,
"eyes": 2,
"heart": 0,
"hooray": 0,
"laugh": 0,
"rocket": 5,
"total_count": 13,
"url": "https://api.github.com/repos/huggingface/datasets/issues/1675/reactions"
} | https://api.github.com/repos/huggingface/datasets/issues/1675/timeline | null | completed | false |
https://api.github.com/repos/huggingface/datasets/issues/1674 | https://api.github.com/repos/huggingface/datasets | https://api.github.com/repos/huggingface/datasets/issues/1674/labels{/name} | https://api.github.com/repos/huggingface/datasets/issues/1674/comments | https://api.github.com/repos/huggingface/datasets/issues/1674/events | https://github.com/huggingface/datasets/issues/1674 | 777,321,840 | MDU6SXNzdWU3NzczMjE4NDA= | 1,674 | dutch_social can't be loaded | {
"avatar_url": "https://avatars.githubusercontent.com/u/10134844?v=4",
"events_url": "https://api.github.com/users/koenvandenberge/events{/privacy}",
"followers_url": "https://api.github.com/users/koenvandenberge/followers",
"following_url": "https://api.github.com/users/koenvandenberge/following{/other_user}",
"gists_url": "https://api.github.com/users/koenvandenberge/gists{/gist_id}",
"gravatar_id": "",
"html_url": "https://github.com/koenvandenberge",
"id": 10134844,
"login": "koenvandenberge",
"node_id": "MDQ6VXNlcjEwMTM0ODQ0",
"organizations_url": "https://api.github.com/users/koenvandenberge/orgs",
"received_events_url": "https://api.github.com/users/koenvandenberge/received_events",
"repos_url": "https://api.github.com/users/koenvandenberge/repos",
"site_admin": false,
"starred_url": "https://api.github.com/users/koenvandenberge/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/koenvandenberge/subscriptions",
"type": "User",
"url": "https://api.github.com/users/koenvandenberge"
} | [] | closed | false | null | [] | null | 8 | "2021-01-01T17:37:08Z" | "2022-10-05T13:03:26Z" | "2022-10-05T13:03:26Z" | NONE | null | null | null | Hi all,
I'm trying to import the `dutch_social` dataset described [here](https://huggingface.co/datasets/dutch_social).
However, the code that should load the data doesn't seem to be working, in particular because the corresponding files can't be found at the provided links.
```
(base) Koens-MacBook-Pro:~ koenvandenberge$ python
Python 3.7.4 (default, Aug 13 2019, 15:17:50)
[Clang 4.0.1 (tags/RELEASE_401/final)] :: Anaconda, Inc. on darwin
Type "help", "copyright", "credits" or "license" for more information.
>>> from datasets import load_dataset
dataset = load_dataset(
'dutch_social')
>>> dataset = load_dataset(
... 'dutch_social')
Traceback (most recent call last):
File "/Users/koenvandenberge/opt/anaconda3/lib/python3.7/site-packages/datasets/load.py", line 267, in prepare_module
local_path = cached_path(file_path, download_config=download_config)
File "/Users/koenvandenberge/opt/anaconda3/lib/python3.7/site-packages/datasets/utils/file_utils.py", line 308, in cached_path
use_etag=download_config.use_etag,
File "/Users/koenvandenberge/opt/anaconda3/lib/python3.7/site-packages/datasets/utils/file_utils.py", line 486, in get_from_cache
raise FileNotFoundError("Couldn't find file at {}".format(url))
FileNotFoundError: Couldn't find file at https://raw.githubusercontent.com/huggingface/datasets/1.1.3/datasets/dutch_social/dutch_social.py
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/Users/koenvandenberge/opt/anaconda3/lib/python3.7/site-packages/datasets/load.py", line 278, in prepare_module
local_path = cached_path(file_path, download_config=download_config)
File "/Users/koenvandenberge/opt/anaconda3/lib/python3.7/site-packages/datasets/utils/file_utils.py", line 308, in cached_path
use_etag=download_config.use_etag,
File "/Users/koenvandenberge/opt/anaconda3/lib/python3.7/site-packages/datasets/utils/file_utils.py", line 486, in get_from_cache
raise FileNotFoundError("Couldn't find file at {}".format(url))
FileNotFoundError: Couldn't find file at https://s3.amazonaws.com/datasets.huggingface.co/datasets/datasets/dutch_social/dutch_social.py
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "<stdin>", line 2, in <module>
File "/Users/koenvandenberge/opt/anaconda3/lib/python3.7/site-packages/datasets/load.py", line 589, in load_dataset
path, script_version=script_version, download_config=download_config, download_mode=download_mode, dataset=True
File "/Users/koenvandenberge/opt/anaconda3/lib/python3.7/site-packages/datasets/load.py", line 282, in prepare_module
combined_path, github_file_path, file_path
FileNotFoundError: Couldn't find file locally at dutch_social/dutch_social.py, or remotely at https://raw.githubusercontent.com/huggingface/datasets/1.1.3/datasets/dutch_social/dutch_social.py or https://s3.amazonaws.com/datasets.huggingface.co/datasets/datasets/dutch_social/dutch_social.py
``` | {
"+1": 0,
"-1": 0,
"confused": 0,
"eyes": 0,
"heart": 0,
"hooray": 0,
"laugh": 0,
"rocket": 0,
"total_count": 0,
"url": "https://api.github.com/repos/huggingface/datasets/issues/1674/reactions"
} | https://api.github.com/repos/huggingface/datasets/issues/1674/timeline | null | completed | false |
https://api.github.com/repos/huggingface/datasets/issues/1673 | https://api.github.com/repos/huggingface/datasets | https://api.github.com/repos/huggingface/datasets/issues/1673/labels{/name} | https://api.github.com/repos/huggingface/datasets/issues/1673/comments | https://api.github.com/repos/huggingface/datasets/issues/1673/events | https://github.com/huggingface/datasets/issues/1673 | 777,263,651 | MDU6SXNzdWU3NzcyNjM2NTE= | 1,673 | Unable to Download Hindi Wikipedia Dataset | {
"avatar_url": "https://avatars.githubusercontent.com/u/30871963?v=4",
"events_url": "https://api.github.com/users/aditya3498/events{/privacy}",
"followers_url": "https://api.github.com/users/aditya3498/followers",
"following_url": "https://api.github.com/users/aditya3498/following{/other_user}",
"gists_url": "https://api.github.com/users/aditya3498/gists{/gist_id}",
"gravatar_id": "",
"html_url": "https://github.com/aditya3498",
"id": 30871963,
"login": "aditya3498",
"node_id": "MDQ6VXNlcjMwODcxOTYz",
"organizations_url": "https://api.github.com/users/aditya3498/orgs",
"received_events_url": "https://api.github.com/users/aditya3498/received_events",
"repos_url": "https://api.github.com/users/aditya3498/repos",
"site_admin": false,
"starred_url": "https://api.github.com/users/aditya3498/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/aditya3498/subscriptions",
"type": "User",
"url": "https://api.github.com/users/aditya3498"
} | [] | closed | false | null | [] | null | 6 | "2021-01-01T10:52:53Z" | "2021-01-05T10:22:12Z" | "2021-01-05T10:22:12Z" | NONE | null | null | null | I used the `datasets` library in Python to load the Wikipedia dataset with the Hindi config `20200501.hi`, along with `beam_runner='DirectRunner'`, and it keeps giving me a file-not-found error. I have attached screenshots of both the error and the code. Please help me understand how to resolve this issue.
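For reference, the call in question, reconstructed from the description above (the exact arguments in the screenshot may differ):
```python
from datasets import load_dataset

# Hindi Wikipedia config, processed locally with Apache Beam's DirectRunner.
dataset = load_dataset("wikipedia", "20200501.hi", beam_runner="DirectRunner")
```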


| {
"+1": 1,
"-1": 0,
"confused": 0,
"eyes": 0,
"heart": 0,
"hooray": 0,
"laugh": 0,
"rocket": 0,
"total_count": 1,
"url": "https://api.github.com/repos/huggingface/datasets/issues/1673/reactions"
} | https://api.github.com/repos/huggingface/datasets/issues/1673/timeline | null | completed | false |
https://api.github.com/repos/huggingface/datasets/issues/1672 | https://api.github.com/repos/huggingface/datasets | https://api.github.com/repos/huggingface/datasets/issues/1672/labels{/name} | https://api.github.com/repos/huggingface/datasets/issues/1672/comments | https://api.github.com/repos/huggingface/datasets/issues/1672/events | https://github.com/huggingface/datasets/issues/1672 | 777,258,941 | MDU6SXNzdWU3NzcyNTg5NDE= | 1,672 | load_dataset hang on file_lock | {
"avatar_url": "https://avatars.githubusercontent.com/u/69860107?v=4",
"events_url": "https://api.github.com/users/tomacai/events{/privacy}",
"followers_url": "https://api.github.com/users/tomacai/followers",
"following_url": "https://api.github.com/users/tomacai/following{/other_user}",
"gists_url": "https://api.github.com/users/tomacai/gists{/gist_id}",
"gravatar_id": "",
"html_url": "https://github.com/tomacai",
"id": 69860107,
"login": "tomacai",
"node_id": "MDQ6VXNlcjY5ODYwMTA3",
"organizations_url": "https://api.github.com/users/tomacai/orgs",
"received_events_url": "https://api.github.com/users/tomacai/received_events",
"repos_url": "https://api.github.com/users/tomacai/repos",
"site_admin": false,
"starred_url": "https://api.github.com/users/tomacai/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/tomacai/subscriptions",
"type": "User",
"url": "https://api.github.com/users/tomacai"
} | [] | closed | false | null | [] | null | 3 | "2021-01-01T10:25:07Z" | "2021-03-31T16:24:13Z" | "2021-01-01T11:47:36Z" | NONE | null | null | null | I am trying to load the squad dataset. Fails on Windows 10 but succeeds in Colab.
Transformers: 3.3.1
Datasets: 1.0.2
Windows 10 (also tested in WSL)
```
import datasets
from datasets import load_dataset

datasets.logging.set_verbosity_debug()
train_dataset = load_dataset('squad', split='train')
valid_dataset = load_dataset('squad', split='validation')
train_dataset.features
```
```
https://raw.githubusercontent.com/huggingface/datasets/1.0.2/datasets/squad/squad.py not found in cache or force_download set to True, downloading to C:\Users\simpl\.cache\huggingface\datasets\tmpzj_o_6u7
Downloading:
5.24k/? [00:00<00:00, 134kB/s]
storing https://raw.githubusercontent.com/huggingface/datasets/1.0.2/datasets/squad/squad.py in cache at C:\Users\simpl\.cache\huggingface\datasets\f6877c8d2e01e8fcb60dc101be28b54a7522feac756deb9ac5c39c6d8ebef1ce.85f43de978b9b25921cb78d7a2f2b350c04acdbaedb9ecb5f7101cd7c0950e68.py
creating metadata file for C:\Users\simpl\.cache\huggingface\datasets\f6877c8d2e01e8fcb60dc101be28b54a7522feac756deb9ac5c39c6d8ebef1ce.85f43de978b9b25921cb78d7a2f2b350c04acdbaedb9ecb5f7101cd7c0950e68.py
Checking C:\Users\simpl\.cache\huggingface\datasets\f6877c8d2e01e8fcb60dc101be28b54a7522feac756deb9ac5c39c6d8ebef1ce.85f43de978b9b25921cb78d7a2f2b350c04acdbaedb9ecb5f7101cd7c0950e68.py for additional imports.
Found main folder for dataset https://raw.githubusercontent.com/huggingface/datasets/1.0.2/datasets/squad/squad.py at C:\Users\simpl\.cache\huggingface\modules\datasets_modules\datasets\squad
Found specific version folder for dataset https://raw.githubusercontent.com/huggingface/datasets/1.0.2/datasets/squad/squad.py at C:\Users\simpl\.cache\huggingface\modules\datasets_modules\datasets\squad\1244d044b266a5e4dbd4174d23cb995eead372fbca31a03edc3f8a132787af41
Found script file from https://raw.githubusercontent.com/huggingface/datasets/1.0.2/datasets/squad/squad.py to C:\Users\simpl\.cache\huggingface\modules\datasets_modules\datasets\squad\1244d044b266a5e4dbd4174d23cb995eead372fbca31a03edc3f8a132787af41\squad.py
Couldn't find dataset infos file at https://raw.githubusercontent.com/huggingface/datasets/1.0.2/datasets/squad\dataset_infos.json
Found metadata file for dataset https://raw.githubusercontent.com/huggingface/datasets/1.0.2/datasets/squad/squad.py at C:\Users\simpl\.cache\huggingface\modules\datasets_modules\datasets\squad\1244d044b266a5e4dbd4174d23cb995eead372fbca31a03edc3f8a132787af41\squad.json
No config specified, defaulting to first: squad/plain_text
```
Interrupting the jupyter kernel we are in a file lock.
In Google Colab the download is ok. In contrast to a local run in colab dataset_infos.json is downloaded
```
https://raw.githubusercontent.com/huggingface/datasets/1.0.2/datasets/squad/dataset_infos.json not found in cache or force_download set to True, downloading to /root/.cache/huggingface/datasets/tmptl9ha_ad
Downloading:
2.19k/? [00:00<00:00, 26.2kB/s]
``` | {
"+1": 0,
"-1": 0,
"confused": 0,
"eyes": 0,
"heart": 0,
"hooray": 0,
"laugh": 0,
"rocket": 0,
"total_count": 0,
"url": "https://api.github.com/repos/huggingface/datasets/issues/1672/reactions"
} | https://api.github.com/repos/huggingface/datasets/issues/1672/timeline | null | completed | false |
https://api.github.com/repos/huggingface/datasets/issues/1671 | https://api.github.com/repos/huggingface/datasets | https://api.github.com/repos/huggingface/datasets/issues/1671/labels{/name} | https://api.github.com/repos/huggingface/datasets/issues/1671/comments | https://api.github.com/repos/huggingface/datasets/issues/1671/events | https://github.com/huggingface/datasets/issues/1671 | 776,652,193 | MDU6SXNzdWU3NzY2NTIxOTM= | 1,671 | connection issue | {
"avatar_url": "https://avatars.githubusercontent.com/u/73364383?v=4",
"events_url": "https://api.github.com/users/rabeehkarimimahabadi/events{/privacy}",
"followers_url": "https://api.github.com/users/rabeehkarimimahabadi/followers",
"following_url": "https://api.github.com/users/rabeehkarimimahabadi/following{/other_user}",
"gists_url": "https://api.github.com/users/rabeehkarimimahabadi/gists{/gist_id}",
"gravatar_id": "",
"html_url": "https://github.com/rabeehkarimimahabadi",
"id": 73364383,
"login": "rabeehkarimimahabadi",
"node_id": "MDQ6VXNlcjczMzY0Mzgz",
"organizations_url": "https://api.github.com/users/rabeehkarimimahabadi/orgs",
"received_events_url": "https://api.github.com/users/rabeehkarimimahabadi/received_events",
"repos_url": "https://api.github.com/users/rabeehkarimimahabadi/repos",
"site_admin": false,
"starred_url": "https://api.github.com/users/rabeehkarimimahabadi/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/rabeehkarimimahabadi/subscriptions",
"type": "User",
"url": "https://api.github.com/users/rabeehkarimimahabadi"
} | [] | closed | false | null | [] | null | 2 | "2020-12-30T21:56:20Z" | "2022-10-05T12:42:12Z" | "2022-10-05T12:42:12Z" | NONE | null | null | null | Hi
I am getting this connection issue, resulting in a large failure on the cloud. @lhoestq, I would appreciate your help with this.
If I want to keep the code the same (so not using `save_to_disk`/`load_from_disk`, but saving the datasets in the format `load_dataset` reads from and copying the files into the folder the `datasets` library reads from), could you advise me on how this can be done? Thanks.
I tried to read the data, save it to a path and then set `HF_HOME`, but this does not work: it is still not reading from the path I set. Could you advise me on how to save the datasets to a path and have the `datasets` library read from that path, to avoid the connection issue? Thanks.
```
>>> import os
>>> import datasets
>>> imdb = datasets.load_dataset("imdb")
>>> imdb.save_to_disk("/idiap/temp/rkarimi/hf_datasets/imdb")
>>> os.environ["HF_HOME"] = "/idiap/temp/rkarimi/hf_datasets/"
>>> imdb = datasets.load_dataset("imdb")
Reusing dataset imdb (/idiap/temp/rkarimi/cache_home_2/datasets/imdb/plain_text/1.0.0/90099cb476936b753383ba2ae6ab2eae419b2e87f71cd5189cb9c8e5814d12a3)
```
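For reference, a minimal sketch of an offline round-trip that avoids `load_dataset`'s network calls entirely (an assumption: the saved directory is copied as-is to the target machine, and `load_from_disk` reads back what `save_to_disk` wrote):
```python
import datasets

# On the machine with connectivity:
imdb = datasets.load_dataset("imdb")
imdb.save_to_disk("/idiap/temp/rkarimi/hf_datasets/imdb")

# On the offline machine, after copying the directory over:
imdb = datasets.load_from_disk("/idiap/temp/rkarimi/hf_datasets/imdb")
```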
Afterwards I tried to set `HF_HOME` in bash; this makes the library read from it, but it still cannot load the datasets from the saved path and keeps downloading the data. Could you tell me how to fix this issue, @lhoestq? Thanks.
Also, this is on the cloud, so I save the datasets to a path and copy it to "another machine" to load the data.
### Error stack
```
Traceback (most recent call last):
File "./finetune_t5_trainer.py", line 344, in <module>
main()
File "./finetune_t5_trainer.py", line 232, in main
for task in data_args.eval_tasks} if training_args.do_test else None
File "./finetune_t5_trainer.py", line 232, in <dictcomp>
for task in data_args.eval_tasks} if training_args.do_test else None
File "/workdir/seq2seq/data/tasks.py", line 136, in get_dataset
split = self.get_sampled_split(split, n_obs)
File "/workdir/seq2seq/data/tasks.py", line 64, in get_sampled_split
dataset = self.load_dataset(split)
File "/workdir/seq2seq/data/tasks.py", line 454, in load_dataset
split=split, script_version="master")
File "/usr/local/lib/python3.6/dist-packages/datasets/load.py", line 589, in load_dataset
path, script_version=script_version, download_config=download_config, download_mode=download_mode, dataset=True
File "/usr/local/lib/python3.6/dist-packages/datasets/load.py", line 263, in prepare_module
head_hf_s3(path, filename=name, dataset=dataset)
File "/usr/local/lib/python3.6/dist-packages/datasets/utils/file_utils.py", line 200, in head_hf_s3
return http_head(hf_bucket_url(identifier=identifier, filename=filename, use_cdn=use_cdn, dataset=dataset))
File "/usr/local/lib/python3.6/dist-packages/datasets/utils/file_utils.py", line 403, in http_head
url, proxies=proxies, headers=headers, cookies=cookies, allow_redirects=allow_redirects, timeout=timeout
File "/usr/local/lib/python3.6/dist-packages/requests/api.py", line 104, in head
return request('head', url, **kwargs)
File "/usr/local/lib/python3.6/dist-packages/requests/api.py", line 61, in request
return session.request(method=method, url=url, **kwargs)
File "/usr/local/lib/python3.6/dist-packages/requests/sessions.py", line 542, in request
resp = self.send(prep, **send_kwargs)
File "/usr/local/lib/python3.6/dist-packages/requests/sessions.py", line 655, in send
r = adapter.send(request, **kwargs)
File "/usr/local/lib/python3.6/dist-packages/requests/adapters.py", line 504, in send
raise ConnectTimeout(e, request=request)
requests.exceptions.ConnectTimeout: HTTPSConnectionPool(host='s3.amazonaws.com', port=443): Max retries exceeded with url: /datasets.huggingface.co/datasets/datasets/glue/glue.py (Caused by ConnectTimeoutError(<urllib3.connection.HTTPSConnection object at 0x7ff6d6c60a20>, 'Connection to s3.amazonaws.com timed out. (connect timeout=10)'))
```
| {
"+1": 0,
"-1": 0,
"confused": 0,
"eyes": 0,
"heart": 0,
"hooray": 0,
"laugh": 0,
"rocket": 0,
"total_count": 0,
"url": "https://api.github.com/repos/huggingface/datasets/issues/1671/reactions"
} | https://api.github.com/repos/huggingface/datasets/issues/1671/timeline | null | completed | false |
https://api.github.com/repos/huggingface/datasets/issues/1670 | https://api.github.com/repos/huggingface/datasets | https://api.github.com/repos/huggingface/datasets/issues/1670/labels{/name} | https://api.github.com/repos/huggingface/datasets/issues/1670/comments | https://api.github.com/repos/huggingface/datasets/issues/1670/events | https://github.com/huggingface/datasets/issues/1670 | 776,608,579 | MDU6SXNzdWU3NzY2MDg1Nzk= | 1,670 | wiki_dpr pre-processing performance | {
"avatar_url": "https://avatars.githubusercontent.com/u/753898?v=4",
"events_url": "https://api.github.com/users/dbarnhart/events{/privacy}",
"followers_url": "https://api.github.com/users/dbarnhart/followers",
"following_url": "https://api.github.com/users/dbarnhart/following{/other_user}",
"gists_url": "https://api.github.com/users/dbarnhart/gists{/gist_id}",
"gravatar_id": "",
"html_url": "https://github.com/dbarnhart",
"id": 753898,
"login": "dbarnhart",
"node_id": "MDQ6VXNlcjc1Mzg5OA==",
"organizations_url": "https://api.github.com/users/dbarnhart/orgs",
"received_events_url": "https://api.github.com/users/dbarnhart/received_events",
"repos_url": "https://api.github.com/users/dbarnhart/repos",
"site_admin": false,
"starred_url": "https://api.github.com/users/dbarnhart/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/dbarnhart/subscriptions",
"type": "User",
"url": "https://api.github.com/users/dbarnhart"
} | [
{
"color": "a2eeef",
"default": true,
"description": "New feature or request",
"id": 1935892871,
"name": "enhancement",
"node_id": "MDU6TGFiZWwxOTM1ODkyODcx",
"url": "https://api.github.com/repos/huggingface/datasets/labels/enhancement"
},
{
"color": "72f99f",
"default": false,
"description": "Discussions on the datasets",
"id": 2067401494,
"name": "Dataset discussion",
"node_id": "MDU6TGFiZWwyMDY3NDAxNDk0",
"url": "https://api.github.com/repos/huggingface/datasets/labels/Dataset%20discussion"
}
] | open | false | null | [] | null | 3 | "2020-12-30T19:41:43Z" | "2021-01-28T09:41:36Z" | null | NONE | null | null | null | I've been working with wiki_dpr and noticed that the dataset processing is seriously impaired in performance [1]. It takes about 12h to process the entire dataset. Most of this time is simply loading and processing the data, but the actual indexing is also quite slow (3h).
I won't repeat the concerns around multiprocessing as they are addressed in other issues (#786), but this is the first obvious thing to do. Using cython to speed up the text manipulation may also help. Loading and processing a dataset of this size in under 15 minutes does not seem unreasonable on a modern multi-core machine. I have hit such targets myself on similar tasks. Would love to see this improve.
The other issue is that it takes 3h to construct the FAISS index. If only we could use GPUs with HNSW, but we can't. My sharded GPU indexing code can build an IVF + PQ index in 10 minutes on 20 million vectors. Still, 3h seems slow even for the CPU.
It looks like HF is adding only 1000 vectors at a time by default [2], whereas the faiss benchmarks add 1 million vectors at a time (effectively) [3]. It's possible the runtime could be reduced with a larger batch. Also, it looks like project dependencies ultimately use OpenBLAS, but this is known to have issues when combined with OpenMP, which HNSW does [4]. A workaround is to set the environment variable `OMP_WAIT_POLICY=PASSIVE` via `os.environ` or similar.
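A minimal sketch of the two workarounds suggested above (setting the environment variable before the indexing libraries initialize OpenMP, and adding the vectors in one large batch). The HNSW parameters here are illustrative, not the exact ones `wiki_dpr` uses:
```python
import os

# Must be set before faiss/OpenBLAS initialize OpenMP.
os.environ["OMP_WAIT_POLICY"] = "PASSIVE"

import faiss
import numpy as np

d = 768  # DPR embedding dimension
xb = np.random.rand(100_000, d).astype("float32")  # stand-in for the real embeddings

index = faiss.IndexHNSWFlat(d, 128, faiss.METRIC_INNER_PRODUCT)
index.add(xb)  # one large batch instead of 1000-vector chunks
```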
References:
[1] https://github.com/huggingface/datasets/blob/master/datasets/wiki_dpr/wiki_dpr.py
[2] https://github.com/huggingface/datasets/blob/master/src/datasets/search.py
[3] https://github.com/facebookresearch/faiss/blob/master/benchs/bench_hnsw.py
[4] https://github.com/facebookresearch/faiss/issues/422 | {
"+1": 0,
"-1": 0,
"confused": 0,
"eyes": 0,
"heart": 0,
"hooray": 0,
"laugh": 0,
"rocket": 0,
"total_count": 0,
"url": "https://api.github.com/repos/huggingface/datasets/issues/1670/reactions"
} | https://api.github.com/repos/huggingface/datasets/issues/1670/timeline | null | null | false |
https://api.github.com/repos/huggingface/datasets/issues/1669 | https://api.github.com/repos/huggingface/datasets | https://api.github.com/repos/huggingface/datasets/issues/1669/labels{/name} | https://api.github.com/repos/huggingface/datasets/issues/1669/comments | https://api.github.com/repos/huggingface/datasets/issues/1669/events | https://github.com/huggingface/datasets/issues/1669 | 776,608,386 | MDU6SXNzdWU3NzY2MDgzODY= | 1,669 | wiki_dpr dataset pre-processesing performance | {
"avatar_url": "https://avatars.githubusercontent.com/u/753898?v=4",
"events_url": "https://api.github.com/users/dbarnhart/events{/privacy}",
"followers_url": "https://api.github.com/users/dbarnhart/followers",
"following_url": "https://api.github.com/users/dbarnhart/following{/other_user}",
"gists_url": "https://api.github.com/users/dbarnhart/gists{/gist_id}",
"gravatar_id": "",
"html_url": "https://github.com/dbarnhart",
"id": 753898,
"login": "dbarnhart",
"node_id": "MDQ6VXNlcjc1Mzg5OA==",
"organizations_url": "https://api.github.com/users/dbarnhart/orgs",
"received_events_url": "https://api.github.com/users/dbarnhart/received_events",
"repos_url": "https://api.github.com/users/dbarnhart/repos",
"site_admin": false,
"starred_url": "https://api.github.com/users/dbarnhart/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/dbarnhart/subscriptions",
"type": "User",
"url": "https://api.github.com/users/dbarnhart"
} | [] | closed | false | null | [] | null | 1 | "2020-12-30T19:41:09Z" | "2020-12-30T19:42:25Z" | "2020-12-30T19:42:25Z" | NONE | null | null | null | I've been working with wiki_dpr and noticed that the dataset processing is seriously impaired in performance [1]. It takes about 12h to process the entire dataset. Most of this time is simply loading and processing the data, but the actual indexing is also quite slow (3h).
I won't repeat the concerns around multiprocessing as they are addressed in other issues (#786), but this is the first obvious thing to do. Using cython to speed up the text manipulation may also help. Loading and processing a dataset of this size in under 15 minutes does not seem unreasonable on a modern multi-core machine. I have hit such targets myself on similar tasks. Would love to see this improve.
The other issue is that it takes 3h to construct the FAISS index. If only we could use GPUs with HNSW, but we can't. My sharded GPU indexing code can build an IVF + PQ index in 10 minutes on 20 million vectors. Still, 3h seems slow even for the CPU.
It looks like HF is adding only 1000 vectors at a time by default [2], whereas the faiss benchmarks add 1 million vectors at a time (effectively) [3]. It's possible the runtime could be reduced with a larger batch. Also, it looks like project dependencies ultimately use OpenBLAS, but this is known to have issues when combined with OpenMP, which HNSW does [4]. A workaround is to set the environment variable `OMP_WAIT_POLICY=PASSIVE` via `os.environ` or similar.
References:
[1] https://github.com/huggingface/datasets/blob/master/datasets/wiki_dpr/wiki_dpr.py
[2] https://github.com/huggingface/datasets/blob/master/src/datasets/search.py
[3] https://github.com/facebookresearch/faiss/blob/master/benchs/bench_hnsw.py
[4] https://github.com/facebookresearch/faiss/issues/422 | {
"+1": 0,
"-1": 0,
"confused": 0,
"eyes": 0,
"heart": 0,
"hooray": 0,
"laugh": 0,
"rocket": 0,
"total_count": 0,
"url": "https://api.github.com/repos/huggingface/datasets/issues/1669/reactions"
} | https://api.github.com/repos/huggingface/datasets/issues/1669/timeline | null | completed | false |
https://api.github.com/repos/huggingface/datasets/issues/1668 | https://api.github.com/repos/huggingface/datasets | https://api.github.com/repos/huggingface/datasets/issues/1668/labels{/name} | https://api.github.com/repos/huggingface/datasets/issues/1668/comments | https://api.github.com/repos/huggingface/datasets/issues/1668/events | https://github.com/huggingface/datasets/pull/1668 | 776,552,854 | MDExOlB1bGxSZXF1ZXN0NTQ3MDIxODI0 | 1,668 | xed_en_fi dataset Cleanup | {
"avatar_url": "https://avatars.githubusercontent.com/u/42851186?v=4",
"events_url": "https://api.github.com/users/lhoestq/events{/privacy}",
"followers_url": "https://api.github.com/users/lhoestq/followers",
"following_url": "https://api.github.com/users/lhoestq/following{/other_user}",
"gists_url": "https://api.github.com/users/lhoestq/gists{/gist_id}",
"gravatar_id": "",
"html_url": "https://github.com/lhoestq",
"id": 42851186,
"login": "lhoestq",
"node_id": "MDQ6VXNlcjQyODUxMTg2",
"organizations_url": "https://api.github.com/users/lhoestq/orgs",
"received_events_url": "https://api.github.com/users/lhoestq/received_events",
"repos_url": "https://api.github.com/users/lhoestq/repos",
"site_admin": false,
"starred_url": "https://api.github.com/users/lhoestq/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/lhoestq/subscriptions",
"type": "User",
"url": "https://api.github.com/users/lhoestq"
} | [] | closed | false | null | [] | null | 0 | "2020-12-30T17:11:18Z" | "2020-12-30T17:22:44Z" | "2020-12-30T17:22:43Z" | MEMBER | null | 0 | {
"diff_url": "https://github.com/huggingface/datasets/pull/1668.diff",
"html_url": "https://github.com/huggingface/datasets/pull/1668",
"merged_at": "2020-12-30T17:22:43Z",
"patch_url": "https://github.com/huggingface/datasets/pull/1668.patch",
"url": "https://api.github.com/repos/huggingface/datasets/pulls/1668"
} | Fix ClassLabel feature type and minor mistakes in the dataset card | {
"+1": 0,
"-1": 0,
"confused": 0,
"eyes": 0,
"heart": 0,
"hooray": 0,
"laugh": 0,
"rocket": 0,
"total_count": 0,
"url": "https://api.github.com/repos/huggingface/datasets/issues/1668/reactions"
} | https://api.github.com/repos/huggingface/datasets/issues/1668/timeline | null | null | true |
https://api.github.com/repos/huggingface/datasets/issues/1667 | https://api.github.com/repos/huggingface/datasets | https://api.github.com/repos/huggingface/datasets/issues/1667/labels{/name} | https://api.github.com/repos/huggingface/datasets/issues/1667/comments | https://api.github.com/repos/huggingface/datasets/issues/1667/events | https://github.com/huggingface/datasets/pull/1667 | 776,446,658 | MDExOlB1bGxSZXF1ZXN0NTQ2OTM4MjAy | 1,667 | Fix NER metric example in Overview notebook | {
"avatar_url": "https://avatars.githubusercontent.com/u/53588015?v=4",
"events_url": "https://api.github.com/users/jungwhank/events{/privacy}",
"followers_url": "https://api.github.com/users/jungwhank/followers",
"following_url": "https://api.github.com/users/jungwhank/following{/other_user}",
"gists_url": "https://api.github.com/users/jungwhank/gists{/gist_id}",
"gravatar_id": "",
"html_url": "https://github.com/jungwhank",
"id": 53588015,
"login": "jungwhank",
"node_id": "MDQ6VXNlcjUzNTg4MDE1",
"organizations_url": "https://api.github.com/users/jungwhank/orgs",
"received_events_url": "https://api.github.com/users/jungwhank/received_events",
"repos_url": "https://api.github.com/users/jungwhank/repos",
"site_admin": false,
"starred_url": "https://api.github.com/users/jungwhank/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/jungwhank/subscriptions",
"type": "User",
"url": "https://api.github.com/users/jungwhank"
} | [] | closed | false | null | [] | null | 0 | "2020-12-30T13:05:19Z" | "2020-12-31T01:12:08Z" | "2020-12-30T17:21:51Z" | CONTRIBUTOR | null | 0 | {
"diff_url": "https://github.com/huggingface/datasets/pull/1667.diff",
"html_url": "https://github.com/huggingface/datasets/pull/1667",
"merged_at": "2020-12-30T17:21:51Z",
"patch_url": "https://github.com/huggingface/datasets/pull/1667.patch",
"url": "https://api.github.com/repos/huggingface/datasets/pulls/1667"
} | Fix errors in `NER metric example` section in `Overview.ipynb`.
```
---------------------------------------------------------------------------
ImportError Traceback (most recent call last)
<ipython-input-37-ee559b166e25> in <module>()
----> 1 ner_metric = load_metric('seqeval')
2 references = [['O', 'O', 'O', 'B-MISC', 'I-MISC', 'I-MISC', 'O'], ['B-PER', 'I-PER', 'O']]
3 predictions = [['O', 'O', 'B-MISC', 'I-MISC', 'I-MISC', 'I-MISC', 'O'], ['B-PER', 'I-PER', 'O']]
4 ner_metric.compute(predictions, references)
/usr/local/lib/python3.6/dist-packages/datasets/load.py in prepare_module(path, script_version, download_config, download_mode, dataset, force_local_path, **download_kwargs)
340 if needs_to_be_installed:
341 raise ImportError(
--> 342 f"To be able to use this {module_type}, you need to install the following dependencies"
343 f"{[lib_name for lib_name, lib_path in needs_to_be_installed]} using 'pip install "
344 f"{' '.join([lib_path for lib_name, lib_path in needs_to_be_installed])}' for instance'"
ImportError: To be able to use this metric, you need to install the following dependencies['seqeval'] using 'pip install seqeval' for instance'
```
```
ValueError Traceback (most recent call last)
<ipython-input-39-ee559b166e25> in <module>()
2 references = [['O', 'O', 'O', 'B-MISC', 'I-MISC', 'I-MISC', 'O'], ['B-PER', 'I-PER', 'O']]
3 predictions = [['O', 'O', 'B-MISC', 'I-MISC', 'I-MISC', 'I-MISC', 'O'], ['B-PER', 'I-PER', 'O']]
----> 4 ner_metric.compute(predictions, references)
/usr/local/lib/python3.6/dist-packages/datasets/metric.py in compute(self, *args, **kwargs)
378 """
379 if args:
--> 380 raise ValueError("Please call `compute` using keyword arguments.")
381
382 predictions = kwargs.pop("predictions", None)
ValueError: Please call `compute` using keyword arguments.
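
# Fix (a sketch, per the two errors above): install seqeval first,
#     pip install seqeval
# then call `compute` with keyword arguments:
#     ner_metric = load_metric('seqeval')
#     ner_metric.compute(predictions=predictions, references=references)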
``` | {
"+1": 0,
"-1": 0,
"confused": 0,
"eyes": 0,
"heart": 0,
"hooray": 0,
"laugh": 0,
"rocket": 0,
"total_count": 0,
"url": "https://api.github.com/repos/huggingface/datasets/issues/1667/reactions"
} | https://api.github.com/repos/huggingface/datasets/issues/1667/timeline | null | null | true |
https://api.github.com/repos/huggingface/datasets/issues/1666 | https://api.github.com/repos/huggingface/datasets | https://api.github.com/repos/huggingface/datasets/issues/1666/labels{/name} | https://api.github.com/repos/huggingface/datasets/issues/1666/comments | https://api.github.com/repos/huggingface/datasets/issues/1666/events | https://github.com/huggingface/datasets/pull/1666 | 776,432,006 | MDExOlB1bGxSZXF1ZXN0NTQ2OTI2MzQw | 1,666 | Add language to dataset card for Makhzan dataset. | {
"avatar_url": "https://avatars.githubusercontent.com/u/14899066?v=4",
"events_url": "https://api.github.com/users/arkhalid/events{/privacy}",
"followers_url": "https://api.github.com/users/arkhalid/followers",
"following_url": "https://api.github.com/users/arkhalid/following{/other_user}",
"gists_url": "https://api.github.com/users/arkhalid/gists{/gist_id}",
"gravatar_id": "",
"html_url": "https://github.com/arkhalid",
"id": 14899066,
"login": "arkhalid",
"node_id": "MDQ6VXNlcjE0ODk5MDY2",
"organizations_url": "https://api.github.com/users/arkhalid/orgs",
"received_events_url": "https://api.github.com/users/arkhalid/received_events",
"repos_url": "https://api.github.com/users/arkhalid/repos",
"site_admin": false,
"starred_url": "https://api.github.com/users/arkhalid/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/arkhalid/subscriptions",
"type": "User",
"url": "https://api.github.com/users/arkhalid"
} | [] | closed | false | null | [] | null | 0 | "2020-12-30T12:25:52Z" | "2020-12-30T17:20:35Z" | "2020-12-30T17:20:35Z" | CONTRIBUTOR | null | 0 | {
"diff_url": "https://github.com/huggingface/datasets/pull/1666.diff",
"html_url": "https://github.com/huggingface/datasets/pull/1666",
"merged_at": "2020-12-30T17:20:35Z",
"patch_url": "https://github.com/huggingface/datasets/pull/1666.patch",
"url": "https://api.github.com/repos/huggingface/datasets/pulls/1666"
} | Add language to dataset card. | {
"+1": 0,
"-1": 0,
"confused": 0,
"eyes": 0,
"heart": 0,
"hooray": 0,
"laugh": 0,
"rocket": 0,
"total_count": 0,
"url": "https://api.github.com/repos/huggingface/datasets/issues/1666/reactions"
} | https://api.github.com/repos/huggingface/datasets/issues/1666/timeline | null | null | true |
https://api.github.com/repos/huggingface/datasets/issues/1665 | https://api.github.com/repos/huggingface/datasets | https://api.github.com/repos/huggingface/datasets/issues/1665/labels{/name} | https://api.github.com/repos/huggingface/datasets/issues/1665/comments | https://api.github.com/repos/huggingface/datasets/issues/1665/events | https://github.com/huggingface/datasets/pull/1665 | 776,431,087 | MDExOlB1bGxSZXF1ZXN0NTQ2OTI1NTgw | 1,665 | Add language to dataset card for Counter dataset. | {
"avatar_url": "https://avatars.githubusercontent.com/u/14899066?v=4",
"events_url": "https://api.github.com/users/arkhalid/events{/privacy}",
"followers_url": "https://api.github.com/users/arkhalid/followers",
"following_url": "https://api.github.com/users/arkhalid/following{/other_user}",
"gists_url": "https://api.github.com/users/arkhalid/gists{/gist_id}",
"gravatar_id": "",
"html_url": "https://github.com/arkhalid",
"id": 14899066,
"login": "arkhalid",
"node_id": "MDQ6VXNlcjE0ODk5MDY2",
"organizations_url": "https://api.github.com/users/arkhalid/orgs",
"received_events_url": "https://api.github.com/users/arkhalid/received_events",
"repos_url": "https://api.github.com/users/arkhalid/repos",
"site_admin": false,
"starred_url": "https://api.github.com/users/arkhalid/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/arkhalid/subscriptions",
"type": "User",
"url": "https://api.github.com/users/arkhalid"
} | [] | closed | false | null | [] | null | 0 | "2020-12-30T12:23:20Z" | "2020-12-30T17:20:20Z" | "2020-12-30T17:20:20Z" | CONTRIBUTOR | null | 0 | {
"diff_url": "https://github.com/huggingface/datasets/pull/1665.diff",
"html_url": "https://github.com/huggingface/datasets/pull/1665",
"merged_at": "2020-12-30T17:20:20Z",
"patch_url": "https://github.com/huggingface/datasets/pull/1665.patch",
"url": "https://api.github.com/repos/huggingface/datasets/pulls/1665"
} | Add language. | {
"+1": 0,
"-1": 0,
"confused": 0,
"eyes": 0,
"heart": 0,
"hooray": 0,
"laugh": 0,
"rocket": 0,
"total_count": 0,
"url": "https://api.github.com/repos/huggingface/datasets/issues/1665/reactions"
} | https://api.github.com/repos/huggingface/datasets/issues/1665/timeline | null | null | true |
https://api.github.com/repos/huggingface/datasets/issues/1664 | https://api.github.com/repos/huggingface/datasets | https://api.github.com/repos/huggingface/datasets/issues/1664/labels{/name} | https://api.github.com/repos/huggingface/datasets/issues/1664/comments | https://api.github.com/repos/huggingface/datasets/issues/1664/events | https://github.com/huggingface/datasets/pull/1664 | 775,956,441 | MDExOlB1bGxSZXF1ZXN0NTQ2NTM1NDcy | 1,664 | removed \n in labels | {
"avatar_url": "https://avatars.githubusercontent.com/u/19718818?v=4",
"events_url": "https://api.github.com/users/bhavitvyamalik/events{/privacy}",
"followers_url": "https://api.github.com/users/bhavitvyamalik/followers",
"following_url": "https://api.github.com/users/bhavitvyamalik/following{/other_user}",
"gists_url": "https://api.github.com/users/bhavitvyamalik/gists{/gist_id}",
"gravatar_id": "",
"html_url": "https://github.com/bhavitvyamalik",
"id": 19718818,
"login": "bhavitvyamalik",
"node_id": "MDQ6VXNlcjE5NzE4ODE4",
"organizations_url": "https://api.github.com/users/bhavitvyamalik/orgs",
"received_events_url": "https://api.github.com/users/bhavitvyamalik/received_events",
"repos_url": "https://api.github.com/users/bhavitvyamalik/repos",
"site_admin": false,
"starred_url": "https://api.github.com/users/bhavitvyamalik/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/bhavitvyamalik/subscriptions",
"type": "User",
"url": "https://api.github.com/users/bhavitvyamalik"
} | [] | closed | false | null | [] | null | 0 | "2020-12-29T15:41:43Z" | "2020-12-30T17:18:49Z" | "2020-12-30T17:18:49Z" | CONTRIBUTOR | null | 0 | {
"diff_url": "https://github.com/huggingface/datasets/pull/1664.diff",
"html_url": "https://github.com/huggingface/datasets/pull/1664",
"merged_at": "2020-12-30T17:18:49Z",
"patch_url": "https://github.com/huggingface/datasets/pull/1664.patch",
"url": "https://api.github.com/repos/huggingface/datasets/pulls/1664"
} | updated social_i_qa labels as per #1633 | {
"+1": 1,
"-1": 0,
"confused": 0,
"eyes": 0,
"heart": 0,
"hooray": 0,
"laugh": 0,
"rocket": 0,
"total_count": 1,
"url": "https://api.github.com/repos/huggingface/datasets/issues/1664/reactions"
} | https://api.github.com/repos/huggingface/datasets/issues/1664/timeline | null | null | true |
https://api.github.com/repos/huggingface/datasets/issues/1663 | https://api.github.com/repos/huggingface/datasets | https://api.github.com/repos/huggingface/datasets/issues/1663/labels{/name} | https://api.github.com/repos/huggingface/datasets/issues/1663/comments | https://api.github.com/repos/huggingface/datasets/issues/1663/events | https://github.com/huggingface/datasets/pull/1663 | 775,914,320 | MDExOlB1bGxSZXF1ZXN0NTQ2NTAzMjg5 | 1,663 | update saving and loading methods for faiss index so to accept path l… | {
"avatar_url": "https://avatars.githubusercontent.com/u/11614798?v=4",
"events_url": "https://api.github.com/users/tslott/events{/privacy}",
"followers_url": "https://api.github.com/users/tslott/followers",
"following_url": "https://api.github.com/users/tslott/following{/other_user}",
"gists_url": "https://api.github.com/users/tslott/gists{/gist_id}",
"gravatar_id": "",
"html_url": "https://github.com/tslott",
"id": 11614798,
"login": "tslott",
"node_id": "MDQ6VXNlcjExNjE0Nzk4",
"organizations_url": "https://api.github.com/users/tslott/orgs",
"received_events_url": "https://api.github.com/users/tslott/received_events",
"repos_url": "https://api.github.com/users/tslott/repos",
"site_admin": false,
"starred_url": "https://api.github.com/users/tslott/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/tslott/subscriptions",
"type": "User",
"url": "https://api.github.com/users/tslott"
} | [] | closed | false | null | [] | null | 1 | "2020-12-29T14:15:37Z" | "2021-01-18T09:27:23Z" | "2021-01-18T09:27:23Z" | CONTRIBUTOR | null | 0 | {
"diff_url": "https://github.com/huggingface/datasets/pull/1663.diff",
"html_url": "https://github.com/huggingface/datasets/pull/1663",
"merged_at": "2021-01-18T09:27:23Z",
"patch_url": "https://github.com/huggingface/datasets/pull/1663.patch",
"url": "https://api.github.com/repos/huggingface/datasets/pulls/1663"
} | - Update saving and loading methods for the faiss index so they accept path-like objects from pathlib
The current code only supports a string type for saving and loading a faiss index. This change makes it possible to use either a string OR a `Path` from [pathlib](https://docs.python.org/3/library/pathlib.html). The code becomes more intuitive this way, I think. (A sketch of the str-or-Path coercion follows this record.) | {
"+1": 0,
"-1": 0,
"confused": 0,
"eyes": 0,
"heart": 0,
"hooray": 0,
"laugh": 0,
"rocket": 0,
"total_count": 0,
"url": "https://api.github.com/repos/huggingface/datasets/issues/1663/reactions"
} | https://api.github.com/repos/huggingface/datasets/issues/1663/timeline | null | null | true |
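A minimal sketch of the coercion the PR above describes, for readers following along. The helper name `_to_str_path` is an illustrative assumption, not the actual `datasets` code, which would apply this at the boundary of its faiss save/load methods:

```python
import os
from pathlib import Path
from typing import Union


def _to_str_path(path: Union[str, Path]) -> str:
    """Coerce a str or os.PathLike into the plain string faiss expects."""
    # os.fspath handles both plain strings and pathlib.Path objects,
    # so callers can pass either without branching on the type.
    return os.fspath(path)


# Both call styles resolve to the same string:
assert _to_str_path("my_index.faiss") == _to_str_path(Path("my_index.faiss"))
```

Coercing once at the boundary like this leaves the rest of the save/load code untouched.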
https://api.github.com/repos/huggingface/datasets/issues/1662 | https://api.github.com/repos/huggingface/datasets | https://api.github.com/repos/huggingface/datasets/issues/1662/labels{/name} | https://api.github.com/repos/huggingface/datasets/issues/1662/comments | https://api.github.com/repos/huggingface/datasets/issues/1662/events | https://github.com/huggingface/datasets/issues/1662 | 775,890,154 | MDU6SXNzdWU3NzU4OTAxNTQ= | 1,662 | Arrow file is too large when saving vector data | {
"avatar_url": "https://avatars.githubusercontent.com/u/22360336?v=4",
"events_url": "https://api.github.com/users/weiwangorg/events{/privacy}",
"followers_url": "https://api.github.com/users/weiwangorg/followers",
"following_url": "https://api.github.com/users/weiwangorg/following{/other_user}",
"gists_url": "https://api.github.com/users/weiwangorg/gists{/gist_id}",
"gravatar_id": "",
"html_url": "https://github.com/weiwangorg",
"id": 22360336,
"login": "weiwangorg",
"node_id": "MDQ6VXNlcjIyMzYwMzM2",
"organizations_url": "https://api.github.com/users/weiwangorg/orgs",
"received_events_url": "https://api.github.com/users/weiwangorg/received_events",
"repos_url": "https://api.github.com/users/weiwangorg/repos",
"site_admin": false,
"starred_url": "https://api.github.com/users/weiwangorg/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/weiwangorg/subscriptions",
"type": "User",
"url": "https://api.github.com/users/weiwangorg"
} | [] | closed | false | null | [] | null | 4 | "2020-12-29T13:23:12Z" | "2021-01-21T14:12:39Z" | "2021-01-21T14:12:39Z" | NONE | null | null | null | I computed the sentence embedding of each sentence of the BookCorpus data using BERT-base and saved them to disk. I used 20M sentences, and the resulting Arrow file is about 59GB while the original text file is only about 1.3GB. Are there any ways to reduce the size of the Arrow file? (A toy sketch of one dtype-based approach follows this record.) | {
"+1": 0,
"-1": 0,
"confused": 0,
"eyes": 0,
"heart": 0,
"hooray": 0,
"laugh": 0,
"rocket": 0,
"total_count": 0,
"url": "https://api.github.com/repos/huggingface/datasets/issues/1662/reactions"
} | https://api.github.com/repos/huggingface/datasets/issues/1662/timeline | null | completed | false |
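Not a confirmed answer to the question above, but one common way to shrink embedding tables is to downcast before writing: BERT-base vectors are 768 floats per sentence, and storing them as plain Python floats materializes float64 in Arrow. A toy sketch under that assumption — the column name `embedding`, the sizes, and the output directory are made up:

```python
import numpy as np
from datasets import Dataset, Features, Sequence, Value

# Toy stand-in for sentence embeddings: 1,000 vectors of dimension 768.
embeddings = np.random.rand(1000, 768)

# Declaring an explicit float32 (or float16) feature roughly halves
# (or quarters) the on-disk footprint versus float64 storage.
ds = Dataset.from_dict(
    {"embedding": embeddings.astype(np.float32).tolist()},
    features=Features({"embedding": Sequence(Value("float32"))}),
)
ds.save_to_disk("embeddings_f32")  # compare folder sizes across dtypes
```

Whether this helps in practice depends on the dtype the embeddings were written with in the first place.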
https://api.github.com/repos/huggingface/datasets/issues/1661 | https://api.github.com/repos/huggingface/datasets | https://api.github.com/repos/huggingface/datasets/issues/1661/labels{/name} | https://api.github.com/repos/huggingface/datasets/issues/1661/comments | https://api.github.com/repos/huggingface/datasets/issues/1661/events | https://github.com/huggingface/datasets/pull/1661 | 775,840,801 | MDExOlB1bGxSZXF1ZXN0NTQ2NDQzNjYx | 1,661 | updated dataset cards | {
"avatar_url": "https://avatars.githubusercontent.com/u/28673745?v=4",
"events_url": "https://api.github.com/users/Nilanshrajput/events{/privacy}",
"followers_url": "https://api.github.com/users/Nilanshrajput/followers",
"following_url": "https://api.github.com/users/Nilanshrajput/following{/other_user}",
"gists_url": "https://api.github.com/users/Nilanshrajput/gists{/gist_id}",
"gravatar_id": "",
"html_url": "https://github.com/Nilanshrajput",
"id": 28673745,
"login": "Nilanshrajput",
"node_id": "MDQ6VXNlcjI4NjczNzQ1",
"organizations_url": "https://api.github.com/users/Nilanshrajput/orgs",
"received_events_url": "https://api.github.com/users/Nilanshrajput/received_events",
"repos_url": "https://api.github.com/users/Nilanshrajput/repos",
"site_admin": false,
"starred_url": "https://api.github.com/users/Nilanshrajput/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/Nilanshrajput/subscriptions",
"type": "User",
"url": "https://api.github.com/users/Nilanshrajput"
} | [] | closed | false | null | [] | null | 0 | "2020-12-29T11:20:40Z" | "2020-12-30T17:15:16Z" | "2020-12-30T17:15:16Z" | CONTRIBUTOR | null | 0 | {
"diff_url": "https://github.com/huggingface/datasets/pull/1661.diff",
"html_url": "https://github.com/huggingface/datasets/pull/1661",
"merged_at": "2020-12-30T17:15:16Z",
"patch_url": "https://github.com/huggingface/datasets/pull/1661.patch",
"url": "https://api.github.com/repos/huggingface/datasets/pulls/1661"
} | Added dataset instances in the card. | {
"+1": 0,
"-1": 0,
"confused": 0,
"eyes": 0,
"heart": 0,
"hooray": 0,
"laugh": 0,
"rocket": 0,
"total_count": 0,
"url": "https://api.github.com/repos/huggingface/datasets/issues/1661/reactions"
} | https://api.github.com/repos/huggingface/datasets/issues/1661/timeline | null | null | true |
https://api.github.com/repos/huggingface/datasets/issues/1660 | https://api.github.com/repos/huggingface/datasets | https://api.github.com/repos/huggingface/datasets/issues/1660/labels{/name} | https://api.github.com/repos/huggingface/datasets/issues/1660/comments | https://api.github.com/repos/huggingface/datasets/issues/1660/events | https://github.com/huggingface/datasets/pull/1660 | 775,831,423 | MDExOlB1bGxSZXF1ZXN0NTQ2NDM2MDg1 | 1,660 | add dataset info | {
"avatar_url": "https://avatars.githubusercontent.com/u/24206326?v=4",
"events_url": "https://api.github.com/users/harshalmittal4/events{/privacy}",
"followers_url": "https://api.github.com/users/harshalmittal4/followers",
"following_url": "https://api.github.com/users/harshalmittal4/following{/other_user}",
"gists_url": "https://api.github.com/users/harshalmittal4/gists{/gist_id}",
"gravatar_id": "",
"html_url": "https://github.com/harshalmittal4",
"id": 24206326,
"login": "harshalmittal4",
"node_id": "MDQ6VXNlcjI0MjA2MzI2",
"organizations_url": "https://api.github.com/users/harshalmittal4/orgs",
"received_events_url": "https://api.github.com/users/harshalmittal4/received_events",
"repos_url": "https://api.github.com/users/harshalmittal4/repos",
"site_admin": false,
"starred_url": "https://api.github.com/users/harshalmittal4/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/harshalmittal4/subscriptions",
"type": "User",
"url": "https://api.github.com/users/harshalmittal4"
} | [] | closed | false | null | [] | null | 0 | "2020-12-29T10:58:19Z" | "2020-12-30T17:04:30Z" | "2020-12-30T17:04:30Z" | CONTRIBUTOR | null | 0 | {
"diff_url": "https://github.com/huggingface/datasets/pull/1660.diff",
"html_url": "https://github.com/huggingface/datasets/pull/1660",
"merged_at": "2020-12-30T17:04:30Z",
"patch_url": "https://github.com/huggingface/datasets/pull/1660.patch",
"url": "https://api.github.com/repos/huggingface/datasets/pulls/1660"
} | {
"+1": 0,
"-1": 0,
"confused": 0,
"eyes": 0,
"heart": 0,
"hooray": 0,
"laugh": 0,
"rocket": 0,
"total_count": 0,
"url": "https://api.github.com/repos/huggingface/datasets/issues/1660/reactions"
} | https://api.github.com/repos/huggingface/datasets/issues/1660/timeline | null | null | true |
|
https://api.github.com/repos/huggingface/datasets/issues/1659 | https://api.github.com/repos/huggingface/datasets | https://api.github.com/repos/huggingface/datasets/issues/1659/labels{/name} | https://api.github.com/repos/huggingface/datasets/issues/1659/comments | https://api.github.com/repos/huggingface/datasets/issues/1659/events | https://github.com/huggingface/datasets/pull/1659 | 775,831,288 | MDExOlB1bGxSZXF1ZXN0NTQ2NDM1OTcy | 1,659 | update dataset info | {
"avatar_url": "https://avatars.githubusercontent.com/u/24206326?v=4",
"events_url": "https://api.github.com/users/harshalmittal4/events{/privacy}",
"followers_url": "https://api.github.com/users/harshalmittal4/followers",
"following_url": "https://api.github.com/users/harshalmittal4/following{/other_user}",
"gists_url": "https://api.github.com/users/harshalmittal4/gists{/gist_id}",
"gravatar_id": "",
"html_url": "https://github.com/harshalmittal4",
"id": 24206326,
"login": "harshalmittal4",
"node_id": "MDQ6VXNlcjI0MjA2MzI2",
"organizations_url": "https://api.github.com/users/harshalmittal4/orgs",
"received_events_url": "https://api.github.com/users/harshalmittal4/received_events",
"repos_url": "https://api.github.com/users/harshalmittal4/repos",
"site_admin": false,
"starred_url": "https://api.github.com/users/harshalmittal4/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/harshalmittal4/subscriptions",
"type": "User",
"url": "https://api.github.com/users/harshalmittal4"
} | [] | closed | false | null | [] | null | 0 | "2020-12-29T10:58:01Z" | "2020-12-30T16:55:07Z" | "2020-12-30T16:55:07Z" | CONTRIBUTOR | null | 0 | {
"diff_url": "https://github.com/huggingface/datasets/pull/1659.diff",
"html_url": "https://github.com/huggingface/datasets/pull/1659",
"merged_at": "2020-12-30T16:55:07Z",
"patch_url": "https://github.com/huggingface/datasets/pull/1659.patch",
"url": "https://api.github.com/repos/huggingface/datasets/pulls/1659"
} | {
"+1": 0,
"-1": 0,
"confused": 0,
"eyes": 0,
"heart": 0,
"hooray": 0,
"laugh": 0,
"rocket": 0,
"total_count": 0,
"url": "https://api.github.com/repos/huggingface/datasets/issues/1659/reactions"
} | https://api.github.com/repos/huggingface/datasets/issues/1659/timeline | null | null | true |
|
https://api.github.com/repos/huggingface/datasets/issues/1658 | https://api.github.com/repos/huggingface/datasets | https://api.github.com/repos/huggingface/datasets/issues/1658/labels{/name} | https://api.github.com/repos/huggingface/datasets/issues/1658/comments | https://api.github.com/repos/huggingface/datasets/issues/1658/events | https://github.com/huggingface/datasets/pull/1658 | 775,651,085 | MDExOlB1bGxSZXF1ZXN0NTQ2Mjg4Njg4 | 1,658 | brwac dataset: add instances and data splits info | {
"avatar_url": "https://avatars.githubusercontent.com/u/5097052?v=4",
"events_url": "https://api.github.com/users/jonatasgrosman/events{/privacy}",
"followers_url": "https://api.github.com/users/jonatasgrosman/followers",
"following_url": "https://api.github.com/users/jonatasgrosman/following{/other_user}",
"gists_url": "https://api.github.com/users/jonatasgrosman/gists{/gist_id}",
"gravatar_id": "",
"html_url": "https://github.com/jonatasgrosman",
"id": 5097052,
"login": "jonatasgrosman",
"node_id": "MDQ6VXNlcjUwOTcwNTI=",
"organizations_url": "https://api.github.com/users/jonatasgrosman/orgs",
"received_events_url": "https://api.github.com/users/jonatasgrosman/received_events",
"repos_url": "https://api.github.com/users/jonatasgrosman/repos",
"site_admin": false,
"starred_url": "https://api.github.com/users/jonatasgrosman/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/jonatasgrosman/subscriptions",
"type": "User",
"url": "https://api.github.com/users/jonatasgrosman"
} | [] | closed | false | null | [] | null | 0 | "2020-12-29T01:24:45Z" | "2020-12-30T16:54:26Z" | "2020-12-30T16:54:26Z" | CONTRIBUTOR | null | 0 | {
"diff_url": "https://github.com/huggingface/datasets/pull/1658.diff",
"html_url": "https://github.com/huggingface/datasets/pull/1658",
"merged_at": "2020-12-30T16:54:26Z",
"patch_url": "https://github.com/huggingface/datasets/pull/1658.patch",
"url": "https://api.github.com/repos/huggingface/datasets/pulls/1658"
} | {
"+1": 0,
"-1": 0,
"confused": 0,
"eyes": 0,
"heart": 0,
"hooray": 0,
"laugh": 0,
"rocket": 0,
"total_count": 0,
"url": "https://api.github.com/repos/huggingface/datasets/issues/1658/reactions"
} | https://api.github.com/repos/huggingface/datasets/issues/1658/timeline | null | null | true |
|
https://api.github.com/repos/huggingface/datasets/issues/1657 | https://api.github.com/repos/huggingface/datasets | https://api.github.com/repos/huggingface/datasets/issues/1657/labels{/name} | https://api.github.com/repos/huggingface/datasets/issues/1657/comments | https://api.github.com/repos/huggingface/datasets/issues/1657/events | https://github.com/huggingface/datasets/pull/1657 | 775,647,000 | MDExOlB1bGxSZXF1ZXN0NTQ2Mjg1NjU2 | 1,657 | mac_morpho dataset: add data splits info | {
"avatar_url": "https://avatars.githubusercontent.com/u/5097052?v=4",
"events_url": "https://api.github.com/users/jonatasgrosman/events{/privacy}",
"followers_url": "https://api.github.com/users/jonatasgrosman/followers",
"following_url": "https://api.github.com/users/jonatasgrosman/following{/other_user}",
"gists_url": "https://api.github.com/users/jonatasgrosman/gists{/gist_id}",
"gravatar_id": "",
"html_url": "https://github.com/jonatasgrosman",
"id": 5097052,
"login": "jonatasgrosman",
"node_id": "MDQ6VXNlcjUwOTcwNTI=",
"organizations_url": "https://api.github.com/users/jonatasgrosman/orgs",
"received_events_url": "https://api.github.com/users/jonatasgrosman/received_events",
"repos_url": "https://api.github.com/users/jonatasgrosman/repos",
"site_admin": false,
"starred_url": "https://api.github.com/users/jonatasgrosman/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/jonatasgrosman/subscriptions",
"type": "User",
"url": "https://api.github.com/users/jonatasgrosman"
} | [] | closed | false | null | [] | null | 0 | "2020-12-29T01:05:21Z" | "2020-12-30T16:51:24Z" | "2020-12-30T16:51:24Z" | CONTRIBUTOR | null | 0 | {
"diff_url": "https://github.com/huggingface/datasets/pull/1657.diff",
"html_url": "https://github.com/huggingface/datasets/pull/1657",
"merged_at": "2020-12-30T16:51:24Z",
"patch_url": "https://github.com/huggingface/datasets/pull/1657.patch",
"url": "https://api.github.com/repos/huggingface/datasets/pulls/1657"
} | {
"+1": 0,
"-1": 0,
"confused": 0,
"eyes": 0,
"heart": 0,
"hooray": 0,
"laugh": 0,
"rocket": 0,
"total_count": 0,
"url": "https://api.github.com/repos/huggingface/datasets/issues/1657/reactions"
} | https://api.github.com/repos/huggingface/datasets/issues/1657/timeline | null | null | true |
|
https://api.github.com/repos/huggingface/datasets/issues/1656 | https://api.github.com/repos/huggingface/datasets | https://api.github.com/repos/huggingface/datasets/issues/1656/labels{/name} | https://api.github.com/repos/huggingface/datasets/issues/1656/comments | https://api.github.com/repos/huggingface/datasets/issues/1656/events | https://github.com/huggingface/datasets/pull/1656 | 775,645,356 | MDExOlB1bGxSZXF1ZXN0NTQ2Mjg0NDI3 | 1,656 | assin 2 dataset: add instances and data splits info | {
"avatar_url": "https://avatars.githubusercontent.com/u/5097052?v=4",
"events_url": "https://api.github.com/users/jonatasgrosman/events{/privacy}",
"followers_url": "https://api.github.com/users/jonatasgrosman/followers",
"following_url": "https://api.github.com/users/jonatasgrosman/following{/other_user}",
"gists_url": "https://api.github.com/users/jonatasgrosman/gists{/gist_id}",
"gravatar_id": "",
"html_url": "https://github.com/jonatasgrosman",
"id": 5097052,
"login": "jonatasgrosman",
"node_id": "MDQ6VXNlcjUwOTcwNTI=",
"organizations_url": "https://api.github.com/users/jonatasgrosman/orgs",
"received_events_url": "https://api.github.com/users/jonatasgrosman/received_events",
"repos_url": "https://api.github.com/users/jonatasgrosman/repos",
"site_admin": false,
"starred_url": "https://api.github.com/users/jonatasgrosman/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/jonatasgrosman/subscriptions",
"type": "User",
"url": "https://api.github.com/users/jonatasgrosman"
} | [] | closed | false | null | [] | null | 0 | "2020-12-29T00:57:51Z" | "2020-12-30T16:50:56Z" | "2020-12-30T16:50:56Z" | CONTRIBUTOR | null | 0 | {
"diff_url": "https://github.com/huggingface/datasets/pull/1656.diff",
"html_url": "https://github.com/huggingface/datasets/pull/1656",
"merged_at": "2020-12-30T16:50:56Z",
"patch_url": "https://github.com/huggingface/datasets/pull/1656.patch",
"url": "https://api.github.com/repos/huggingface/datasets/pulls/1656"
} | {
"+1": 0,
"-1": 0,
"confused": 0,
"eyes": 0,
"heart": 0,
"hooray": 0,
"laugh": 0,
"rocket": 0,
"total_count": 0,
"url": "https://api.github.com/repos/huggingface/datasets/issues/1656/reactions"
} | https://api.github.com/repos/huggingface/datasets/issues/1656/timeline | null | null | true |
|
https://api.github.com/repos/huggingface/datasets/issues/1655 | https://api.github.com/repos/huggingface/datasets | https://api.github.com/repos/huggingface/datasets/issues/1655/labels{/name} | https://api.github.com/repos/huggingface/datasets/issues/1655/comments | https://api.github.com/repos/huggingface/datasets/issues/1655/events | https://github.com/huggingface/datasets/pull/1655 | 775,643,418 | MDExOlB1bGxSZXF1ZXN0NTQ2MjgyOTM4 | 1,655 | assin dataset: add instances and data splits info | {
"avatar_url": "https://avatars.githubusercontent.com/u/5097052?v=4",
"events_url": "https://api.github.com/users/jonatasgrosman/events{/privacy}",
"followers_url": "https://api.github.com/users/jonatasgrosman/followers",
"following_url": "https://api.github.com/users/jonatasgrosman/following{/other_user}",
"gists_url": "https://api.github.com/users/jonatasgrosman/gists{/gist_id}",
"gravatar_id": "",
"html_url": "https://github.com/jonatasgrosman",
"id": 5097052,
"login": "jonatasgrosman",
"node_id": "MDQ6VXNlcjUwOTcwNTI=",
"organizations_url": "https://api.github.com/users/jonatasgrosman/orgs",
"received_events_url": "https://api.github.com/users/jonatasgrosman/received_events",
"repos_url": "https://api.github.com/users/jonatasgrosman/repos",
"site_admin": false,
"starred_url": "https://api.github.com/users/jonatasgrosman/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/jonatasgrosman/subscriptions",
"type": "User",
"url": "https://api.github.com/users/jonatasgrosman"
} | [] | closed | false | null | [] | null | 0 | "2020-12-29T00:47:56Z" | "2020-12-30T16:50:23Z" | "2020-12-30T16:50:23Z" | CONTRIBUTOR | null | 0 | {
"diff_url": "https://github.com/huggingface/datasets/pull/1655.diff",
"html_url": "https://github.com/huggingface/datasets/pull/1655",
"merged_at": "2020-12-30T16:50:22Z",
"patch_url": "https://github.com/huggingface/datasets/pull/1655.patch",
"url": "https://api.github.com/repos/huggingface/datasets/pulls/1655"
} | {
"+1": 0,
"-1": 0,
"confused": 0,
"eyes": 0,
"heart": 0,
"hooray": 0,
"laugh": 0,
"rocket": 0,
"total_count": 0,
"url": "https://api.github.com/repos/huggingface/datasets/issues/1655/reactions"
} | https://api.github.com/repos/huggingface/datasets/issues/1655/timeline | null | null | true |
|
https://api.github.com/repos/huggingface/datasets/issues/1654 | https://api.github.com/repos/huggingface/datasets | https://api.github.com/repos/huggingface/datasets/issues/1654/labels{/name} | https://api.github.com/repos/huggingface/datasets/issues/1654/comments | https://api.github.com/repos/huggingface/datasets/issues/1654/events | https://github.com/huggingface/datasets/pull/1654 | 775,640,729 | MDExOlB1bGxSZXF1ZXN0NTQ2MjgwODIy | 1,654 | lener_br dataset: add instances and data splits info | {
"avatar_url": "https://avatars.githubusercontent.com/u/5097052?v=4",
"events_url": "https://api.github.com/users/jonatasgrosman/events{/privacy}",
"followers_url": "https://api.github.com/users/jonatasgrosman/followers",
"following_url": "https://api.github.com/users/jonatasgrosman/following{/other_user}",
"gists_url": "https://api.github.com/users/jonatasgrosman/gists{/gist_id}",
"gravatar_id": "",
"html_url": "https://github.com/jonatasgrosman",
"id": 5097052,
"login": "jonatasgrosman",
"node_id": "MDQ6VXNlcjUwOTcwNTI=",
"organizations_url": "https://api.github.com/users/jonatasgrosman/orgs",
"received_events_url": "https://api.github.com/users/jonatasgrosman/received_events",
"repos_url": "https://api.github.com/users/jonatasgrosman/repos",
"site_admin": false,
"starred_url": "https://api.github.com/users/jonatasgrosman/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/jonatasgrosman/subscriptions",
"type": "User",
"url": "https://api.github.com/users/jonatasgrosman"
} | [] | closed | false | null | [] | null | 0 | "2020-12-29T00:35:12Z" | "2020-12-30T16:49:32Z" | "2020-12-30T16:49:32Z" | CONTRIBUTOR | null | 0 | {
"diff_url": "https://github.com/huggingface/datasets/pull/1654.diff",
"html_url": "https://github.com/huggingface/datasets/pull/1654",
"merged_at": "2020-12-30T16:49:32Z",
"patch_url": "https://github.com/huggingface/datasets/pull/1654.patch",
"url": "https://api.github.com/repos/huggingface/datasets/pulls/1654"
} | {
"+1": 0,
"-1": 0,
"confused": 0,
"eyes": 0,
"heart": 0,
"hooray": 0,
"laugh": 0,
"rocket": 0,
"total_count": 0,
"url": "https://api.github.com/repos/huggingface/datasets/issues/1654/reactions"
} | https://api.github.com/repos/huggingface/datasets/issues/1654/timeline | null | null | true |
|
https://api.github.com/repos/huggingface/datasets/issues/1653 | https://api.github.com/repos/huggingface/datasets | https://api.github.com/repos/huggingface/datasets/issues/1653/labels{/name} | https://api.github.com/repos/huggingface/datasets/issues/1653/comments | https://api.github.com/repos/huggingface/datasets/issues/1653/events | https://github.com/huggingface/datasets/pull/1653 | 775,632,945 | MDExOlB1bGxSZXF1ZXN0NTQ2Mjc0Njc0 | 1,653 | harem dataset: add data splits info | {
"avatar_url": "https://avatars.githubusercontent.com/u/5097052?v=4",
"events_url": "https://api.github.com/users/jonatasgrosman/events{/privacy}",
"followers_url": "https://api.github.com/users/jonatasgrosman/followers",
"following_url": "https://api.github.com/users/jonatasgrosman/following{/other_user}",
"gists_url": "https://api.github.com/users/jonatasgrosman/gists{/gist_id}",
"gravatar_id": "",
"html_url": "https://github.com/jonatasgrosman",
"id": 5097052,
"login": "jonatasgrosman",
"node_id": "MDQ6VXNlcjUwOTcwNTI=",
"organizations_url": "https://api.github.com/users/jonatasgrosman/orgs",
"received_events_url": "https://api.github.com/users/jonatasgrosman/received_events",
"repos_url": "https://api.github.com/users/jonatasgrosman/repos",
"site_admin": false,
"starred_url": "https://api.github.com/users/jonatasgrosman/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/jonatasgrosman/subscriptions",
"type": "User",
"url": "https://api.github.com/users/jonatasgrosman"
} | [] | closed | false | null | [] | null | 0 | "2020-12-28T23:58:20Z" | "2020-12-30T16:49:03Z" | "2020-12-30T16:49:03Z" | CONTRIBUTOR | null | 0 | {
"diff_url": "https://github.com/huggingface/datasets/pull/1653.diff",
"html_url": "https://github.com/huggingface/datasets/pull/1653",
"merged_at": "2020-12-30T16:49:03Z",
"patch_url": "https://github.com/huggingface/datasets/pull/1653.patch",
"url": "https://api.github.com/repos/huggingface/datasets/pulls/1653"
} | {
"+1": 0,
"-1": 0,
"confused": 0,
"eyes": 0,
"heart": 0,
"hooray": 0,
"laugh": 0,
"rocket": 0,
"total_count": 0,
"url": "https://api.github.com/repos/huggingface/datasets/issues/1653/reactions"
} | https://api.github.com/repos/huggingface/datasets/issues/1653/timeline | null | null | true |
|
https://api.github.com/repos/huggingface/datasets/issues/1652 | https://api.github.com/repos/huggingface/datasets | https://api.github.com/repos/huggingface/datasets/issues/1652/labels{/name} | https://api.github.com/repos/huggingface/datasets/issues/1652/comments | https://api.github.com/repos/huggingface/datasets/issues/1652/events | https://github.com/huggingface/datasets/pull/1652 | 775,571,813 | MDExOlB1bGxSZXF1ZXN0NTQ2MjI1NTM1 | 1,652 | Update dataset cards from previous sprint | {
"avatar_url": "https://avatars.githubusercontent.com/u/22435209?v=4",
"events_url": "https://api.github.com/users/j-chim/events{/privacy}",
"followers_url": "https://api.github.com/users/j-chim/followers",
"following_url": "https://api.github.com/users/j-chim/following{/other_user}",
"gists_url": "https://api.github.com/users/j-chim/gists{/gist_id}",
"gravatar_id": "",
"html_url": "https://github.com/j-chim",
"id": 22435209,
"login": "j-chim",
"node_id": "MDQ6VXNlcjIyNDM1MjA5",
"organizations_url": "https://api.github.com/users/j-chim/orgs",
"received_events_url": "https://api.github.com/users/j-chim/received_events",
"repos_url": "https://api.github.com/users/j-chim/repos",
"site_admin": false,
"starred_url": "https://api.github.com/users/j-chim/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/j-chim/subscriptions",
"type": "User",
"url": "https://api.github.com/users/j-chim"
} | [] | closed | false | null | [] | null | 0 | "2020-12-28T20:20:47Z" | "2020-12-30T16:48:04Z" | "2020-12-30T16:48:04Z" | CONTRIBUTOR | null | 0 | {
"diff_url": "https://github.com/huggingface/datasets/pull/1652.diff",
"html_url": "https://github.com/huggingface/datasets/pull/1652",
"merged_at": "2020-12-30T16:48:04Z",
"patch_url": "https://github.com/huggingface/datasets/pull/1652.patch",
"url": "https://api.github.com/repos/huggingface/datasets/pulls/1652"
} | This PR updates the dataset cards/readmes for the 4 approved PRs I submitted in the previous sprint. | {
"+1": 0,
"-1": 0,
"confused": 0,
"eyes": 0,
"heart": 0,
"hooray": 0,
"laugh": 0,
"rocket": 0,
"total_count": 0,
"url": "https://api.github.com/repos/huggingface/datasets/issues/1652/reactions"
} | https://api.github.com/repos/huggingface/datasets/issues/1652/timeline | null | null | true |
https://api.github.com/repos/huggingface/datasets/issues/1651 | https://api.github.com/repos/huggingface/datasets | https://api.github.com/repos/huggingface/datasets/issues/1651/labels{/name} | https://api.github.com/repos/huggingface/datasets/issues/1651/comments | https://api.github.com/repos/huggingface/datasets/issues/1651/events | https://github.com/huggingface/datasets/pull/1651 | 775,554,319 | MDExOlB1bGxSZXF1ZXN0NTQ2MjExMjQw | 1,651 | Add twi wordsim353 | {
"avatar_url": "https://avatars.githubusercontent.com/u/23586676?v=4",
"events_url": "https://api.github.com/users/dadelani/events{/privacy}",
"followers_url": "https://api.github.com/users/dadelani/followers",
"following_url": "https://api.github.com/users/dadelani/following{/other_user}",
"gists_url": "https://api.github.com/users/dadelani/gists{/gist_id}",
"gravatar_id": "",
"html_url": "https://github.com/dadelani",
"id": 23586676,
"login": "dadelani",
"node_id": "MDQ6VXNlcjIzNTg2Njc2",
"organizations_url": "https://api.github.com/users/dadelani/orgs",
"received_events_url": "https://api.github.com/users/dadelani/received_events",
"repos_url": "https://api.github.com/users/dadelani/repos",
"site_admin": false,
"starred_url": "https://api.github.com/users/dadelani/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/dadelani/subscriptions",
"type": "User",
"url": "https://api.github.com/users/dadelani"
} | [] | closed | false | null | [] | null | 3 | "2020-12-28T19:31:55Z" | "2021-01-04T09:39:39Z" | "2021-01-04T09:39:38Z" | CONTRIBUTOR | null | 0 | {
"diff_url": "https://github.com/huggingface/datasets/pull/1651.diff",
"html_url": "https://github.com/huggingface/datasets/pull/1651",
"merged_at": "2021-01-04T09:39:38Z",
"patch_url": "https://github.com/huggingface/datasets/pull/1651.patch",
"url": "https://api.github.com/repos/huggingface/datasets/pulls/1651"
} | Added the citation information to the README file | {
"+1": 0,
"-1": 0,
"confused": 0,
"eyes": 0,
"heart": 0,
"hooray": 0,
"laugh": 0,
"rocket": 0,
"total_count": 0,
"url": "https://api.github.com/repos/huggingface/datasets/issues/1651/reactions"
} | https://api.github.com/repos/huggingface/datasets/issues/1651/timeline | null | null | true |
https://api.github.com/repos/huggingface/datasets/issues/1650 | https://api.github.com/repos/huggingface/datasets | https://api.github.com/repos/huggingface/datasets/issues/1650/labels{/name} | https://api.github.com/repos/huggingface/datasets/issues/1650/comments | https://api.github.com/repos/huggingface/datasets/issues/1650/events | https://github.com/huggingface/datasets/pull/1650 | 775,545,912 | MDExOlB1bGxSZXF1ZXN0NTQ2MjA0MzYy | 1,650 | Update README.md | {
"avatar_url": "https://avatars.githubusercontent.com/u/15351802?v=4",
"events_url": "https://api.github.com/users/MisbahKhan789/events{/privacy}",
"followers_url": "https://api.github.com/users/MisbahKhan789/followers",
"following_url": "https://api.github.com/users/MisbahKhan789/following{/other_user}",
"gists_url": "https://api.github.com/users/MisbahKhan789/gists{/gist_id}",
"gravatar_id": "",
"html_url": "https://github.com/MisbahKhan789",
"id": 15351802,
"login": "MisbahKhan789",
"node_id": "MDQ6VXNlcjE1MzUxODAy",
"organizations_url": "https://api.github.com/users/MisbahKhan789/orgs",
"received_events_url": "https://api.github.com/users/MisbahKhan789/received_events",
"repos_url": "https://api.github.com/users/MisbahKhan789/repos",
"site_admin": false,
"starred_url": "https://api.github.com/users/MisbahKhan789/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/MisbahKhan789/subscriptions",
"type": "User",
"url": "https://api.github.com/users/MisbahKhan789"
} | [] | closed | false | null | [] | null | 0 | "2020-12-28T19:09:05Z" | "2020-12-29T10:43:14Z" | "2020-12-29T10:43:14Z" | CONTRIBUTOR | null | 0 | {
"diff_url": "https://github.com/huggingface/datasets/pull/1650.diff",
"html_url": "https://github.com/huggingface/datasets/pull/1650",
"merged_at": "2020-12-29T10:43:14Z",
"patch_url": "https://github.com/huggingface/datasets/pull/1650.patch",
"url": "https://api.github.com/repos/huggingface/datasets/pulls/1650"
} | added dataset summary | {
"+1": 0,
"-1": 0,
"confused": 0,
"eyes": 0,
"heart": 0,
"hooray": 0,
"laugh": 0,
"rocket": 0,
"total_count": 0,
"url": "https://api.github.com/repos/huggingface/datasets/issues/1650/reactions"
} | https://api.github.com/repos/huggingface/datasets/issues/1650/timeline | null | null | true |
https://api.github.com/repos/huggingface/datasets/issues/1649 | https://api.github.com/repos/huggingface/datasets | https://api.github.com/repos/huggingface/datasets/issues/1649/labels{/name} | https://api.github.com/repos/huggingface/datasets/issues/1649/comments | https://api.github.com/repos/huggingface/datasets/issues/1649/events | https://github.com/huggingface/datasets/pull/1649 | 775,544,487 | MDExOlB1bGxSZXF1ZXN0NTQ2MjAzMjE1 | 1,649 | Update README.md | {
"avatar_url": "https://avatars.githubusercontent.com/u/15351802?v=4",
"events_url": "https://api.github.com/users/MisbahKhan789/events{/privacy}",
"followers_url": "https://api.github.com/users/MisbahKhan789/followers",
"following_url": "https://api.github.com/users/MisbahKhan789/following{/other_user}",
"gists_url": "https://api.github.com/users/MisbahKhan789/gists{/gist_id}",
"gravatar_id": "",
"html_url": "https://github.com/MisbahKhan789",
"id": 15351802,
"login": "MisbahKhan789",
"node_id": "MDQ6VXNlcjE1MzUxODAy",
"organizations_url": "https://api.github.com/users/MisbahKhan789/orgs",
"received_events_url": "https://api.github.com/users/MisbahKhan789/received_events",
"repos_url": "https://api.github.com/users/MisbahKhan789/repos",
"site_admin": false,
"starred_url": "https://api.github.com/users/MisbahKhan789/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/MisbahKhan789/subscriptions",
"type": "User",
"url": "https://api.github.com/users/MisbahKhan789"
} | [] | closed | false | null | [] | null | 0 | "2020-12-28T19:05:00Z" | "2020-12-29T10:50:58Z" | "2020-12-29T10:43:03Z" | CONTRIBUTOR | null | 0 | {
"diff_url": "https://github.com/huggingface/datasets/pull/1649.diff",
"html_url": "https://github.com/huggingface/datasets/pull/1649",
"merged_at": "2020-12-29T10:43:03Z",
"patch_url": "https://github.com/huggingface/datasets/pull/1649.patch",
"url": "https://api.github.com/repos/huggingface/datasets/pulls/1649"
} | Added information in the dataset card | {
"+1": 0,
"-1": 0,
"confused": 0,
"eyes": 0,
"heart": 0,
"hooray": 0,
"laugh": 0,
"rocket": 0,
"total_count": 0,
"url": "https://api.github.com/repos/huggingface/datasets/issues/1649/reactions"
} | https://api.github.com/repos/huggingface/datasets/issues/1649/timeline | null | null | true |
https://api.github.com/repos/huggingface/datasets/issues/1648 | https://api.github.com/repos/huggingface/datasets | https://api.github.com/repos/huggingface/datasets/issues/1648/labels{/name} | https://api.github.com/repos/huggingface/datasets/issues/1648/comments | https://api.github.com/repos/huggingface/datasets/issues/1648/events | https://github.com/huggingface/datasets/pull/1648 | 775,542,360 | MDExOlB1bGxSZXF1ZXN0NTQ2MjAxNTQ0 | 1,648 | Update README.md | {
"avatar_url": "https://avatars.githubusercontent.com/u/15351802?v=4",
"events_url": "https://api.github.com/users/MisbahKhan789/events{/privacy}",
"followers_url": "https://api.github.com/users/MisbahKhan789/followers",
"following_url": "https://api.github.com/users/MisbahKhan789/following{/other_user}",
"gists_url": "https://api.github.com/users/MisbahKhan789/gists{/gist_id}",
"gravatar_id": "",
"html_url": "https://github.com/MisbahKhan789",
"id": 15351802,
"login": "MisbahKhan789",
"node_id": "MDQ6VXNlcjE1MzUxODAy",
"organizations_url": "https://api.github.com/users/MisbahKhan789/orgs",
"received_events_url": "https://api.github.com/users/MisbahKhan789/received_events",
"repos_url": "https://api.github.com/users/MisbahKhan789/repos",
"site_admin": false,
"starred_url": "https://api.github.com/users/MisbahKhan789/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/MisbahKhan789/subscriptions",
"type": "User",
"url": "https://api.github.com/users/MisbahKhan789"
} | [] | closed | false | null | [] | null | 0 | "2020-12-28T18:59:06Z" | "2020-12-29T10:39:14Z" | "2020-12-29T10:39:14Z" | CONTRIBUTOR | null | 0 | {
"diff_url": "https://github.com/huggingface/datasets/pull/1648.diff",
"html_url": "https://github.com/huggingface/datasets/pull/1648",
"merged_at": "2020-12-29T10:39:14Z",
"patch_url": "https://github.com/huggingface/datasets/pull/1648.patch",
"url": "https://api.github.com/repos/huggingface/datasets/pulls/1648"
} | added dataset summary | {
"+1": 0,
"-1": 0,
"confused": 0,
"eyes": 0,
"heart": 0,
"hooray": 0,
"laugh": 0,
"rocket": 0,
"total_count": 0,
"url": "https://api.github.com/repos/huggingface/datasets/issues/1648/reactions"
} | https://api.github.com/repos/huggingface/datasets/issues/1648/timeline | null | null | true |
https://api.github.com/repos/huggingface/datasets/issues/1647 | https://api.github.com/repos/huggingface/datasets | https://api.github.com/repos/huggingface/datasets/issues/1647/labels{/name} | https://api.github.com/repos/huggingface/datasets/issues/1647/comments | https://api.github.com/repos/huggingface/datasets/issues/1647/events | https://github.com/huggingface/datasets/issues/1647 | 775,525,799 | MDU6SXNzdWU3NzU1MjU3OTk= | 1,647 | NarrativeQA fails to load with `load_dataset` | {
"avatar_url": "https://avatars.githubusercontent.com/u/56408839?v=4",
"events_url": "https://api.github.com/users/eric-mitchell/events{/privacy}",
"followers_url": "https://api.github.com/users/eric-mitchell/followers",
"following_url": "https://api.github.com/users/eric-mitchell/following{/other_user}",
"gists_url": "https://api.github.com/users/eric-mitchell/gists{/gist_id}",
"gravatar_id": "",
"html_url": "https://github.com/eric-mitchell",
"id": 56408839,
"login": "eric-mitchell",
"node_id": "MDQ6VXNlcjU2NDA4ODM5",
"organizations_url": "https://api.github.com/users/eric-mitchell/orgs",
"received_events_url": "https://api.github.com/users/eric-mitchell/received_events",
"repos_url": "https://api.github.com/users/eric-mitchell/repos",
"site_admin": false,
"starred_url": "https://api.github.com/users/eric-mitchell/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/eric-mitchell/subscriptions",
"type": "User",
"url": "https://api.github.com/users/eric-mitchell"
} | [] | closed | false | null | [] | null | 3 | "2020-12-28T18:16:09Z" | "2021-01-05T12:05:08Z" | "2021-01-03T17:58:05Z" | NONE | null | null | null | When loading the NarrativeQA dataset with `load_dataset('narrativeqa')` as given in the documentation [here](https://huggingface.co/datasets/narrativeqa), I receive a cascade of exceptions, ending with
```
FileNotFoundError: Couldn't find file locally at narrativeqa/narrativeqa.py, or remotely at
https://raw.githubusercontent.com/huggingface/datasets/1.1.3/datasets/narrativeqa/narrativeqa.py or
https://s3.amazonaws.com/datasets.huggingface.co/datasets/datasets/narrativeqa/narrativeqa.py
```
Workaround: if I manually copy the `narrativeqa.py` builder into my local directory with
```
curl https://raw.githubusercontent.com/huggingface/datasets/master/datasets/narrativeqa/narrativeqa.py -o narrativeqa.py
```
and load the dataset as `load_dataset('narrativeqa.py')`, everything works fine. I'm on datasets v1.1.3 using Python 3.6.10. (A lighter-weight workaround sketch follows this record.) | {
"+1": 0,
"-1": 0,
"confused": 0,
"eyes": 0,
"heart": 0,
"hooray": 0,
"laugh": 0,
"rocket": 0,
"total_count": 0,
"url": "https://api.github.com/repos/huggingface/datasets/issues/1647/reactions"
} | https://api.github.com/repos/huggingface/datasets/issues/1647/timeline | null | completed | false |
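A lighter-weight variant of the workaround above may be to keep the canonical name but point script resolution at the master branch, using the `script_version` argument that the datasets 1.x `load_dataset` exposes (it appears in the tracebacks elsewhere in this dump). This assumes the builder exists on master, which the curl command above suggests:

```python
from datasets import load_dataset

# datasets 1.x resolves builder scripts from a URL pinned to the installed
# release (1.1.3 above); narrativeqa is missing at that tag, hence the 404s.
# Pointing script resolution at the master branch sidesteps the pinned URL.
dataset = load_dataset("narrativeqa", script_version="master")
```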
https://api.github.com/repos/huggingface/datasets/issues/1646 | https://api.github.com/repos/huggingface/datasets | https://api.github.com/repos/huggingface/datasets/issues/1646/labels{/name} | https://api.github.com/repos/huggingface/datasets/issues/1646/comments | https://api.github.com/repos/huggingface/datasets/issues/1646/events | https://github.com/huggingface/datasets/pull/1646 | 775,499,344 | MDExOlB1bGxSZXF1ZXN0NTQ2MTY4MTk3 | 1,646 | Add missing homepage in some dataset cards | {
"avatar_url": "https://avatars.githubusercontent.com/u/42851186?v=4",
"events_url": "https://api.github.com/users/lhoestq/events{/privacy}",
"followers_url": "https://api.github.com/users/lhoestq/followers",
"following_url": "https://api.github.com/users/lhoestq/following{/other_user}",
"gists_url": "https://api.github.com/users/lhoestq/gists{/gist_id}",
"gravatar_id": "",
"html_url": "https://github.com/lhoestq",
"id": 42851186,
"login": "lhoestq",
"node_id": "MDQ6VXNlcjQyODUxMTg2",
"organizations_url": "https://api.github.com/users/lhoestq/orgs",
"received_events_url": "https://api.github.com/users/lhoestq/received_events",
"repos_url": "https://api.github.com/users/lhoestq/repos",
"site_admin": false,
"starred_url": "https://api.github.com/users/lhoestq/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/lhoestq/subscriptions",
"type": "User",
"url": "https://api.github.com/users/lhoestq"
} | [] | closed | false | null | [] | null | 0 | "2020-12-28T17:09:48Z" | "2021-01-04T14:08:57Z" | "2021-01-04T14:08:56Z" | MEMBER | null | 0 | {
"diff_url": "https://github.com/huggingface/datasets/pull/1646.diff",
"html_url": "https://github.com/huggingface/datasets/pull/1646",
"merged_at": "2021-01-04T14:08:56Z",
"patch_url": "https://github.com/huggingface/datasets/pull/1646.patch",
"url": "https://api.github.com/repos/huggingface/datasets/pulls/1646"
} | In some dataset cards the homepage field in the `Dataset Description` section was missing/empty | {
"+1": 0,
"-1": 0,
"confused": 0,
"eyes": 0,
"heart": 0,
"hooray": 0,
"laugh": 0,
"rocket": 0,
"total_count": 0,
"url": "https://api.github.com/repos/huggingface/datasets/issues/1646/reactions"
} | https://api.github.com/repos/huggingface/datasets/issues/1646/timeline | null | null | true |
https://api.github.com/repos/huggingface/datasets/issues/1645 | https://api.github.com/repos/huggingface/datasets | https://api.github.com/repos/huggingface/datasets/issues/1645/labels{/name} | https://api.github.com/repos/huggingface/datasets/issues/1645/comments | https://api.github.com/repos/huggingface/datasets/issues/1645/events | https://github.com/huggingface/datasets/pull/1645 | 775,473,106 | MDExOlB1bGxSZXF1ZXN0NTQ2MTQ4OTUx | 1,645 | Rename "part-of-speech-tagging" tag in some dataset cards | {
"avatar_url": "https://avatars.githubusercontent.com/u/42851186?v=4",
"events_url": "https://api.github.com/users/lhoestq/events{/privacy}",
"followers_url": "https://api.github.com/users/lhoestq/followers",
"following_url": "https://api.github.com/users/lhoestq/following{/other_user}",
"gists_url": "https://api.github.com/users/lhoestq/gists{/gist_id}",
"gravatar_id": "",
"html_url": "https://github.com/lhoestq",
"id": 42851186,
"login": "lhoestq",
"node_id": "MDQ6VXNlcjQyODUxMTg2",
"organizations_url": "https://api.github.com/users/lhoestq/orgs",
"received_events_url": "https://api.github.com/users/lhoestq/received_events",
"repos_url": "https://api.github.com/users/lhoestq/repos",
"site_admin": false,
"starred_url": "https://api.github.com/users/lhoestq/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/lhoestq/subscriptions",
"type": "User",
"url": "https://api.github.com/users/lhoestq"
} | [] | closed | false | null | [] | null | 0 | "2020-12-28T16:09:09Z" | "2021-01-07T10:08:14Z" | "2021-01-07T10:08:13Z" | MEMBER | null | 0 | {
"diff_url": "https://github.com/huggingface/datasets/pull/1645.diff",
"html_url": "https://github.com/huggingface/datasets/pull/1645",
"merged_at": "2021-01-07T10:08:13Z",
"patch_url": "https://github.com/huggingface/datasets/pull/1645.patch",
"url": "https://api.github.com/repos/huggingface/datasets/pulls/1645"
} | `part-of-speech-tagging` was not part of the tagging taxonomy under `structure-prediction` | {
"+1": 0,
"-1": 0,
"confused": 0,
"eyes": 0,
"heart": 0,
"hooray": 0,
"laugh": 0,
"rocket": 0,
"total_count": 0,
"url": "https://api.github.com/repos/huggingface/datasets/issues/1645/reactions"
} | https://api.github.com/repos/huggingface/datasets/issues/1645/timeline | null | null | true |
https://api.github.com/repos/huggingface/datasets/issues/1644 | https://api.github.com/repos/huggingface/datasets | https://api.github.com/repos/huggingface/datasets/issues/1644/labels{/name} | https://api.github.com/repos/huggingface/datasets/issues/1644/comments | https://api.github.com/repos/huggingface/datasets/issues/1644/events | https://github.com/huggingface/datasets/issues/1644 | 775,375,880 | MDU6SXNzdWU3NzUzNzU4ODA= | 1,644 | HoVeR dataset fails to load | {
"avatar_url": "https://avatars.githubusercontent.com/u/1473778?v=4",
"events_url": "https://api.github.com/users/urikz/events{/privacy}",
"followers_url": "https://api.github.com/users/urikz/followers",
"following_url": "https://api.github.com/users/urikz/following{/other_user}",
"gists_url": "https://api.github.com/users/urikz/gists{/gist_id}",
"gravatar_id": "",
"html_url": "https://github.com/urikz",
"id": 1473778,
"login": "urikz",
"node_id": "MDQ6VXNlcjE0NzM3Nzg=",
"organizations_url": "https://api.github.com/users/urikz/orgs",
"received_events_url": "https://api.github.com/users/urikz/received_events",
"repos_url": "https://api.github.com/users/urikz/repos",
"site_admin": false,
"starred_url": "https://api.github.com/users/urikz/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/urikz/subscriptions",
"type": "User",
"url": "https://api.github.com/users/urikz"
} | [] | closed | false | null | [] | null | 1 | "2020-12-28T12:27:07Z" | "2022-10-05T12:40:34Z" | "2022-10-05T12:40:34Z" | NONE | null | null | null | Hi! I'm getting an error when trying to load the **HoVeR** dataset. Another one (**SQuAD**) works fine for me. I'm using the latest (1.1.3) version of the library. (A quick sanity check on the failing URL follows this record.)
Steps to reproduce the error:
```python
>>> from datasets import load_dataset
>>> dataset = load_dataset("hover")
Traceback (most recent call last):
File "/Users/urikz/anaconda/envs/mentionmemory/lib/python3.7/site-packages/datasets/load.py", line 267, in prepare_module
local_path = cached_path(file_path, download_config=download_config)
File "/Users/urikz/anaconda/envs/mentionmemory/lib/python3.7/site-packages/datasets/utils/file_utils.py", line 308, in cached_path
use_etag=download_config.use_etag,
File "/Users/urikz/anaconda/envs/mentionmemory/lib/python3.7/site-packages/datasets/utils/file_utils.py", line 486, in get_from_cache
raise FileNotFoundError("Couldn't find file at {}".format(url))
FileNotFoundError: Couldn't find file at https://raw.githubusercontent.com/huggingface/datasets/1.1.3/datasets/hover/hover.py
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/Users/urikz/anaconda/envs/mentionmemory/lib/python3.7/site-packages/datasets/load.py", line 278, in prepare_module
local_path = cached_path(file_path, download_config=download_config)
File "/Users/urikz/anaconda/envs/mentionmemory/lib/python3.7/site-packages/datasets/utils/file_utils.py", line 308, in cached_path
use_etag=download_config.use_etag,
File "/Users/urikz/anaconda/envs/mentionmemory/lib/python3.7/site-packages/datasets/utils/file_utils.py", line 486, in get_from_cache
raise FileNotFoundError("Couldn't find file at {}".format(url))
FileNotFoundError: Couldn't find file at https://s3.amazonaws.com/datasets.huggingface.co/datasets/datasets/hover/hover.py
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/Users/urikz/anaconda/envs/mentionmemory/lib/python3.7/site-packages/datasets/load.py", line 589, in load_dataset
path, script_version=script_version, download_config=download_config, download_mode=download_mode, dataset=True
File "/Users/urikz/anaconda/envs/mentionmemory/lib/python3.7/site-packages/datasets/load.py", line 282, in prepare_module
combined_path, github_file_path, file_path
FileNotFoundError: Couldn't find file locally at hover/hover.py, or remotely at https://raw.githubusercontent.com/huggingface/datasets/1.1.3/datasets/hover/hover.py or https://s3.amazonaws.com/datasets.huggingface.co/datasets/datasets/hover/hover.py
``` | {
"+1": 0,
"-1": 0,
"confused": 0,
"eyes": 0,
"heart": 0,
"hooray": 0,
"laugh": 0,
"rocket": 0,
"total_count": 0,
"url": "https://api.github.com/repos/huggingface/datasets/issues/1644/reactions"
} | https://api.github.com/repos/huggingface/datasets/issues/1644/timeline | null | completed | false |
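A small sanity check one can run for reports like this, built directly from the URL pattern in the traceback above; treat it as a sketch rather than an official diagnostic:

```python
import requests
import datasets

# Rebuild the version-pinned script URL from the traceback above and probe it.
version = datasets.__version__  # "1.1.3" for the reporter
url = (
    "https://raw.githubusercontent.com/huggingface/datasets/"
    f"{version}/datasets/hover/hover.py"
)
print(url, "->", requests.head(url).status_code)
# A 404 means the builder script does not exist at the tag matching the
# installed release; upgrading datasets, or fetching the script from the
# master branch, is then the likely fix.
```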
https://api.github.com/repos/huggingface/datasets/issues/1643 | https://api.github.com/repos/huggingface/datasets | https://api.github.com/repos/huggingface/datasets/issues/1643/labels{/name} | https://api.github.com/repos/huggingface/datasets/issues/1643/comments | https://api.github.com/repos/huggingface/datasets/issues/1643/events | https://github.com/huggingface/datasets/issues/1643 | 775,280,046 | MDU6SXNzdWU3NzUyODAwNDY= | 1,643 | Dataset social_bias_frames 404 | {
"avatar_url": "https://avatars.githubusercontent.com/u/7501517?v=4",
"events_url": "https://api.github.com/users/atemate/events{/privacy}",
"followers_url": "https://api.github.com/users/atemate/followers",
"following_url": "https://api.github.com/users/atemate/following{/other_user}",
"gists_url": "https://api.github.com/users/atemate/gists{/gist_id}",
"gravatar_id": "",
"html_url": "https://github.com/atemate",
"id": 7501517,
"login": "atemate",
"node_id": "MDQ6VXNlcjc1MDE1MTc=",
"organizations_url": "https://api.github.com/users/atemate/orgs",
"received_events_url": "https://api.github.com/users/atemate/received_events",
"repos_url": "https://api.github.com/users/atemate/repos",
"site_admin": false,
"starred_url": "https://api.github.com/users/atemate/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/atemate/subscriptions",
"type": "User",
"url": "https://api.github.com/users/atemate"
} | [] | closed | false | null | [] | null | 1 | "2020-12-28T08:35:34Z" | "2020-12-28T08:38:07Z" | "2020-12-28T08:38:07Z" | NONE | null | null | null | ```
>>> from datasets import load_dataset
>>> dataset = load_dataset("social_bias_frames")
...
Downloading and preparing dataset social_bias_frames/default
...
~/.pyenv/versions/3.7.6/lib/python3.7/site-packages/datasets/utils/file_utils.py in get_from_cache(url, cache_dir, force_download, proxies, etag_timeout, resume_download, user_agent, local_files_only, use_etag)
484 )
485 elif response is not None and response.status_code == 404:
--> 486 raise FileNotFoundError("Couldn't find file at {}".format(url))
487 raise ConnectionError("Couldn't reach {}".format(url))
488
FileNotFoundError: Couldn't find file at https://homes.cs.washington.edu/~msap/social-bias-frames/SocialBiasFrames_v2.tgz
```
[Here](https://homes.cs.washington.edu/~msap/social-bias-frames/) we find the `Download data` button with the correct URL for the data: https://homes.cs.washington.edu/~msap/social-bias-frames/SBIC.v2.tgz | {
"+1": 0,
"-1": 0,
"confused": 0,
"eyes": 0,
"heart": 0,
"hooray": 0,
"laugh": 0,
"rocket": 0,
"total_count": 0,
"url": "https://api.github.com/repos/huggingface/datasets/issues/1643/reactions"
} | https://api.github.com/repos/huggingface/datasets/issues/1643/timeline | null | completed | false |
https://api.github.com/repos/huggingface/datasets/issues/1642 | https://api.github.com/repos/huggingface/datasets | https://api.github.com/repos/huggingface/datasets/issues/1642/labels{/name} | https://api.github.com/repos/huggingface/datasets/issues/1642/comments | https://api.github.com/repos/huggingface/datasets/issues/1642/events | https://github.com/huggingface/datasets/pull/1642 | 775,159,568 | MDExOlB1bGxSZXF1ZXN0NTQ1ODk1MzY1 | 1,642 | Ollie dataset | {
"avatar_url": "https://avatars.githubusercontent.com/u/8900094?v=4",
"events_url": "https://api.github.com/users/ontocord/events{/privacy}",
"followers_url": "https://api.github.com/users/ontocord/followers",
"following_url": "https://api.github.com/users/ontocord/following{/other_user}",
"gists_url": "https://api.github.com/users/ontocord/gists{/gist_id}",
"gravatar_id": "",
"html_url": "https://github.com/ontocord",
"id": 8900094,
"login": "ontocord",
"node_id": "MDQ6VXNlcjg5MDAwOTQ=",
"organizations_url": "https://api.github.com/users/ontocord/orgs",
"received_events_url": "https://api.github.com/users/ontocord/received_events",
"repos_url": "https://api.github.com/users/ontocord/repos",
"site_admin": false,
"starred_url": "https://api.github.com/users/ontocord/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/ontocord/subscriptions",
"type": "User",
"url": "https://api.github.com/users/ontocord"
} | [] | closed | false | null | [] | null | 0 | "2020-12-28T02:43:37Z" | "2021-01-04T13:35:25Z" | "2021-01-04T13:35:24Z" | CONTRIBUTOR | null | 0 | {
"diff_url": "https://github.com/huggingface/datasets/pull/1642.diff",
"html_url": "https://github.com/huggingface/datasets/pull/1642",
"merged_at": "2021-01-04T13:35:24Z",
"patch_url": "https://github.com/huggingface/datasets/pull/1642.patch",
"url": "https://api.github.com/repos/huggingface/datasets/pulls/1642"
} | This is the dataset used to train the Ollie open information extraction algorithm. It has over 21M sentences. See http://knowitall.github.io/ollie/ for more details. | {
"+1": 0,
"-1": 0,
"confused": 0,
"eyes": 0,
"heart": 0,
"hooray": 0,
"laugh": 0,
"rocket": 0,
"total_count": 0,
"url": "https://api.github.com/repos/huggingface/datasets/issues/1642/reactions"
} | https://api.github.com/repos/huggingface/datasets/issues/1642/timeline | null | null | true |
https://api.github.com/repos/huggingface/datasets/issues/1641 | https://api.github.com/repos/huggingface/datasets | https://api.github.com/repos/huggingface/datasets/issues/1641/labels{/name} | https://api.github.com/repos/huggingface/datasets/issues/1641/comments | https://api.github.com/repos/huggingface/datasets/issues/1641/events | https://github.com/huggingface/datasets/issues/1641 | 775,110,872 | MDU6SXNzdWU3NzUxMTA4NzI= | 1,641 | muchocine dataset cannot be downloaded | {
"avatar_url": "https://avatars.githubusercontent.com/u/3653789?v=4",
"events_url": "https://api.github.com/users/mrm8488/events{/privacy}",
"followers_url": "https://api.github.com/users/mrm8488/followers",
"following_url": "https://api.github.com/users/mrm8488/following{/other_user}",
"gists_url": "https://api.github.com/users/mrm8488/gists{/gist_id}",
"gravatar_id": "",
"html_url": "https://github.com/mrm8488",
"id": 3653789,
"login": "mrm8488",
"node_id": "MDQ6VXNlcjM2NTM3ODk=",
"organizations_url": "https://api.github.com/users/mrm8488/orgs",
"received_events_url": "https://api.github.com/users/mrm8488/received_events",
"repos_url": "https://api.github.com/users/mrm8488/repos",
"site_admin": false,
"starred_url": "https://api.github.com/users/mrm8488/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/mrm8488/subscriptions",
"type": "User",
"url": "https://api.github.com/users/mrm8488"
} | [
{
"color": "ffffff",
"default": true,
"description": "This will not be worked on",
"id": 1935892913,
"name": "wontfix",
"node_id": "MDU6TGFiZWwxOTM1ODkyOTEz",
"url": "https://api.github.com/repos/huggingface/datasets/labels/wontfix"
},
{
"color": "2edb81",
"default": false,
"description": "A bug in a dataset script provided in the library",
"id": 2067388877,
"name": "dataset bug",
"node_id": "MDU6TGFiZWwyMDY3Mzg4ODc3",
"url": "https://api.github.com/repos/huggingface/datasets/labels/dataset%20bug"
}
] | closed | false | null | [] | null | 5 | "2020-12-27T21:26:28Z" | "2021-08-03T05:07:29Z" | "2021-08-03T05:07:29Z" | CONTRIBUTOR | null | null | null | ```python
---------------------------------------------------------------------------
FileNotFoundError Traceback (most recent call last)
/usr/local/lib/python3.6/dist-packages/datasets/load.py in prepare_module(path, script_version, download_config, download_mode, dataset, force_local_path, **download_kwargs)
267 try:
--> 268 local_path = cached_path(file_path, download_config=download_config)
269 except FileNotFoundError:
7 frames
FileNotFoundError: Couldn't find file at https://raw.githubusercontent.com/huggingface/datasets/1.0.2/datasets/muchocine/muchocine.py
During handling of the above exception, another exception occurred:
FileNotFoundError Traceback (most recent call last)
FileNotFoundError: Couldn't find file at https://s3.amazonaws.com/datasets.huggingface.co/datasets/datasets/muchocine/muchocine.py
During handling of the above exception, another exception occurred:
FileNotFoundError Traceback (most recent call last)
/usr/local/lib/python3.6/dist-packages/datasets/load.py in prepare_module(path, script_version, download_config, download_mode, dataset, force_local_path, **download_kwargs)
281 raise FileNotFoundError(
282 "Couldn't find file locally at {}, or remotely at {} or {}".format(
--> 283 combined_path, github_file_path, file_path
284 )
285 )
FileNotFoundError: Couldn't find file locally at muchocine/muchocine.py, or remotely at https://raw.githubusercontent.com/huggingface/datasets/1.0.2/datasets/muchocine/muchocine.py or https://s3.amazonaws.com/datasets.huggingface.co/datasets/datasets/muchocine/muchocine.py
``` | {
"+1": 0,
"-1": 0,
"confused": 0,
"eyes": 0,
"heart": 0,
"hooray": 0,
"laugh": 0,
"rocket": 0,
"total_count": 0,
"url": "https://api.github.com/repos/huggingface/datasets/issues/1641/reactions"
} | https://api.github.com/repos/huggingface/datasets/issues/1641/timeline | null | completed | false |
https://api.github.com/repos/huggingface/datasets/issues/1640 | https://api.github.com/repos/huggingface/datasets | https://api.github.com/repos/huggingface/datasets/issues/1640/labels{/name} | https://api.github.com/repos/huggingface/datasets/issues/1640/comments | https://api.github.com/repos/huggingface/datasets/issues/1640/events | https://github.com/huggingface/datasets/pull/1640 | 774,921,836 | MDExOlB1bGxSZXF1ZXN0NTQ1NzI2NzY4 | 1,640 | Fix "'BertTokenizerFast' object has no attribute 'max_len'" | {
"avatar_url": "https://avatars.githubusercontent.com/u/15031715?v=4",
"events_url": "https://api.github.com/users/mflis/events{/privacy}",
"followers_url": "https://api.github.com/users/mflis/followers",
"following_url": "https://api.github.com/users/mflis/following{/other_user}",
"gists_url": "https://api.github.com/users/mflis/gists{/gist_id}",
"gravatar_id": "",
"html_url": "https://github.com/mflis",
"id": 15031715,
"login": "mflis",
"node_id": "MDQ6VXNlcjE1MDMxNzE1",
"organizations_url": "https://api.github.com/users/mflis/orgs",
"received_events_url": "https://api.github.com/users/mflis/received_events",
"repos_url": "https://api.github.com/users/mflis/repos",
"site_admin": false,
"starred_url": "https://api.github.com/users/mflis/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/mflis/subscriptions",
"type": "User",
"url": "https://api.github.com/users/mflis"
} | [] | closed | false | null | [] | null | 0 | "2020-12-26T19:25:41Z" | "2020-12-28T17:26:35Z" | "2020-12-28T17:26:35Z" | CONTRIBUTOR | null | 0 | {
"diff_url": "https://github.com/huggingface/datasets/pull/1640.diff",
"html_url": "https://github.com/huggingface/datasets/pull/1640",
"merged_at": "2020-12-28T17:26:35Z",
"patch_url": "https://github.com/huggingface/datasets/pull/1640.patch",
"url": "https://api.github.com/repos/huggingface/datasets/pulls/1640"
} | Tensorflow 2.3.0 gives:
FutureWarning: The `max_len` attribute has been deprecated and will be removed in a future version, use `model_max_length` instead.
Tensorflow 2.4.0 gives:
AttributeError 'BertTokenizerFast' object has no attribute 'max_len' | {
"+1": 0,
"-1": 0,
"confused": 0,
"eyes": 0,
"heart": 0,
"hooray": 0,
"laugh": 0,
"rocket": 0,
"total_count": 0,
"url": "https://api.github.com/repos/huggingface/datasets/issues/1640/reactions"
} | https://api.github.com/repos/huggingface/datasets/issues/1640/timeline | null | null | true |
https://api.github.com/repos/huggingface/datasets/issues/1639 | https://api.github.com/repos/huggingface/datasets | https://api.github.com/repos/huggingface/datasets/issues/1639/labels{/name} | https://api.github.com/repos/huggingface/datasets/issues/1639/comments | https://api.github.com/repos/huggingface/datasets/issues/1639/events | https://github.com/huggingface/datasets/issues/1639 | 774,903,472 | MDU6SXNzdWU3NzQ5MDM0NzI= | 1,639 | bug with sst2 in glue | {
"avatar_url": "https://avatars.githubusercontent.com/u/10137?v=4",
"events_url": "https://api.github.com/users/ghost/events{/privacy}",
"followers_url": "https://api.github.com/users/ghost/followers",
"following_url": "https://api.github.com/users/ghost/following{/other_user}",
"gists_url": "https://api.github.com/users/ghost/gists{/gist_id}",
"gravatar_id": "",
"html_url": "https://github.com/ghost",
"id": 10137,
"login": "ghost",
"node_id": "MDQ6VXNlcjEwMTM3",
"organizations_url": "https://api.github.com/users/ghost/orgs",
"received_events_url": "https://api.github.com/users/ghost/received_events",
"repos_url": "https://api.github.com/users/ghost/repos",
"site_admin": false,
"starred_url": "https://api.github.com/users/ghost/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/ghost/subscriptions",
"type": "User",
"url": "https://api.github.com/users/ghost"
} | [] | closed | false | null | [] | null | 3 | "2020-12-26T16:57:23Z" | "2022-10-05T12:40:16Z" | "2022-10-05T12:40:16Z" | NONE | null | null | null | Hi
I am getting very low accuracy on SST2. I investigated this and observed that the sentences in this dataset are tokenized, while the other datasets in GLUE are untokenized as expected; please see below.
Is there any alternative way to get the untokenized sentences? I am unfortunately under time pressure to report some results on this dataset. Thank you for your help. @lhoestq
```
>>> a = datasets.load_dataset('glue', 'sst2', split="validation", script_version="master")
Reusing dataset glue (/julia/datasets/glue/sst2/1.0.0/7c99657241149a24692c402a5c3f34d4c9f1df5ac2e4c3759fadea38f6cb29c4)
>>> a[:10]
{'idx': [0, 1, 2, 3, 4, 5, 6, 7, 8, 9], 'label': [1, 0, 1, 1, 0, 1, 0, 0, 1, 0], 'sentence': ["it 's a charming and often affecting journey . ", 'unflinchingly bleak and desperate ', 'allows us to hope that nolan is poised to embark a major career as a commercial yet inventive filmmaker . ', "the acting , costumes , music , cinematography and sound are all astounding given the production 's austere locales . ", "it 's slow -- very , very slow . ", 'although laced with humor and a few fanciful touches , the film is a refreshingly serious look at young women . ', 'a sometimes tedious film . ', "or doing last year 's taxes with your ex-wife . ", "you do n't have to know about music to appreciate the film 's easygoing blend of comedy and romance . ", "in exactly 89 minutes , most of which passed as slowly as if i 'd been sitting naked on an igloo , formula 51 sank from quirky to jerky to utter turkey . "]}
``` | {
"+1": 0,
"-1": 0,
"confused": 0,
"eyes": 0,
"heart": 0,
"hooray": 0,
"laugh": 0,
"rocket": 0,
"total_count": 0,
"url": "https://api.github.com/repos/huggingface/datasets/issues/1639/reactions"
} | https://api.github.com/repos/huggingface/datasets/issues/1639/timeline | null | completed | false |
https://api.github.com/repos/huggingface/datasets/issues/1638 | https://api.github.com/repos/huggingface/datasets | https://api.github.com/repos/huggingface/datasets/issues/1638/labels{/name} | https://api.github.com/repos/huggingface/datasets/issues/1638/comments | https://api.github.com/repos/huggingface/datasets/issues/1638/events | https://github.com/huggingface/datasets/pull/1638 | 774,869,184 | MDExOlB1bGxSZXF1ZXN0NTQ1Njg5ODQ5 | 1,638 | Add id_puisi dataset | {
"avatar_url": "https://avatars.githubusercontent.com/u/31740013?v=4",
"events_url": "https://api.github.com/users/ilhamfp/events{/privacy}",
"followers_url": "https://api.github.com/users/ilhamfp/followers",
"following_url": "https://api.github.com/users/ilhamfp/following{/other_user}",
"gists_url": "https://api.github.com/users/ilhamfp/gists{/gist_id}",
"gravatar_id": "",
"html_url": "https://github.com/ilhamfp",
"id": 31740013,
"login": "ilhamfp",
"node_id": "MDQ6VXNlcjMxNzQwMDEz",
"organizations_url": "https://api.github.com/users/ilhamfp/orgs",
"received_events_url": "https://api.github.com/users/ilhamfp/received_events",
"repos_url": "https://api.github.com/users/ilhamfp/repos",
"site_admin": false,
"starred_url": "https://api.github.com/users/ilhamfp/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/ilhamfp/subscriptions",
"type": "User",
"url": "https://api.github.com/users/ilhamfp"
} | [] | closed | false | null | [] | null | 0 | "2020-12-26T12:41:55Z" | "2020-12-30T16:34:17Z" | "2020-12-30T16:34:17Z" | CONTRIBUTOR | null | 0 | {
"diff_url": "https://github.com/huggingface/datasets/pull/1638.diff",
"html_url": "https://github.com/huggingface/datasets/pull/1638",
"merged_at": "2020-12-30T16:34:17Z",
"patch_url": "https://github.com/huggingface/datasets/pull/1638.patch",
"url": "https://api.github.com/repos/huggingface/datasets/pulls/1638"
} | Puisi (poem) is an Indonesian poetic form. The dataset contains 7223 Indonesian puisi, each with its title and author. :) | {
"+1": 0,
"-1": 0,
"confused": 0,
"eyes": 0,
"heart": 0,
"hooray": 0,
"laugh": 0,
"rocket": 0,
"total_count": 0,
"url": "https://api.github.com/repos/huggingface/datasets/issues/1638/reactions"
} | https://api.github.com/repos/huggingface/datasets/issues/1638/timeline | null | null | true |
https://api.github.com/repos/huggingface/datasets/issues/1637 | https://api.github.com/repos/huggingface/datasets | https://api.github.com/repos/huggingface/datasets/issues/1637/labels{/name} | https://api.github.com/repos/huggingface/datasets/issues/1637/comments | https://api.github.com/repos/huggingface/datasets/issues/1637/events | https://github.com/huggingface/datasets/pull/1637 | 774,710,014 | MDExOlB1bGxSZXF1ZXN0NTQ1NTc1NTMw | 1,637 | Added `pn_summary` dataset | {
"avatar_url": "https://avatars.githubusercontent.com/u/2601833?v=4",
"events_url": "https://api.github.com/users/m3hrdadfi/events{/privacy}",
"followers_url": "https://api.github.com/users/m3hrdadfi/followers",
"following_url": "https://api.github.com/users/m3hrdadfi/following{/other_user}",
"gists_url": "https://api.github.com/users/m3hrdadfi/gists{/gist_id}",
"gravatar_id": "",
"html_url": "https://github.com/m3hrdadfi",
"id": 2601833,
"login": "m3hrdadfi",
"node_id": "MDQ6VXNlcjI2MDE4MzM=",
"organizations_url": "https://api.github.com/users/m3hrdadfi/orgs",
"received_events_url": "https://api.github.com/users/m3hrdadfi/received_events",
"repos_url": "https://api.github.com/users/m3hrdadfi/repos",
"site_admin": false,
"starred_url": "https://api.github.com/users/m3hrdadfi/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/m3hrdadfi/subscriptions",
"type": "User",
"url": "https://api.github.com/users/m3hrdadfi"
} | [] | closed | false | null | [] | null | 2 | "2020-12-25T11:01:24Z" | "2021-01-04T13:43:19Z" | "2021-01-04T13:43:19Z" | CONTRIBUTOR | null | 0 | {
"diff_url": "https://github.com/huggingface/datasets/pull/1637.diff",
"html_url": "https://github.com/huggingface/datasets/pull/1637",
"merged_at": "2021-01-04T13:43:19Z",
"patch_url": "https://github.com/huggingface/datasets/pull/1637.patch",
"url": "https://api.github.com/repos/huggingface/datasets/pulls/1637"
} | #1635
You did a great job making the procedure for adding a dataset so smooth. I took the chance to add the dataset myself. Thank you for your awesome work, and I hope this dataset makes researchers happy, especially those interested in the Persian language (Farsi)!
"+1": 0,
"-1": 0,
"confused": 0,
"eyes": 0,
"heart": 0,
"hooray": 0,
"laugh": 0,
"rocket": 0,
"total_count": 0,
"url": "https://api.github.com/repos/huggingface/datasets/issues/1637/reactions"
} | https://api.github.com/repos/huggingface/datasets/issues/1637/timeline | null | null | true |
https://api.github.com/repos/huggingface/datasets/issues/1636 | https://api.github.com/repos/huggingface/datasets | https://api.github.com/repos/huggingface/datasets/issues/1636/labels{/name} | https://api.github.com/repos/huggingface/datasets/issues/1636/comments | https://api.github.com/repos/huggingface/datasets/issues/1636/events | https://github.com/huggingface/datasets/issues/1636 | 774,574,378 | MDU6SXNzdWU3NzQ1NzQzNzg= | 1,636 | winogrande cannot be downloaded | {
"avatar_url": "https://avatars.githubusercontent.com/u/10137?v=4",
"events_url": "https://api.github.com/users/ghost/events{/privacy}",
"followers_url": "https://api.github.com/users/ghost/followers",
"following_url": "https://api.github.com/users/ghost/following{/other_user}",
"gists_url": "https://api.github.com/users/ghost/gists{/gist_id}",
"gravatar_id": "",
"html_url": "https://github.com/ghost",
"id": 10137,
"login": "ghost",
"node_id": "MDQ6VXNlcjEwMTM3",
"organizations_url": "https://api.github.com/users/ghost/orgs",
"received_events_url": "https://api.github.com/users/ghost/received_events",
"repos_url": "https://api.github.com/users/ghost/repos",
"site_admin": false,
"starred_url": "https://api.github.com/users/ghost/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/ghost/subscriptions",
"type": "User",
"url": "https://api.github.com/users/ghost"
} | [] | closed | false | null | [] | null | 2 | "2020-12-24T22:28:22Z" | "2022-10-05T12:35:44Z" | "2022-10-05T12:35:44Z" | NONE | null | null | null | Hi,
I am getting this error when trying to run the code on the cloud. Thank you for any suggestions and help on this, @lhoestq
```
File "./finetune_trainer.py", line 318, in <module>
main()
File "./finetune_trainer.py", line 148, in main
for task in data_args.tasks]
File "./finetune_trainer.py", line 148, in <listcomp>
for task in data_args.tasks]
File "/workdir/seq2seq/data/tasks.py", line 65, in get_dataset
dataset = self.load_dataset(split=split)
File "/workdir/seq2seq/data/tasks.py", line 466, in load_dataset
return datasets.load_dataset('winogrande', 'winogrande_l', split=split)
File "/usr/local/lib/python3.6/dist-packages/datasets/load.py", line 589, in load_dataset
path, script_version=script_version, download_config=download_config, download_mode=download_mode, dataset=True
File "/usr/local/lib/python3.6/dist-packages/datasets/load.py", line 267, in prepare_module
local_path = cached_path(file_path, download_config=download_config)
File "/usr/local/lib/python3.6/dist-packages/datasets/utils/file_utils.py", line 308, in cached_path
use_etag=download_config.use_etag,
File "/usr/local/lib/python3.6/dist-packages/datasets/utils/file_utils.py", line 487, in get_from_cache
raise ConnectionError("Couldn't reach {}".format(url))
ConnectionError: Couldn't reach https://raw.githubusercontent.com/huggingface/datasets/1.1.3/datasets/winogrande/winogrande.py
yo/0 I1224 14:17:46.419031 31226 main shadow.py:122 > Traceback (most recent call last):
File "/usr/lib/python3.6/runpy.py", line 193, in _run_module_as_main
"__main__", mod_spec)
File "/usr/lib/python3.6/runpy.py", line 85, in _run_code
exec(code, run_globals)
File "/usr/local/lib/python3.6/dist-packages/torch/distributed/launch.py", line 260, in <module>
main()
File "/usr/local/lib/python3.6/dist-packages/torch/distributed/launch.py", line 256, in main
cmd=cmd)
``` | {
"+1": 0,
"-1": 0,
"confused": 0,
"eyes": 0,
"heart": 0,
"hooray": 0,
"laugh": 0,
"rocket": 0,
"total_count": 0,
"url": "https://api.github.com/repos/huggingface/datasets/issues/1636/reactions"
} | https://api.github.com/repos/huggingface/datasets/issues/1636/timeline | null | completed | false |
https://api.github.com/repos/huggingface/datasets/issues/1635 | https://api.github.com/repos/huggingface/datasets | https://api.github.com/repos/huggingface/datasets/issues/1635/labels{/name} | https://api.github.com/repos/huggingface/datasets/issues/1635/comments | https://api.github.com/repos/huggingface/datasets/issues/1635/events | https://github.com/huggingface/datasets/issues/1635 | 774,524,492 | MDU6SXNzdWU3NzQ1MjQ0OTI= | 1,635 | Persian Abstractive/Extractive Text Summarization | {
"avatar_url": "https://avatars.githubusercontent.com/u/2601833?v=4",
"events_url": "https://api.github.com/users/m3hrdadfi/events{/privacy}",
"followers_url": "https://api.github.com/users/m3hrdadfi/followers",
"following_url": "https://api.github.com/users/m3hrdadfi/following{/other_user}",
"gists_url": "https://api.github.com/users/m3hrdadfi/gists{/gist_id}",
"gravatar_id": "",
"html_url": "https://github.com/m3hrdadfi",
"id": 2601833,
"login": "m3hrdadfi",
"node_id": "MDQ6VXNlcjI2MDE4MzM=",
"organizations_url": "https://api.github.com/users/m3hrdadfi/orgs",
"received_events_url": "https://api.github.com/users/m3hrdadfi/received_events",
"repos_url": "https://api.github.com/users/m3hrdadfi/repos",
"site_admin": false,
"starred_url": "https://api.github.com/users/m3hrdadfi/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/m3hrdadfi/subscriptions",
"type": "User",
"url": "https://api.github.com/users/m3hrdadfi"
} | [
{
"color": "e99695",
"default": false,
"description": "Requesting to add a new dataset",
"id": 2067376369,
"name": "dataset request",
"node_id": "MDU6TGFiZWwyMDY3Mzc2MzY5",
"url": "https://api.github.com/repos/huggingface/datasets/labels/dataset%20request"
}
] | closed | false | null | [] | null | 0 | "2020-12-24T17:47:12Z" | "2021-01-04T15:11:04Z" | "2021-01-04T15:11:04Z" | CONTRIBUTOR | null | null | null | Assembling datasets tailored to different tasks and languages is a valuable goal. It would be great to have this dataset included.
## Adding a Dataset
- **Name:** *pn-summary*
- **Description:** *A well-structured summarization dataset for the Persian language consisting of 93,207 records. It is prepared for Abstractive/Extractive tasks (like cnn_dailymail for English). It can also be used for other tasks such as Text Generation, Title Generation, and News Category Classification.*
- **Paper:** *https://arxiv.org/abs/2012.11204*
- **Data:** *https://github.com/hooshvare/pn-summary/#download*
- **Motivation:** *It is the first Persian abstractive/extractive Text summarization dataset (like cnn_dailymail for English)!*
Instructions to add a new dataset can be found [here](https://github.com/huggingface/datasets/blob/master/ADD_NEW_DATASET.md).
| {
"+1": 0,
"-1": 0,
"confused": 0,
"eyes": 0,
"heart": 0,
"hooray": 0,
"laugh": 0,
"rocket": 0,
"total_count": 0,
"url": "https://api.github.com/repos/huggingface/datasets/issues/1635/reactions"
} | https://api.github.com/repos/huggingface/datasets/issues/1635/timeline | null | completed | false |
https://api.github.com/repos/huggingface/datasets/issues/1634 | https://api.github.com/repos/huggingface/datasets | https://api.github.com/repos/huggingface/datasets/issues/1634/labels{/name} | https://api.github.com/repos/huggingface/datasets/issues/1634/comments | https://api.github.com/repos/huggingface/datasets/issues/1634/events | https://github.com/huggingface/datasets/issues/1634 | 774,487,934 | MDU6SXNzdWU3NzQ0ODc5MzQ= | 1,634 | Inspecting datasets per category | {
"avatar_url": "https://avatars.githubusercontent.com/u/10137?v=4",
"events_url": "https://api.github.com/users/ghost/events{/privacy}",
"followers_url": "https://api.github.com/users/ghost/followers",
"following_url": "https://api.github.com/users/ghost/following{/other_user}",
"gists_url": "https://api.github.com/users/ghost/gists{/gist_id}",
"gravatar_id": "",
"html_url": "https://github.com/ghost",
"id": 10137,
"login": "ghost",
"node_id": "MDQ6VXNlcjEwMTM3",
"organizations_url": "https://api.github.com/users/ghost/orgs",
"received_events_url": "https://api.github.com/users/ghost/received_events",
"repos_url": "https://api.github.com/users/ghost/repos",
"site_admin": false,
"starred_url": "https://api.github.com/users/ghost/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/ghost/subscriptions",
"type": "User",
"url": "https://api.github.com/users/ghost"
} | [] | closed | false | null | [] | null | 4 | "2020-12-24T15:26:34Z" | "2022-10-04T14:57:33Z" | "2022-10-04T14:57:33Z" | NONE | null | null | null | Hi
Is there a way I could get all NLI datasets/all QA datasets to get some understanding of the available datasets per category? It is hard for me to inspect the datasets one by one on the webpage. Thanks for the suggestions, @lhoestq
"+1": 0,
"-1": 0,
"confused": 0,
"eyes": 0,
"heart": 0,
"hooray": 0,
"laugh": 0,
"rocket": 0,
"total_count": 0,
"url": "https://api.github.com/repos/huggingface/datasets/issues/1634/reactions"
} | https://api.github.com/repos/huggingface/datasets/issues/1634/timeline | null | completed | false |
https://api.github.com/repos/huggingface/datasets/issues/1633 | https://api.github.com/repos/huggingface/datasets | https://api.github.com/repos/huggingface/datasets/issues/1633/labels{/name} | https://api.github.com/repos/huggingface/datasets/issues/1633/comments | https://api.github.com/repos/huggingface/datasets/issues/1633/events | https://github.com/huggingface/datasets/issues/1633 | 774,422,603 | MDU6SXNzdWU3NzQ0MjI2MDM= | 1,633 | social_i_qa wrong format of labels | {
"avatar_url": "https://avatars.githubusercontent.com/u/10137?v=4",
"events_url": "https://api.github.com/users/ghost/events{/privacy}",
"followers_url": "https://api.github.com/users/ghost/followers",
"following_url": "https://api.github.com/users/ghost/following{/other_user}",
"gists_url": "https://api.github.com/users/ghost/gists{/gist_id}",
"gravatar_id": "",
"html_url": "https://github.com/ghost",
"id": 10137,
"login": "ghost",
"node_id": "MDQ6VXNlcjEwMTM3",
"organizations_url": "https://api.github.com/users/ghost/orgs",
"received_events_url": "https://api.github.com/users/ghost/received_events",
"repos_url": "https://api.github.com/users/ghost/repos",
"site_admin": false,
"starred_url": "https://api.github.com/users/ghost/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/ghost/subscriptions",
"type": "User",
"url": "https://api.github.com/users/ghost"
} | [] | closed | false | null | [] | null | 2 | "2020-12-24T13:11:54Z" | "2020-12-30T17:18:49Z" | "2020-12-30T17:18:49Z" | NONE | null | null | null | Hi,
there is an extra "\n" in the labels of the social_i_qa dataset; no big deal, but I was wondering if you could remove it to make it consistent.
So the label is 'label': '1\n', not '1'.
Thanks
```
>>> import datasets
>>> from datasets import load_dataset
>>> dataset = load_dataset(
... 'social_i_qa')
cahce dir /julia/cache/datasets
Downloading: 4.72kB [00:00, 3.52MB/s]
cahce dir /julia/cache/datasets
Downloading: 2.19kB [00:00, 1.81MB/s]
Using custom data configuration default
Reusing dataset social_i_qa (/julia/datasets/social_i_qa/default/0.1.0/4a4190cc2d2482d43416c2167c0c5dccdd769d4482e84893614bd069e5c3ba06)
>>> dataset['train'][0]
{'answerA': 'like attending', 'answerB': 'like staying home', 'answerC': 'a good friend to have', 'context': 'Cameron decided to have a barbecue and gathered her friends together.', 'label': '1\n', 'question': 'How would Others feel as a result?'}
```
| {
"+1": 0,
"-1": 0,
"confused": 0,
"eyes": 0,
"heart": 0,
"hooray": 0,
"laugh": 0,
"rocket": 0,
"total_count": 0,
"url": "https://api.github.com/repos/huggingface/datasets/issues/1633/reactions"
} | https://api.github.com/repos/huggingface/datasets/issues/1633/timeline | null | completed | false |
https://api.github.com/repos/huggingface/datasets/issues/1632 | https://api.github.com/repos/huggingface/datasets | https://api.github.com/repos/huggingface/datasets/issues/1632/labels{/name} | https://api.github.com/repos/huggingface/datasets/issues/1632/comments | https://api.github.com/repos/huggingface/datasets/issues/1632/events | https://github.com/huggingface/datasets/issues/1632 | 774,388,625 | MDU6SXNzdWU3NzQzODg2MjU= | 1,632 | SICK dataset | {
"avatar_url": "https://avatars.githubusercontent.com/u/6278280?v=4",
"events_url": "https://api.github.com/users/rabeehk/events{/privacy}",
"followers_url": "https://api.github.com/users/rabeehk/followers",
"following_url": "https://api.github.com/users/rabeehk/following{/other_user}",
"gists_url": "https://api.github.com/users/rabeehk/gists{/gist_id}",
"gravatar_id": "",
"html_url": "https://github.com/rabeehk",
"id": 6278280,
"login": "rabeehk",
"node_id": "MDQ6VXNlcjYyNzgyODA=",
"organizations_url": "https://api.github.com/users/rabeehk/orgs",
"received_events_url": "https://api.github.com/users/rabeehk/received_events",
"repos_url": "https://api.github.com/users/rabeehk/repos",
"site_admin": false,
"starred_url": "https://api.github.com/users/rabeehk/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/rabeehk/subscriptions",
"type": "User",
"url": "https://api.github.com/users/rabeehk"
} | [
{
"color": "e99695",
"default": false,
"description": "Requesting to add a new dataset",
"id": 2067376369,
"name": "dataset request",
"node_id": "MDU6TGFiZWwyMDY3Mzc2MzY5",
"url": "https://api.github.com/repos/huggingface/datasets/labels/dataset%20request"
}
] | closed | false | null | [] | null | 0 | "2020-12-24T12:40:14Z" | "2021-02-05T15:49:25Z" | "2021-02-05T15:49:25Z" | CONTRIBUTOR | null | null | null | Hi, this would be great to have this dataset included. I might be missing something, but I could not find it in the list of already included datasets. Thank you.
## Adding a Dataset
- **Name:** SICK
- **Description:** SICK consists of about 10,000 English sentence pairs that include many examples of lexical, syntactic, and semantic phenomena.
- **Paper:** https://www.aclweb.org/anthology/L14-1314/
- **Data:** http://marcobaroni.org/composes/sick.html
- **Motivation:** This dataset is well known in the NLP community and is used for recognizing entailment between sentences.
Instructions to add a new dataset can be found [here](https://github.com/huggingface/datasets/blob/master/ADD_NEW_DATASET.md).
| {
"+1": 0,
"-1": 0,
"confused": 0,
"eyes": 0,
"heart": 0,
"hooray": 0,
"laugh": 0,
"rocket": 0,
"total_count": 0,
"url": "https://api.github.com/repos/huggingface/datasets/issues/1632/reactions"
} | https://api.github.com/repos/huggingface/datasets/issues/1632/timeline | null | completed | false |
https://api.github.com/repos/huggingface/datasets/issues/1631 | https://api.github.com/repos/huggingface/datasets | https://api.github.com/repos/huggingface/datasets/issues/1631/labels{/name} | https://api.github.com/repos/huggingface/datasets/issues/1631/comments | https://api.github.com/repos/huggingface/datasets/issues/1631/events | https://github.com/huggingface/datasets/pull/1631 | 774,349,222 | MDExOlB1bGxSZXF1ZXN0NTQ1Mjc5MTE2 | 1,631 | Update README.md | {
"avatar_url": "https://avatars.githubusercontent.com/u/6584825?v=4",
"events_url": "https://api.github.com/users/savasy/events{/privacy}",
"followers_url": "https://api.github.com/users/savasy/followers",
"following_url": "https://api.github.com/users/savasy/following{/other_user}",
"gists_url": "https://api.github.com/users/savasy/gists{/gist_id}",
"gravatar_id": "",
"html_url": "https://github.com/savasy",
"id": 6584825,
"login": "savasy",
"node_id": "MDQ6VXNlcjY1ODQ4MjU=",
"organizations_url": "https://api.github.com/users/savasy/orgs",
"received_events_url": "https://api.github.com/users/savasy/received_events",
"repos_url": "https://api.github.com/users/savasy/repos",
"site_admin": false,
"starred_url": "https://api.github.com/users/savasy/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/savasy/subscriptions",
"type": "User",
"url": "https://api.github.com/users/savasy"
} | [] | closed | false | null | [] | null | 0 | "2020-12-24T11:45:52Z" | "2020-12-28T17:35:41Z" | "2020-12-28T17:16:04Z" | CONTRIBUTOR | null | 0 | {
"diff_url": "https://github.com/huggingface/datasets/pull/1631.diff",
"html_url": "https://github.com/huggingface/datasets/pull/1631",
"merged_at": "2020-12-28T17:16:04Z",
"patch_url": "https://github.com/huggingface/datasets/pull/1631.patch",
"url": "https://api.github.com/repos/huggingface/datasets/pulls/1631"
} | I made a small change to the citation | {
"+1": 1,
"-1": 0,
"confused": 0,
"eyes": 0,
"heart": 0,
"hooray": 0,
"laugh": 0,
"rocket": 0,
"total_count": 1,
"url": "https://api.github.com/repos/huggingface/datasets/issues/1631/reactions"
} | https://api.github.com/repos/huggingface/datasets/issues/1631/timeline | null | null | true |