Update files from the datasets library (from 1.4.0)
Release notes: https://github.com/huggingface/datasets/releases/tag/1.4.0
c4.py CHANGED

@@ -19,7 +19,6 @@
 from __future__ import absolute_import, division, print_function
 
 import json
-import logging
 import os
 
 import datasets
@@ -39,6 +38,9 @@ from .c4_utils import (
 )
 
 
+logger = datasets.logging.get_logger(__name__)
+
+
 _DESCRIPTION = """\
 A colossal, cleaned version of Common Crawl's web crawl corpus.
 
@@ -215,7 +217,7 @@ class C4(datasets.BeamBasedBuilder):
                     cc_dir, "*.warc.wet.gz", self.manual_download_instructions
                 )
             )
-            logging.info("Adding %d WET files for manually downloaded version %s.", len(wet_files), cc_version)
+            logger.info("Adding %d WET files for manually downloaded version %s.", len(wet_files), cc_version)
             file_paths["wet_files"].extend(wet_files)
 
         page_content_pcollection = self._get_page_content(pipeline, file_paths, dl_manager)
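For context, the functional change in this diff is swapping the standard-library logging module for the library's own logger factory. A minimal sketch of the new pattern follows; the verbosity call and the logged values are illustrative, assuming datasets>=1.4.0 is installed.

import datasets

# get_logger returns a standard logging.Logger namespaced under the library,
# so dataset scripts share the central datasets logging configuration.
logger = datasets.logging.get_logger(__name__)

# Verbosity is controlled globally through the datasets.logging helpers.
datasets.logging.set_verbosity_info()

# Example message in the same style as the one added in this commit
# (values are placeholders, not taken from a real download run).
logger.info("Adding %d WET files for manually downloaded version %s.", 3, "2019-18")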