Aananda-giri commited on
Commit
0dcf7f7
·
verified ·
1 Parent(s): cfdfd95

Upload scrapy_engine/load_data.py with huggingface_hub

Browse files
Files changed (1) hide show
  1. scrapy_engine/load_data.py +75 -0
scrapy_engine/load_data.py ADDED
@@ -0,0 +1,75 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
# Get chunks of data 10000 paragraphs at a time

import csv
import json
import sys

# Raise the CSV field size limit: the default (131072) is too small for the
# long paragraph fields and raises "Error: field larger than field limit".
# Passing sys.maxsize directly can raise OverflowError on platforms where a
# Python int exceeds a C long (e.g. 64-bit Windows), so back off by halving
# until the limit is accepted.
_max_field_size = sys.maxsize
while True:
    try:
        csv.field_size_limit(_max_field_size)
        break
    except OverflowError:
        _max_field_size //= 2
9
+
10
def load_data(file_path, chunk_size=10000):
    """Read a CSV file and yield its rows in chunks.

    The first row (the header) is passed through untouched so callers can
    inspect column names.  For every other row, the third column is
    JSON-decoded back into a Python list of paragraphs (it was serialized
    with ``json.dumps`` when the CSV was written).

    Args:
        file_path: Path to the CSV file.
        chunk_size: Maximum number of rows per yielded chunk.

    Yields:
        list: Up to ``chunk_size`` rows; each row has the shape
        ``[parent_url, page_title, paragraphs]``.
    """
    # newline='' is required by the csv module so embedded newlines inside
    # quoted fields survive; utf-8 is explicit because the data contains
    # non-ASCII (Nepali) text and must not depend on the platform default.
    with open(file_path, 'r', newline='', encoding='utf-8') as csvfile:
        reader = csv.reader(csvfile)

        chunk = []
        first_row = True
        for row in reader:
            if first_row:
                # Header row: keep as-is (no JSON payload in it).
                first_row = False
            else:
                # paragraphs column: convert the JSON string back to a list.
                row[2] = json.loads(row[2])
            chunk.append(row)
            if len(chunk) >= chunk_size:
                yield chunk
                chunk = []

        if chunk:  # Flush the final partial chunk, if any.
            yield chunk
42
+
43
+
44
if __name__ == '__main__':
    file_path = '/content/drive/MyDrive/Research/datasets/crawled_data/crawled_data.csv'
    chunk_size = 100  # 10000

    # Example usage:
    for chunk in load_data(file_path, chunk_size):
        # .............................................
        # Process each chunk of data here.
        # A chunk is a list of rows; each row has the shape
        # ['parent_url', 'page_title', 'paragraph'], e.g.
        #   ['https://www.bbc.com/nepali', '<page title>', '<paragraphs list>']
        # The first row of the first chunk is the CSV header.
        # .............................................
        print(f' columns : {chunk[0]}')

        # Inspect the first data row.
        url, title, paragraphs = chunk[1][0], chunk[1][1], chunk[1][2]
        print(f' row-1: url:{url}, title:{title}, \n paragraphs: {paragraphs}')

        # do processing stuff
        break