File size: 2,641 Bytes
0dcf7f7
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
# Read crawled CSV data back in chunks (default: 10000 rows of paragraphs at a time).

import csv
import json
import sys

# Raise the csv module's per-field size cap: the serialized paragraph lists in
# the crawled data routinely exceed the default 131072-byte limit, and without
# this csv.reader raises "Error: field larger than field limit (131072)".
csv.field_size_limit(sys.maxsize)

def load_data(file_path, chunk_size=10000):
  """Read a CSV file lazily, yielding its rows in fixed-size chunks.

  The first row (the header) is passed through untouched. For every
  subsequent row the third column is decoded from a JSON string back into a
  Python list — the crawler serialized the paragraph list with json.dumps
  when writing the CSV.

  Args:
    file_path: Path to the CSV file.
    chunk_size: Maximum number of rows per yielded chunk.

  Yields:
    A list of up to ``chunk_size`` rows; the first chunk begins with the
    header row.
  """

  # newline='' is required by the csv module so embedded newlines inside
  # quoted fields round-trip correctly; the crawled data is UTF-8 (Nepali
  # text), so pin the encoding instead of relying on the platform default.
  with open(file_path, 'r', newline='', encoding='utf-8') as csvfile:
    reader = csv.reader(csvfile)

    chunk = []
    for row_num, row in enumerate(reader):
      # Skip JSON-decoding on the header row (row_num == 0); also guard
      # against short/malformed rows so one bad line cannot raise IndexError.
      if row_num > 0 and len(row) > 2:
        row[2] = json.loads(row[2])
      chunk.append(row)
      if len(chunk) >= chunk_size:
        yield chunk
        chunk = []

    if chunk:  # Flush the final partial chunk, if any.
      yield chunk


if __name__ == '__main__':
    file_path = '/content/drive/MyDrive/Research/datasets/crawled_data/crawled_data.csv'
    chunk_size = 100  # 10000

    # Each chunk is a list of rows; every row has the shape
    #   ['parent_url', 'page_title', 'paragraph']
    # where 'paragraph' has been decoded back into a list of paragraph
    # strings, e.g.:
    #   ['https://www.bbc.com/nepali', 'मुख पृष्ठ - BBC News नेपाली',
    #    ['सुर्खेत र जुम्लामा ...', 'नेपालले करिब एक महिना ...']]
    # The very first row of the first chunk is the CSV header.
    for chunk in load_data(file_path, chunk_size):
        print(f' columns : {chunk[0]}')

        # Peek at the first data row of the chunk.
        first_data_row = chunk[1]
        url, title, paragraphs = first_data_row[0], first_data_row[1], first_data_row[2]
        print(f' row-1: url:{url},  title:{title}, \n paragraphs: {paragraphs}')

        # Placeholder: per-chunk processing goes here.
        break