WebashalarForML committed on
Commit b5a963d · verified · 1 Parent(s): 964569e

Update utils/json_to_spacy.py

Files changed (1)
  1. utils/json_to_spacy.py +66 -46
utils/json_to_spacy.py CHANGED
@@ -1,8 +1,15 @@
 import json
 import spacy
 from spacy.tokens import DocBin
+import os
 
 def read_in_chunks(file_path, chunk_size=1024):
+    """Read file in chunks to handle large files."""
+    print(f"Reading file: {file_path}")
+    if not os.path.exists(file_path):
+        print(f"Error: File not found at {file_path}")
+        return
+
     with open(file_path, 'r', encoding='utf-8') as file:
         while True:
             data = file.read(chunk_size)
@@ -10,58 +17,71 @@ def read_in_chunks(file_path, chunk_size=1024):
                 break
             yield data
 
-def convert_json_to_spacy(json_file_path, spacy_file_path):
-    # Read the file in chunks and combine the chunks
-    file_content = ""
-    for chunk in read_in_chunks(json_file_path):
-        file_content += chunk
-
-    # Parse the JSON data
-    data = json.loads(file_content)
-
-    # Prepare the data for spaCy
-    spacy_format = []
-
-    for item in data:
-        text = item[0]  # The first element in the list is the text
-        entities = item[1]['entities']  # The second element contains the dictionary with 'entities'
-        spacy_entities = [(start, end, label) for start, end, label in entities]
-        spacy_format.append({"text": text, "entities": spacy_entities})
-
-    # Create a blank English model
-    nlp = spacy.blank("en")
-
-    # Initialize a DocBin object
-    doc_bin = DocBin()
-
-    # Convert the data to spaCy Doc objects and add to DocBin
-    for entry in spacy_format:
-        doc = nlp.make_doc(entry["text"])
-        # Convert entities
-        entities = []
-        seen_positions = set()  # To track positions and avoid overlap
-        for start, end, label in entry["entities"]:
-            # Ensure span is within the document's length
-            if start < 0 or end > len(doc.text) or start >= end:
-                print(f"Invalid span: start={start}, end={end}, label={label}")
-                continue
-
-            # Check for overlaps and prioritize entities
-            if not any(start < e_end and end > e_start for e_start, e_end, _ in seen_positions):
-                span = doc.char_span(start, end, label=label)
-                if span is not None:
-                    entities.append(span)
-                    seen_positions.add((start, end, label))
-            else:
-                print(f"Overlapping span: start={start}, end={end}, label={label}")
-
-        # Set entities
-        doc.ents = entities
-
-        # Add to DocBin
-        doc_bin.add(doc)
-
-    # Save the DocBin to a .spacy file
-    doc_bin.to_disk(spacy_file_path)
-
-    print(f"Data has been successfully saved to {spacy_file_path}!")
+def extract_text_and_entities(item):
+    """Dynamically extract text and entities, handling multiple JSON formats."""
+    print(f"Processing item: {item}")
+    if isinstance(item, dict):
+        # Dictionary structure: {"text": ..., "entities": ...}
+        text = item.get("text", "")
+        entities = item.get("entities", [])
+    elif isinstance(item, list) and len(item) >= 2:
+        # List structure: ["text", {"entities": ...}]
+        text = item[0] if isinstance(item[0], str) else ""
+        entities = item[1].get("entities", []) if isinstance(item[1], dict) else []
+    else:
+        print(f"Unexpected item format: {item}")
+        return None, []  # Return empty text and entities
+
+    valid_entities = [
+        (start, end, label) for start, end, label in entities
+        if isinstance(start, int) and isinstance(end, int) and isinstance(label, str)
+    ]
+    return text, valid_entities
+
+def convert_json_to_spacy(json_file_path, spacy_file_path):
+    """Convert JSON data to spaCy format and save as .spacy file."""
+    try:
+        print(f"Reading JSON from: {json_file_path}")
+        file_content = "".join(chunk for chunk in read_in_chunks(json_file_path))
+
+        data = json.loads(file_content)  # Parse JSON data
+        print(f"Successfully loaded JSON data. Found {len(data)} items.")
+
+        spacy_format = []
+        for item in data:
+            text, entities = extract_text_and_entities(item)
+            if text:  # Skip if text is empty or invalid
+                spacy_format.append({"text": text, "entities": entities})
+
+        # Create a blank spaCy model
+        nlp = spacy.blank("en")
+        doc_bin = DocBin()
+
+        for entry in spacy_format:
+            print(f"Creating spaCy Doc for text: {entry['text']}")
+            doc = nlp.make_doc(entry["text"])
+            entities = []
+            seen_positions = set()
+
+            for start, end, label in entry["entities"]:
+                if start < 0 or end > len(doc.text) or start >= end:
+                    print(f"Invalid span: start={start}, end={end}, label={label}")
+                    continue
+                if not any(start < e_end and end > e_start for e_start, e_end, _ in seen_positions):
+                    span = doc.char_span(start, end, label=label)
+                    if span is not None:
+                        entities.append(span)
+                        seen_positions.add((start, end, label))
+                else:
+                    print(f"Overlapping span: start={start}, end={end}, label={label}")
+
+            doc.ents = entities
+            doc_bin.add(doc)
+
+        doc_bin.to_disk(spacy_file_path)
+        print(f"Data has been successfully saved to {spacy_file_path}!")
+
+    except json.JSONDecodeError as e:
+        print(f"Error decoding JSON: {e}")
+    except Exception as e:
+        print(f"An unexpected error occurred: {e}")