Datasets:

Modalities:
Text
Languages:
English
Libraries:
Datasets
License:
asahi417 committed on
Commit
0a0c5b5
·
1 Parent(s): 996029c

Update create_split.py

Browse files
Files changed (1) hide show
  1. create_split.py +17 -17
create_split.py CHANGED
@@ -22,22 +22,22 @@ def get_test_predicate(_data):
22
  return predicates_test
23
 
24
 
25
- if not os.path.exists("data/t_rex.filter_unified.test.jsonl"):
26
- with open(f"data/t_rex.filter_unified.min_entity_{max(parameters_min_e_freq)}_max_predicate_{min(parameters_max_p_freq)}.jsonl") as f:
27
- data = [json.loads(i) for i in f.read().split('\n') if len(i) > 0]
28
- pred_test = get_test_predicate(data)
29
- data_test = [i for i in data if i['predicate'] in pred_test]
30
- f_writer = open("data/t_rex.filter_unified.test.jsonl", 'w')
31
- for n, i in enumerate(data_test):
32
- print(f"\n[{n+1}/{len(data_test)}]")
33
- print(f"{json.dumps(i, indent=4)}")
34
- flag = input(">>> (enter to add to test)")
35
- if flag == '':
36
- i['relation'] = i.pop('predicate')
37
- i['head'] = i.pop('subject')
38
- i['tail'] = i.pop('object')
39
- f_writer.write(json.dumps(i) + '\n')
40
- f_writer.close()
41
 
42
  with open("data/t_rex.filter_unified.test.jsonl") as f:
43
  data_test = [json.loads(i) for i in f.read().split('\n') if len(i) > 0]
@@ -45,7 +45,7 @@ with open("data/t_rex.filter_unified.test.jsonl") as f:
45
 
46
 
47
  seed(42)
48
- with open(f"data/t_rex.filter_unified.jsonl") as f:
49
  data = [json.loads(i) for i in f.read().split('\n') if len(i) > 0]
50
  for i in data:
51
  i['relation'] = i.pop('predicate')
 
22
  return predicates_test
23
 
24
 
25
+ # if not os.path.exists("data/t_rex.filter_unified.test.jsonl"):
26
+ # with open(f"data/t_rex.filter_unified.min_entity_{max(parameters_min_e_freq)}_max_predicate_{min(parameters_max_p_freq)}.jsonl") as f:
27
+ # data = [json.loads(i) for i in f.read().split('\n') if len(i) > 0]
28
+ # pred_test = get_test_predicate(data)
29
+ # data_test = [i for i in data if i['predicate'] in pred_test]
30
+ # f_writer = open("data/t_rex.filter_unified.test.jsonl", 'w')
31
+ # for n, i in enumerate(data_test):
32
+ # print(f"\n[{n+1}/{len(data_test)}]")
33
+ # print(f"{json.dumps(i, indent=4)}")
34
+ # flag = input(">>> (enter to add to test)")
35
+ # if flag == '':
36
+ # i['relation'] = i.pop('predicate')
37
+ # i['head'] = i.pop('subject')
38
+ # i['tail'] = i.pop('object')
39
+ # f_writer.write(json.dumps(i) + '\n')
40
+ # f_writer.close()
41
 
42
  with open("data/t_rex.filter_unified.test.jsonl") as f:
43
  data_test = [json.loads(i) for i in f.read().split('\n') if len(i) > 0]
 
45
 
46
 
47
  seed(42)
48
+ with open(f"data/t_rex.filter_unified.min_entity_5.jsonl") as f:
49
  data = [json.loads(i) for i in f.read().split('\n') if len(i) > 0]
50
  for i in data:
51
  i['relation'] = i.pop('predicate')