introvoyz041 commited on
Commit
5ad11ec
·
verified ·
1 Parent(s): 65d1018

Migrated from GitHub

Browse files
This view is limited to 50 files because it contains too many changes.   See raw diff
Files changed (50) hide show
  1. .gitattributes +26 -0
  2. data/Correlation/correlation.py +204 -0
  3. data/Snakefile +261 -0
  4. data/config.json +14 -0
  5. data/debug_reports.py +176 -0
  6. data/environment.yml +11 -0
  7. data/imagej_macro/ImageJ_plugins/3D_suite/combinatoricslib-2.0.jar +0 -0
  8. data/imagej_macro/ImageJ_plugins/3D_suite/droplet_finder.jar +0 -0
  9. data/imagej_macro/ImageJ_plugins/3D_suite/imageware.jar +3 -0
  10. data/imagej_macro/ImageJ_plugins/3D_suite/mcib3d-core3.93.jar +3 -0
  11. data/imagej_macro/ImageJ_plugins/3D_suite/mcib3d_plugins3.93.jar +3 -0
  12. data/imagej_macro/ImageJ_plugins/3D_viewer/.directory +4 -0
  13. data/imagej_macro/ImageJ_plugins/3D_viewer/3D_Viewer-4.0.1.jar +3 -0
  14. data/imagej_macro/ImageJ_plugins/3D_viewer/VIB-lib-2.1.1.jar +3 -0
  15. data/imagej_macro/ImageJ_plugins/3D_viewer/gluegen-rt-2.3.2-natives-linux-amd64.jar +0 -0
  16. data/imagej_macro/ImageJ_plugins/3D_viewer/gluegen-rt-2.3.2-natives-macosx-universal.jar +0 -0
  17. data/imagej_macro/ImageJ_plugins/3D_viewer/gluegen-rt-2.3.2-natives-windows-amd64.jar +0 -0
  18. data/imagej_macro/ImageJ_plugins/3D_viewer/gluegen-rt-2.3.2.jar +3 -0
  19. data/imagej_macro/ImageJ_plugins/3D_viewer/gluegen-rt-main-2.3.2.jar +0 -0
  20. data/imagej_macro/ImageJ_plugins/3D_viewer/j3dcore-1.6.0-scijava-2.jar +3 -0
  21. data/imagej_macro/ImageJ_plugins/3D_viewer/j3dutils-1.6.0-scijava-2.jar +3 -0
  22. data/imagej_macro/ImageJ_plugins/3D_viewer/jogl-all-2.3.2-natives-linux-amd64.jar +3 -0
  23. data/imagej_macro/ImageJ_plugins/3D_viewer/jogl-all-2.3.2-natives-macosx-universal.jar +3 -0
  24. data/imagej_macro/ImageJ_plugins/3D_viewer/jogl-all-2.3.2-natives-windows-amd64.jar +3 -0
  25. data/imagej_macro/ImageJ_plugins/3D_viewer/jogl-all-2.3.2.jar +3 -0
  26. data/imagej_macro/ImageJ_plugins/3D_viewer/jogl-all-main-2.3.2.jar +0 -0
  27. data/imagej_macro/ImageJ_plugins/3D_viewer/vecmath-1.6.0-scijava-2.jar +3 -0
  28. data/imagej_macro/ImageJ_plugins/readme.txt +35 -0
  29. data/imagej_macro/ImageJ_plugins/spatial3dtissuej_plugin/TissueJ4Merfish_v14.jar +3 -0
  30. data/imagej_macro/ImageJ_plugins/utils/3D_Convex_Hull.jar +3 -0
  31. data/imagej_macro/ImageJ_plugins/utils/Fiji_Plugins-3.1.1.jar +0 -0
  32. data/imagej_macro/ImageJ_plugins/utils/SlideJ_.jar +0 -0
  33. data/imagej_macro/ImageJ_plugins/utils/fiji-lib-2.1.2.jar +0 -0
  34. data/imagej_macro/ImageJ_plugins/utils/imagescience.jar +3 -0
  35. data/imagej_macro/ImageJ_plugins/utils/mpicbg_-1.4.1.jar +3 -0
  36. data/imagej_macro/ImageJ_plugins/utils/quickhull3d-1.0.0.jar +0 -0
  37. data/imagej_macro/bleed_throught_validate/bleed_throught_macro.ijm +155 -0
  38. data/imagej_macro/bleed_throught_validate/raw/merFISH_02_007_01_wavelength_561.TIFF +3 -0
  39. data/imagej_macro/bleed_throught_validate/raw/merFISH_02_007_01_wavelength_647.TIFF +3 -0
  40. data/imagej_macro/bleed_throught_validate/results/bleed_throught_report.csv +2 -0
  41. data/imagej_macro/bleed_throught_validate/results/bleedthrought_signals.zip +3 -0
  42. data/imagej_macro/bleed_throught_validate/results/merFISH_02_007_01_wavelength_561_BINARY.zip +3 -0
  43. data/imagej_macro/bleed_throught_validate/results/merFISH_02_007_01_wavelength_647_BINARY.zip +3 -0
  44. data/imagej_macro/blur_detector/dataset1/merFISH_01_025_05.TIFF +3 -0
  45. data/imagej_macro/blur_detector/dataset1/merFISH_08_025_05.TIFF +3 -0
  46. data/imagej_macro/blur_detector/dataset2/merFISH_05_025_05.TIFF +3 -0
  47. data/imagej_macro/blur_detector/dataset2/merFISH_06_025_05.TIFF +3 -0
  48. data/imagej_macro/blur_detector/detecting_blur_image.R +85 -0
  49. data/imagej_macro/blur_detector/execute_blur_detector_dataset1.sh +59 -0
  50. data/imagej_macro/blur_detector/execute_blur_detector_dataset2.sh +59 -0
.gitattributes CHANGED
@@ -57,3 +57,29 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
57
  # Video files - compressed
58
  *.mp4 filter=lfs diff=lfs merge=lfs -text
59
  *.webm filter=lfs diff=lfs merge=lfs -text
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
57
  # Video files - compressed
58
  *.mp4 filter=lfs diff=lfs merge=lfs -text
59
  *.webm filter=lfs diff=lfs merge=lfs -text
60
+ data/imagej_macro/ImageJ_plugins/3D_suite/imageware.jar filter=lfs diff=lfs merge=lfs -text
61
+ data/imagej_macro/ImageJ_plugins/3D_suite/mcib3d-core3.93.jar filter=lfs diff=lfs merge=lfs -text
62
+ data/imagej_macro/ImageJ_plugins/3D_suite/mcib3d_plugins3.93.jar filter=lfs diff=lfs merge=lfs -text
63
+ data/imagej_macro/ImageJ_plugins/3D_viewer/3D_Viewer-4.0.1.jar filter=lfs diff=lfs merge=lfs -text
64
+ data/imagej_macro/ImageJ_plugins/3D_viewer/VIB-lib-2.1.1.jar filter=lfs diff=lfs merge=lfs -text
65
+ data/imagej_macro/ImageJ_plugins/3D_viewer/gluegen-rt-2.3.2.jar filter=lfs diff=lfs merge=lfs -text
66
+ data/imagej_macro/ImageJ_plugins/3D_viewer/j3dcore-1.6.0-scijava-2.jar filter=lfs diff=lfs merge=lfs -text
67
+ data/imagej_macro/ImageJ_plugins/3D_viewer/j3dutils-1.6.0-scijava-2.jar filter=lfs diff=lfs merge=lfs -text
68
+ data/imagej_macro/ImageJ_plugins/3D_viewer/jogl-all-2.3.2-natives-linux-amd64.jar filter=lfs diff=lfs merge=lfs -text
69
+ data/imagej_macro/ImageJ_plugins/3D_viewer/jogl-all-2.3.2-natives-macosx-universal.jar filter=lfs diff=lfs merge=lfs -text
70
+ data/imagej_macro/ImageJ_plugins/3D_viewer/jogl-all-2.3.2-natives-windows-amd64.jar filter=lfs diff=lfs merge=lfs -text
71
+ data/imagej_macro/ImageJ_plugins/3D_viewer/jogl-all-2.3.2.jar filter=lfs diff=lfs merge=lfs -text
72
+ data/imagej_macro/ImageJ_plugins/3D_viewer/vecmath-1.6.0-scijava-2.jar filter=lfs diff=lfs merge=lfs -text
73
+ data/imagej_macro/ImageJ_plugins/spatial3dtissuej_plugin/TissueJ4Merfish_v14.jar filter=lfs diff=lfs merge=lfs -text
74
+ data/imagej_macro/ImageJ_plugins/utils/3D_Convex_Hull.jar filter=lfs diff=lfs merge=lfs -text
75
+ data/imagej_macro/ImageJ_plugins/utils/imagescience.jar filter=lfs diff=lfs merge=lfs -text
76
+ data/imagej_macro/ImageJ_plugins/utils/mpicbg_-1.4.1.jar filter=lfs diff=lfs merge=lfs -text
77
+ data/imagej_macro/cell_zone_detection/TissueJ4Merfish_v14.jar filter=lfs diff=lfs merge=lfs -text
78
+ data/imagej_macro/cell_zone_detection/merFISH_01_025_08-SEG.tif filter=lfs diff=lfs merge=lfs -text
79
+ data/imagej_macro/cell_zone_detection/merFISH_01_025_08-mask.tif filter=lfs diff=lfs merge=lfs -text
80
+ data/imagej_macro/nuclei_segmentation/cell_zone_results/aligned_images_fov0_z18_01_cell_zone_rad_5.0.tif filter=lfs diff=lfs merge=lfs -text
81
+ data/imagej_macro/nuclei_segmentation/demo_results/aligned_images_fov0_z18_01_SEG_demo_only.tif filter=lfs diff=lfs merge=lfs -text
82
+ data/imagej_macro/nuclei_segmentation/seg_results/aligned_images_fov0_z18_01_SEG.tif filter=lfs diff=lfs merge=lfs -text
83
+ data/imagej_macro/nuclei_segmentation/seg_results/aligned_images_fov0_z18_02_SEG.tif filter=lfs diff=lfs merge=lfs -text
84
+ data/imagej_macro/nuclei_segmentation/testing_images/aligned_images_fov0_z18_01.tif filter=lfs diff=lfs merge=lfs -text
85
+ data/imagej_macro/nuclei_segmentation/testing_images/aligned_images_fov0_z18_02.tif filter=lfs diff=lfs merge=lfs -text
data/Correlation/correlation.py ADDED
@@ -0,0 +1,204 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+
2
+
3
+
4
+
5
+ # Import
6
+ import cProfile
7
+ from distutils import core
8
+ from pathlib import Path
9
+ import numpy as np
10
+ import scipy as sp
11
+ import pandas as pd
12
+ import matplotlib.pyplot as plt
13
+ from matplotlib.backends.backend_pdf import PdfPages
14
+ import seaborn as sns
15
+ import os
16
+ import pickle
17
+ import scipy.stats as stats
18
+ import argparse
19
+ from adjustText import adjust_text
20
+ import matplotlib.backends.backend_pdf
21
+
22
+
23
+
24
+ def main():
25
+ parser = argparse.ArgumentParser()
26
+ parser.add_argument('-b', type = str, help = 'Path to barcodes')
27
+ parser.add_argument('-xp', type = str, help = 'Experiment ID')
28
+ parser.add_argument('-c', type = str, default = "D:\BCCAncer\FILES\codebook_0_C1E1_van.csv", help = 'Path to codebook')
29
+ parser.add_argument('-a', type = str, default = "D:\BCCAncer\FILES\XP2474_4T1_C1E1_new_bulk.csv", help = 'Path to bulk file (abundance file)')
30
+ parser.add_argument('-o', type = str, default = "D:\BCCAncer\EXP\XP7174\Correlations\BulkCorr", help = 'Path to save processed files')
31
+ parser.add_argument('-is_cambridge', type = bool, default = False, help = 'Are the results for Cambridge datasets?')
32
+ parser.add_argument('-drop_blanks', type = bool, default = False, help = 'Remove blanks before correlation')
33
+ parser.add_argument('-log', type =bool, default = True, help = 'Do we want to take log while correlation of bulk?')
34
+ parser.add_argument("-d", nargs="+", type = float, default=0.65, help = 'Max mean distance threshold for an area of detected barcode')
35
+ parser.add_argument("-removeZ", nargs="+", type=int, default=None, help="Z slices to remove from evaluation")
36
+ #parser.add_argument('-gene_list', type = str, action='append', required=True, help = 'List of genes which we want to exclude from analysis')
37
+
38
+
39
+ args = parser.parse_args()
40
+
41
+ if not os.path.exists(args.o):
42
+ os.makedirs(args.o)
43
+
44
+ info_np = np.zeros(shape = (len(args.d), 7))
45
+
46
+ # read sheet and remove undesired z slices
47
+ df = read_sheet(args.b)
48
+ if args.removeZ is not None:
49
+ removeZ_set = set(args.removeZ)
50
+ df = df[~df['z'].isin(removeZ_set)]
51
+
52
+
53
+
54
+ pdf = matplotlib.backends.backend_pdf.PdfPages(f'{args.o}/{args.xp}_correlation.pdf')
55
+ correlation = Correlation(args.c, df, args.a, args.o, args.xp, args.is_cambridge, pdf)
56
+ args.d.sort(reverse = True)
57
+
58
+
59
+ for idx, dist_threshold in enumerate(args.d):
60
+ filter_df = correlation.filter_distance(dist_threshold)
61
+ gp_df = correlation.groupby(filter_df)
62
+ gp_df = correlation.merge_df_cb(gp_df)
63
+
64
+ tot_counts = gp_df.counts.sum()
65
+ blank_counts = gp_df.loc[gp_df['gene_symbol'].isin(['Blank_01', 'Blank_02', 'Blank_03', 'Blank_04', 'Blank_05', 'Blank_06'])].counts.sum()
66
+
67
+ info_np[idx][0] = dist_threshold
68
+ info_np[idx][3] = int(tot_counts)
69
+ info_np[idx][4] = int(blank_counts)
70
+ info_np[idx][5] = int(len(gp_df))
71
+
72
+ gp_df = correlation.df_bulk(gp_df)
73
+
74
+ if args.drop_blanks:
75
+ gp_df = correlation.remove_blanks(gp_df)
76
+
77
+ """
78
+ if args.gene_list:
79
+ print('removing')
80
+ gp_df = correlation.remove_specific_genes(gp_df)
81
+ """
82
+
83
+ info_np[idx][-1] = len(gp_df)
84
+
85
+ gp_df['log_counts'] = np.log2(gp_df['counts']+1)
86
+ gp_df['log_tpm'] = np.log2(gp_df['bulk_exp']+0.0001)
87
+
88
+ gp_df.to_csv(f'{args.o}/{args.xp}_{dist_threshold}.csv')
89
+
90
+
91
+ info_np[idx][1], info_np[idx][2] = correlation.log_correlation(gp_df, dist_threshold)
92
+
93
+ pdf.close()
94
+
95
+ info_df = pd.DataFrame(info_np, columns = ['Distance Threshold',\
96
+ 'Pearson correlation',\
97
+ 'Spearman correlation',\
98
+ '# detected barcodes (including control barcodes)',\
99
+ '# detected control (blanks) barcodes',\
100
+ '# genes at dist threshold in correlation',\
101
+ '# barcodes counted towards correlaton estimation'])
102
+ info_df.to_csv(f'{args.o}/info_{args.xp}.csv')
103
+
104
+ def read_sheet(file):
105
+
106
+ ext = file.split(".")[-1]
107
+ if ext == "csv":
108
+ df = pd.read_csv(file)
109
+ elif ext == "tsv":
110
+ df = pd.read_csv(file, '\t')
111
+ elif ext in {"xls", "xlsx", "xlsm", "xlsb"}:
112
+ df = pd.read_excel(file)
113
+ else:
114
+ raise ValueError("Unexpected file extension")
115
+ return df
116
+
117
+ class Correlation:
118
+ def __init__(self, codebook, barcodes, bulk, output_dir, name, is_cambridge, pdf_object):
119
+ self.codebook = read_sheet(codebook)
120
+ self.barcodes = barcodes
121
+ self.bulk = read_sheet(bulk)
122
+ self.output_dir = output_dir
123
+ self.name = name
124
+ self.is_cambridge = is_cambridge
125
+ self.pdf_object = pdf_object
126
+ #self.remove_genes_list = remove_genes_list
127
+
128
+ def filter_distance(self, distance_threshold):
129
+ return self.barcodes.loc[self.barcodes['mean_distance']<distance_threshold]
130
+
131
+ def groupby(self, df):
132
+ return (df.groupby('barcode_id').size().reset_index(name='counts'))
133
+
134
+ def modify_codebook(self):
135
+ self.codebook['barcode_id'] = self.codebook.index
136
+ self.codebook.rename(columns={"name": "gene_symbol"}, inplace = True)
137
+
138
+ def merge_df_cb(self,df):
139
+ self.modify_codebook()
140
+ return pd.merge(self.codebook, df, how='inner', on='barcode_id')
141
+
142
+ def df_bulk(self, df):
143
+ return pd.merge(self.bulk, df, how='inner', on='gene_symbol')
144
+
145
+ def remove_blanks(self,df):
146
+ return df[df['bulk_exp'] != 0]
147
+
148
+ """
149
+ def remove_specific_genes(self,df):
150
+ c = 0
151
+ print(self.remove_genes_list)
152
+ for gene in self.remove_genes_list:
153
+ print(c, gene)
154
+ df = df.loc[df['gene_symbol']!=gene]
155
+ #cccprint(df.loc[df['gene_symbol']==gene])
156
+ c+=1
157
+ return df
158
+ """
159
+
160
+
161
+ def log_correlation(self, df, dist_threshold):
162
+ f1, ax = plt.subplots(figsize=(9, 9))
163
+ sns.set_palette("deep")
164
+ sns.scatterplot(x="log_tpm",y="log_counts",data=df,ax=ax)
165
+
166
+ if self.is_cambridge == True:
167
+ count_type = "C_counts"
168
+ else: count_type = "V_counts"
169
+
170
+
171
+ ax.set_title("TPM Correlation for " + self.name, fontsize = 15)
172
+ ax.set_xlabel("log2(TPM+1e-4), V_bulk", fontsize = 20)
173
+ ax.set_ylabel("log2(# detected counts+1), "+ count_type, fontsize = 20)
174
+
175
+
176
+ def plotlabel(xvar, yvar, label):
177
+ ax.text(xvar+0.02, yvar, label)
178
+
179
+
180
+ pearson, _ = stats.pearsonr(df["log_tpm"],df["log_counts"])
181
+ spearman, _ = stats.spearmanr(df["log_tpm"],df["log_counts"])
182
+ #kendalltau, _ = stats.kendalltau(df["log_tpm"],df["log_counts"])
183
+
184
+ ax.text(.01, .95, 'Pearson = {:.2f}\nSpearman = {:.2f}'.format(pearson,spearman),transform=ax.transAxes)
185
+ #df.apply(lambda x: plotlabel(x['log_tpm'], x['log_counts'], x['gene_symbol']), axis=1)
186
+ #plt.axvline(x = 0, color = 'r', label = 'axvline - full height')
187
+
188
+ self.pdf_object.savefig(f1)
189
+
190
+ texts = []
191
+ for xs,ys,label in zip(df['log_tpm'],df['log_counts'],df['gene_symbol']):
192
+ texts.append(ax.text(xs,ys,label))
193
+ adjust_text(texts, force_points=0.2, force_text=0.2,expand_points=(1, 1), expand_text=(1, 1), arrowprops=dict(arrowstyle="-", color='black', lw=0.5))
194
+ plt.tight_layout()
195
+
196
+ #plt.savefig(f'{self.output_dir}/{self.name}_{dist_threshold}.pdf')
197
+ self.pdf_object.savefig(f1)
198
+
199
+ return pearson, spearman
200
+
201
+
202
+ if __name__ == '__main__':
203
+ #cProfile.run('main()')
204
+ main()
data/Snakefile ADDED
@@ -0,0 +1,261 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from pathlib import Path
2
+ import re
3
+ import os
4
+ import makereports as reports
5
+ from utils import fileIO,imgproc
6
+
7
+ configfile: "config.json"
8
+
9
+
10
+ def download_azure():
11
+
12
+ if config["isRemote"]=="N" or os.path.exists(os.path.join(config['results_path'],'data')):
13
+ return None
14
+
15
+ shell(config["azure_command"].format(config["raw_data_path"],os.path.join(config['results_path'],'data')))
16
+
17
+ return None
18
+
19
+ def check_params():
20
+
21
+ # Check to see if there are any params that need to be found
22
+ b_findz = len(config['z'])==0
23
+ b_findfov = len(config['fov'])==0
24
+ b_findir = len(config['ir'])==0
25
+ b_findwv = len(config['channel'])==0
26
+
27
+
28
+ # get all the images
29
+ if config["isRemote"] == "Y":
30
+ files = Path(os.path.join(config['results_path'],'data')).rglob('*.TIFF')
31
+ else:
32
+ files = Path(config['raw_data_path']).rglob('*.TIFF')
33
+ files = list(map(str,files))
34
+ # Find all channel, ir, fov, and z from the file names
35
+ re_filter = r"(.*)(\d{3})(?=nm).*(\B\d{2})\D(\B\d{3})\D(\B\d{2}\b)"
36
+ param_filter = re.compile(re_filter)
37
+ results = list(map(param_filter.search,files))
38
+
39
+ results =list(filter(lambda x: isinstance(x,re.Match), results))
40
+ # Get the list of parameters if they are needed
41
+ # Group 0 is the full match, so each capture group is 1 indexed
42
+ if b_findz:
43
+ zs = list(map(lambda x: x.group(5),results))
44
+ else:
45
+ zs = config['z']
46
+
47
+ if b_findir:
48
+ irs = list(map(lambda x: x.group(3),results))
49
+ else:
50
+ irs = config['ir']
51
+
52
+ if b_findfov:
53
+ fovs = list(map(lambda x: x.group(4),results))
54
+ else:
55
+ fovs = config['fov']
56
+
57
+ if b_findwv:
58
+ wvs = list(map(lambda x: x.group(2),results))
59
+ else:
60
+ wvs = config['channel']
61
+
62
+ if isinstance(results,list) and len(results)>0:
63
+ full_raw_path = results[0].group(1)
64
+ else:
65
+ full_raw_path=''
66
+
67
+
68
+ return sorted(list(set(zs)),key=int),sorted(list(set(fovs)),key=int),sorted(list(set(irs)),key=int),sorted(list(set(wvs)),key=int),full_raw_path
69
+
70
+
71
+ download_azure()
72
+ zs,fovs,irs,wvs,full_raw_path = check_params()
73
+
74
+
75
+ default_message= "rule {rule}, {wildcards}, threads: {threads}"
76
+
77
+ rule all_done:
78
+ input:
79
+ os.path.join(config['results_path'],'brightness_report.t'),
80
+ os.path.join(config['results_path'],'focus_report.t'),
81
+ os.path.join(config['results_path'],'deconvolved_brightness_report.t'),
82
+ os.path.join(config['results_path'],'masked_brightness_report.t'),
83
+ os.path.join(config['results_path'],'decodability_report.t')
84
+ output:
85
+ os.path.join(config['results_path'],'all_done.t')
86
+ run:
87
+ if config["isRemote"]=="Y":
88
+ shell("rm -rf \"{}\"".format(os.path.join(config['results_path'],'data')))
89
+ if config["delete_stack"]=="Y":
90
+ shell("rm -f \"{}\"".format(os.path.join(config['results_path'],'img*')))
91
+ shell("touch \"{output[0]}\"")
92
+
93
+ rule all:
94
+ threads:1
95
+ input:
96
+ os.path.join(config['results_path'],'all_done.t')
97
+
98
+
99
+ def isRemote(wildcards):
100
+
101
+ if config["isRemote"]=="N":
102
+ return config["raw_data_path"]
103
+ else:
104
+ return directory(os.path.join(config['results_path'],'data'))
105
+
106
+ rule create_image_stack:
107
+ threads:1
108
+ message: default_message
109
+ input:
110
+ isRemote
111
+ output:
112
+ out_file = os.path.join(config['results_path'],'imgstack_{fov}_{z}.npy'),
113
+ coord_file = os.path.join(config['results_path'],'coord_{fov}_{z}.json')
114
+ run:
115
+ fileIO.create_image_stack(os.path.join(full_raw_path,config['raw_image_format']),
116
+ wildcards.fov,wildcards.z,irs,wvs,output.out_file,output.coord_file)
117
+
118
+
119
+ rule deconvolve_images:
120
+ threads:1
121
+ message: default_message
122
+ input:
123
+ in_file = os.path.join(config['results_path'],'imgstack_{fov}_{z}.npy'),
124
+ output:
125
+ out_file = os.path.join(config['results_path'],'deconvolved','deconvolved_{fov}_{z}.npy')
126
+ run:
127
+ imgproc._deconvolute(input.in_file,output.out_file)
128
+
129
+
130
+
131
+
132
+ rule deconvolved_brightness_report:
133
+ threads:1
134
+ message: default_message
135
+ input:
136
+ img_stack = os.path.join(config['results_path'],'deconvolved','deconvolved_{fov}_{z}.npy'),
137
+ coord_file = os.path.join(config['results_path'],'coord_{fov}_{z}.json')
138
+ output:
139
+ out=os.path.join(config['results_path'],'deconvolved_brightness_report_{fov}_{z}.pdf')
140
+ run:
141
+ reports.generate_brightness_reports(input.img_stack,input.coord_file,output.out,wildcards.fov,wildcards.z)
142
+
143
+
144
+ rule compile_deconvolved_brightness_report:
145
+ threads:1
146
+ message: default_message
147
+ input:
148
+ expand(os.path.join(config['results_path'],'deconvolved_brightness_report_{fov}_{z}.pdf'),fov=fovs,z=zs)
149
+ output:
150
+ os.path.join(config['results_path'],'deconvolved_brightness_report.t')
151
+ shell:
152
+ "touch \"{output}\""
153
+
154
+
155
+ rule create_mask_images:
156
+ threads:1
157
+ message: default_message
158
+ input:
159
+ img_stack = os.path.join(config['results_path'],'deconvolved','deconvolved_{fov}_{z}.npy'),
160
+ output:
161
+ out_mask=os.path.join(config['results_path'],'masked','mask_{fov}_{z}.npy')
162
+ run:
163
+ imgproc.maskImages(input.img_stack,output.out_mask)
164
+
165
+
166
+ rule masked_brightness_report:
167
+ threads:1
168
+ message: default_message
169
+ input:
170
+ img_stack = os.path.join(config['results_path'],'imgstack_{fov}_{z}.npy'),
171
+ masks = os.path.join(config['results_path'],'masked','mask_{fov}_{z}.npy'),
172
+ coord_file = os.path.join(config['results_path'],'coord_{fov}_{z}.json')
173
+ output:
174
+ out=os.path.join(config['results_path'],'masked_brightness_report_{fov}_{z}.pdf')
175
+ run:
176
+ reports.generate_masked_brightness_reports(input.img_stack,input.coord_file,output.out,wildcards.fov,wildcards.z,input.masks)
177
+
178
+ rule compile_masked_brightness_report:
179
+ threads:1
180
+ message: default_message
181
+ input:
182
+ expand(os.path.join(config['results_path'],'masked_brightness_report_{fov}_{z}.pdf'),fov=fovs,z=zs)
183
+ output:
184
+ os.path.join(config['results_path'],'masked_brightness_report.t')
185
+ shell:
186
+ "touch \"{output}\""
187
+
188
+ rule brightness_report:
189
+ threads:1
190
+ message: default_message
191
+ input:
192
+ img_stack = os.path.join(config['results_path'],'imgstack_{fov}_{z}.npy'),
193
+ coord_file = os.path.join(config['results_path'],'coord_{fov}_{z}.json')
194
+ output:
195
+ out=os.path.join(config['results_path'],'brightness_report_{fov}_{z}.pdf')
196
+ run:
197
+ reports.generate_brightness_reports(input.img_stack,input.coord_file,output.out,wildcards.fov,wildcards.z)
198
+
199
+ rule compile_brightness_report:
200
+ threads:1
201
+ message: default_message
202
+ input:
203
+ expand(os.path.join(config['results_path'],'brightness_report_{fov}_{z}.pdf'),fov=fovs,z=zs)
204
+ output:
205
+ os.path.join(config['results_path'],'brightness_report.t')
206
+ shell:
207
+ "touch \"{output}\""
208
+
209
+
210
+
211
+
212
+ rule focus_report:
213
+ threads:1
214
+ message: default_message
215
+ input:
216
+ img_stack = expand(os.path.join(config['results_path'],'imgstack_{{fov}}_{z}.npy'),z=zs),
217
+ coord_file = expand(os.path.join(config['results_path'],'coord_{{fov}}_{z}.json'),z=zs)
218
+ output:
219
+ out=os.path.join(config['results_path'],'focus_report_{fov}.pdf'),
220
+ out_csvs = os.path.join(config['results_path'],'focus_report_{fov}.csv')
221
+ run:
222
+ reports.generate_focus_reports(input.img_stack,input.coord_file,output.out,output.out_csvs,wildcards.fov)
223
+
224
+ rule compile_focus_report:
225
+ threads:1
226
+ message: default_message
227
+ input:
228
+ files = expand(os.path.join(config['results_path'],'focus_report_{fov}.pdf'),fov=fovs),
229
+ csvs = expand(os.path.join(config['results_path'],'focus_report_{fov}.csv'),fov=fovs)
230
+ output:
231
+ combined = os.path.join(config['results_path'],'focus_report_all_fov.pdf'),
232
+ out = os.path.join(config['results_path'],'focus_report.t')
233
+ run:
234
+ reports.compile_focus_report(input.csvs,output.combined,irs,wvs)
235
+ shell("touch \"{output.out}\"")
236
+
237
+
238
+ rule decodability_report:
239
+ threads:1
240
+ message: default_message
241
+ input:
242
+ img_stack = os.path.join(config['results_path'],'deconvolved','deconvolved_{fov}_{z}.npy'),
243
+ coord_file = os.path.join(config['results_path'],'coord_{fov}_{z}.json'),
244
+ codebook_file = config["codebook_file"],
245
+ data_organization_file = config["data_org_file"]
246
+ output:
247
+ out=os.path.join(config['results_path'],'decodability_report_{fov}_{z}.pdf'),
248
+ out_stats = os.path.join(config['results_path'],'decodability_stats_{fov}_{z}.txt')
249
+ run:
250
+ reports.generate_decodability_reports(input.img_stack,input.coord_file,output.out,input.codebook_file,input.data_organization_file,wildcards.fov,wildcards.z,output.out_stats)
251
+
252
+
253
+ rule compile_decodability_report:
254
+ threads:1
255
+ message: default_message
256
+ input:
257
+ expand(os.path.join(config['results_path'],'decodability_report_{fov}_{z}.pdf'),fov=fovs,z=zs)
258
+ output:
259
+ os.path.join(config['results_path'],'decodability_report.t')
260
+ shell:
261
+ "touch \"{output}\""
data/config.json ADDED
@@ -0,0 +1,14 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "raw_data_path":"/Volumes/MERFISH_COLD/XP2059",
3
+ "results_path":"/Volumes/MERFISH_COLD/XP2059/1/1/2059_merFISH_report_results",
4
+ "raw_image_format": "{wv}nm, Raw/merFISH_{ir}_{fov}_{z}.TIFF",
5
+ "channel": [],
6
+ "fov": ["001"],
7
+ "ir": [],
8
+ "z": ["01","02","03"],
9
+ "isRemote":"N",
10
+ "azure_command":"azcopy cp \"{}\" \"{}\" --recursive",
11
+ "delete_stack":"N",
12
+ "codebook_file": "/Volumes/MERFISH_COLD/Codebooks/C1E1_codebook_shahid.csv",
13
+ "data_org_file":"/Volumes/MERFISH_COLD/data_organization/data_organization.csv"
14
+ }
data/debug_reports.py ADDED
@@ -0,0 +1,176 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import json
2
+ import os
3
+ import numpy as np
4
+ from utils import fileIO,imgproc
5
+ import reports
6
+ import makereports
7
+ from pathlib import Path
8
+ import time
9
+ import re
10
+
11
+ coord_file = "config.json"
12
+ a_file = open(coord_file, "r")
13
+ config = json.load(a_file)
14
+
15
+
16
+ fov = '001'
17
+ z='02'
18
+
19
+ def download_azure():
20
+
21
+ if config["isRemote"]=="N" or os.path.exists(os.path.join(config['results_path'],'data')):
22
+ return None
23
+
24
+ os.system(config["azure_command"].format(config["raw_data_path"],os.path.join(config['results_path'],'data')))
25
+
26
+ return None
27
+
28
+ def check_params():
29
+
30
+ # Check to see if there are any params that need to be found
31
+ b_findz = len(config['z'])==0
32
+ b_findfov = len(config['fov'])==0
33
+ b_findir = len(config['ir'])==0
34
+ b_findwv = len(config['channel'])==0
35
+
36
+
37
+ # get all the images
38
+ if config["isRemote"] == "Y":
39
+ files = Path(os.path.join(config['results_path'],'data')).rglob('*.TIFF')
40
+ else:
41
+ files = Path(config['raw_data_path']).rglob('*.TIFF')
42
+ files = list(map(str,files))
43
+ # Find all channel, ir, fov, and z from the file names
44
+ re_filter = r"(.*)(\d{3})(?=nm).*(\B\d{2})\D(\B\d{3})\D(\B\d{2}\b)"
45
+ param_filter = re.compile(re_filter)
46
+ results = list(map(param_filter.search,files))
47
+
48
+ results =list(filter(lambda x: isinstance(x,re.Match), results))
49
+ # Get the list of parameters if they are needed
50
+ # Group 0 is the full match, so each capture group is 1 indexed
51
+ if b_findz:
52
+ zs = list(map(lambda x: x.group(5),results))
53
+ else:
54
+ zs = config['z']
55
+
56
+ if b_findir:
57
+ irs = list(map(lambda x: x.group(3),results))
58
+ else:
59
+ irs = config['ir']
60
+
61
+ if b_findfov:
62
+ fovs = list(map(lambda x: x.group(4),results))
63
+ else:
64
+ fovs = config['fov']
65
+
66
+ if b_findwv:
67
+ wvs = list(map(lambda x: x.group(2),results))
68
+ else:
69
+ wvs = config['channel']
70
+
71
+ if isinstance(results,list) and len(results)>0:
72
+ full_raw_path = results[0].group(1)
73
+ else:
74
+ full_raw_path=''
75
+
76
+
77
+ return sorted(list(set(zs)),key=int),sorted(list(set(fovs)),key=int),sorted(list(set(irs)),key=int),sorted(list(set(wvs)),key=int),full_raw_path
78
+
79
+
80
+ def check_dirs(files):
81
+ """Checks to see if the directories for the files in the list exist. If they dont, then make those directories
82
+
83
+ Args:
84
+ files (list[str]): the list of files whose directory paths to create. Needs to be the full, not relative paths
85
+
86
+ """
87
+ if not type(files) == list:
88
+ d = os.path.dirname(files)
89
+ if not os.path.isdir(d):
90
+ # print(files+'files' + d +'Does not exist')
91
+ os.makedirs(d)
92
+ else:
93
+ for f in files:
94
+
95
+ d = os.path.dirname(f)
96
+ if d != "" and not os.path.isdir(d):
97
+ # print(f+'files' + d +'Does not exist')
98
+ os.makedirs(d)
99
+
100
+ download_azure()
101
+ zs,fovs,irs,wvs,full_raw_path = check_params()
102
+
103
+ def create_image_stack():
104
+
105
+ out_file = os.path.join(config['results_path'],f'imgstack_{fov}_{z}.npy')
106
+ coord_file = os.path.join(config['results_path'],f'coord_{fov}_{z}.json')
107
+ check_dirs(out_file)
108
+
109
+ fileIO.create_image_stack(os.path.join(full_raw_path,config['raw_image_format']),fov,z,irs,wvs,out_file,coord_file)
110
+
111
+ def create_deconvolved_images():
112
+ in_file = os.path.join(config['results_path'],f'imgstack_{fov}_{z}.npy')
113
+
114
+ out_file = os.path.join(config['results_path'],'deconvolved',f'deconvolved_{fov}_{z}.npy')
115
+ check_dirs(out_file)
116
+ imgproc._deconvolute(in_file,out_file)
117
+
118
+ def create_brightness_report():
119
+
120
+ img_stack = os.path.join(config['results_path'],f'imgstack_{fov}_{z}.npy')
121
+ coord_file = os.path.join(config['results_path'],f'coord_{fov}_{z}.json')
122
+
123
+ out=os.path.join(config['results_path'],f'brightness_report_{fov}_{z}.pdf')
124
+ check_dirs(out)
125
+
126
+ makereports.brightness_worker(img_stack,coord_file,out,fov,z)
127
+
128
+ def create_focus_report():
129
+
130
+ img_stack = [os.path.join(config['results_path'],f'imgstack_{fov}_{z}.npy')]
131
+ coord_file = [os.path.join(config['results_path'],f'coord_{fov}_{z}.json')]
132
+
133
+ out=os.path.join(config['results_path'],f'focus_report_{fov}.pdf')
134
+ out_csv = os.path.join(config['results_path'],f'focus_report_{fov}.csv')
135
+ check_dirs(out)
136
+ makereports.focus_worker(img_stack,coord_file,out,out_csv,fov)
137
+
138
+
139
+ def compile_focus_reports():
140
+
141
+ in_files = [os.path.join(config['results_path'],f'focus_report_{fov}.csv')]
142
+ output = 'full_report_debug.csv'
143
+
144
+ makereports.compile_focus_report(in_files,output=output,irs=irs,wvs=wvs)
145
+
146
+
147
+ if __name__=='__main__':
148
+ start_time = time.time()
149
+
150
+ sub_start_time = time.time()
151
+ create_image_stack()
152
+ sub_end_time = time.time()
153
+ print(f'Image Stack: {sub_end_time-sub_start_time}')
154
+
155
+ sub_start_time = time.time()
156
+ create_deconvolved_images()
157
+ sub_end_time = time.time()
158
+ print(f'Deconvolved Stack: {sub_end_time-sub_start_time}')
159
+
160
+ sub_start_time = time.time()
161
+ create_brightness_report()
162
+ sub_end_time = time.time()
163
+ print(f'Brightness Report: {sub_end_time-sub_start_time}')
164
+
165
+ sub_start_time = time.time()
166
+ create_focus_report()
167
+ sub_end_time = time.time()
168
+ print(f'Focus Report: {sub_end_time-sub_start_time}')
169
+
170
+ sub_start_time = time.time()
171
+ compile_focus_reports()
172
+ sub_end_time = time.time()
173
+ print(f'Compiled Focus Report: {sub_end_time-sub_start_time}')
174
+
175
+ end_time = time.time()
176
+ print(f'Total Time: {end_time-start_time}')
data/environment.yml ADDED
@@ -0,0 +1,11 @@
 
 
 
 
 
 
 
 
 
 
 
 
1
+ name: experiment_report
2
+ channels:
3
+ - conda-forge
4
+ - bioconda
5
+ dependencies:
6
+ - python=3.7
7
+ - snakemake
8
+ - matplotlib
9
+ - scikit-image
10
+ - numpy
11
+ - scikit-learn
data/imagej_macro/ImageJ_plugins/3D_suite/combinatoricslib-2.0.jar ADDED
Binary file (47.1 kB). View file
 
data/imagej_macro/ImageJ_plugins/3D_suite/droplet_finder.jar ADDED
Binary file (78.4 kB). View file
 
data/imagej_macro/ImageJ_plugins/3D_suite/imageware.jar ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:d0f521c689ff44fbbea7f87b8125c9f86e12923388a88892ae6120c4062b511d
3
+ size 103743
data/imagej_macro/ImageJ_plugins/3D_suite/mcib3d-core3.93.jar ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:1d3b90de88264aa7c1969ccb4fd8c2c74768f5f8b437c55e4257dda154bc97e0
3
+ size 814496
data/imagej_macro/ImageJ_plugins/3D_suite/mcib3d_plugins3.93.jar ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:e265339b392cdfe621265430f7fb850a1bba60686ecdbb22d3b3be19e810a716
3
+ size 252590
data/imagej_macro/ImageJ_plugins/3D_viewer/.directory ADDED
@@ -0,0 +1,4 @@
 
 
 
 
 
1
+ [Dolphin]
2
+ PreviewsShown=true
3
+ Timestamp=2016,7,1,12,4,53
4
+ Version=3
data/imagej_macro/ImageJ_plugins/3D_viewer/3D_Viewer-4.0.1.jar ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:47af1d1a50ebeed443523822f7622c5fc787273ae9dafd08ead2c760389d6862
3
+ size 548054
data/imagej_macro/ImageJ_plugins/3D_viewer/VIB-lib-2.1.1.jar ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:c6ea31c1dfb7cd988534928269918dd744826e4904c3e7e913b5fba7cbaee582
3
+ size 635940
data/imagej_macro/ImageJ_plugins/3D_viewer/gluegen-rt-2.3.2-natives-linux-amd64.jar ADDED
Binary file (4.15 kB). View file
 
data/imagej_macro/ImageJ_plugins/3D_viewer/gluegen-rt-2.3.2-natives-macosx-universal.jar ADDED
Binary file (5.08 kB). View file
 
data/imagej_macro/ImageJ_plugins/3D_viewer/gluegen-rt-2.3.2-natives-windows-amd64.jar ADDED
Binary file (8.16 kB). View file
 
data/imagej_macro/ImageJ_plugins/3D_viewer/gluegen-rt-2.3.2.jar ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:084844543b18f7ff71b4c0437852bd22f0cb68d7e44c2c611c1bbea76f8c6fdf
3
+ size 345605
data/imagej_macro/ImageJ_plugins/3D_viewer/gluegen-rt-main-2.3.2.jar ADDED
Binary file (345 Bytes). View file
 
data/imagej_macro/ImageJ_plugins/3D_viewer/j3dcore-1.6.0-scijava-2.jar ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:53055934810716391f87fe5c0c9c73b9d5633baf3304a44a138d449a58908262
3
+ size 1944932
data/imagej_macro/ImageJ_plugins/3D_viewer/j3dutils-1.6.0-scijava-2.jar ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:95ccfcea340debcbd0e519dd65d556e36aba2e97cf8474c83fd2b1d03324a8f1
3
+ size 1047514
data/imagej_macro/ImageJ_plugins/3D_viewer/jogl-all-2.3.2-natives-linux-amd64.jar ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:82637302ae9effdf7d6f302e1050ad6aee3b13019914ddda5b502b9faa980216
3
+ size 224010
data/imagej_macro/ImageJ_plugins/3D_viewer/jogl-all-2.3.2-natives-macosx-universal.jar ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:ef1ecb7ab2d900ba5df4eb8f44c7f9975031c19244afbdafc874ab85d82ad3c3
3
+ size 443876
data/imagej_macro/ImageJ_plugins/3D_viewer/jogl-all-2.3.2-natives-windows-amd64.jar ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:8c53b1884cef19309d34fd10a94b010136d9d6de9a88c386f46006fb47acab5d
3
+ size 240721
data/imagej_macro/ImageJ_plugins/3D_viewer/jogl-all-2.3.2.jar ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:e74603dc77b4183f108480279dbbf7fed3ac206069478636406c1fb45e83b31a
3
+ size 3414448
data/imagej_macro/ImageJ_plugins/3D_viewer/jogl-all-main-2.3.2.jar ADDED
Binary file (345 Bytes). View file
 
data/imagej_macro/ImageJ_plugins/3D_viewer/vecmath-1.6.0-scijava-2.jar ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:9ff4ede53f0fd6e25dc32f8139640a14f7222bebaae45fc4bced6f51d797b8fd
3
+ size 164203
data/imagej_macro/ImageJ_plugins/readme.txt ADDED
@@ -0,0 +1,35 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ Please copy all folders here to ImageJ/plugins/ folder and relaunch ImageJ platform
2
+
3
+ For update, you just need to replace TissueJ4Merfish.jar in the folder spatial3dtissuej_plugin/ by the most updated plugin and relaunch ImageJ.
4
+
5
+
6
+
7
+ Other notes:
8
+ For 3D viewer, you can copy 3dviewer plugins folder into your plugins directory
9
+ or install the latest 3D viewer plugin from Fiji
10
+ Note: if the program does not work, the main issue may be due to a lack of supporting packages for the 3D viewer. In this case, you can delete your current 3D viewer version and replace it with the folder provided in this GitHub repository
11
+
12
+ For Fiji_plugins.jar you can use the latest version in your local ImageJ
13
+
14
+ When launching ImageJ, the platform will notify you if a plugin has multiple versions in the plugins folder, and will overwrite it using one version. In this case, go into your local ImageJ/plugins folder and remove the old version of the plugin
15
+
16
+ Acknowledgement:
17
+ See more at:
18
+ https://mcib3d.frama.io/3d-suite-imagej/
19
+ J. Ollion, J. Cochennec, F. Loll, C. Escudé, T. Boudier. (2013) TANGO: A Generic Tool for High-throughput 3D Image Analysis for Studying Nuclear Organization. Bioinformatics 2013 Jul 15;29(14):1840-1.
20
+
21
+ The 3D suite would like to thank P. Andrey, J.-F. Gilles and the developers of the following plugins :
22
+
23
+ Imagescience
24
+ LocalThickness
25
+ ConvexHull3D
26
+ 3D Object Counter
27
+ Droplet Counter
28
+
29
+ Links
30
+ BoneJ
31
+ 3D Shapes
32
+ LabKit
33
+ 3D Viewer
34
+ MorphoLibJ
35
+ CLIJ
data/imagej_macro/ImageJ_plugins/spatial3dtissuej_plugin/TissueJ4Merfish_v14.jar ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:7b4cbabea978a48df86ca4f3ace607854ca44c273a8a30cad872d4234f3807b7
3
+ size 841067
data/imagej_macro/ImageJ_plugins/utils/3D_Convex_Hull.jar ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:52c261d7c666b964ec6391a92739e7a65bc827e28a79983005e0883bf1741bb4
3
+ size 130741
data/imagej_macro/ImageJ_plugins/utils/Fiji_Plugins-3.1.1.jar ADDED
Binary file (97.4 kB). View file
 
data/imagej_macro/ImageJ_plugins/utils/SlideJ_.jar ADDED
Binary file (3.91 kB). View file
 
data/imagej_macro/ImageJ_plugins/utils/fiji-lib-2.1.2.jar ADDED
Binary file (92.9 kB). View file
 
data/imagej_macro/ImageJ_plugins/utils/imagescience.jar ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:6975dcc17d4d9e0dc618060e93266c816dc62764379fe46bae2f7c6eaecfe134
3
+ size 283410
data/imagej_macro/ImageJ_plugins/utils/mpicbg_-1.4.1.jar ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:a1c962431c6f7998d3e263a32308471128dd050373603d7419954fba456ce859
3
+ size 138726
data/imagej_macro/ImageJ_plugins/utils/quickhull3d-1.0.0.jar ADDED
Binary file (31.5 kB). View file
 
data/imagej_macro/bleed_throught_validate/bleed_throught_macro.ijm ADDED
@@ -0,0 +1,155 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
//// How to use
//// Open macro from ImageJ menu
//// Change parameter setting here, and run entire macro
//// For input folder browser, choose the input image folder
//// It takes ~20 seconds to run this macro
////
//// Pipeline overview (review note):
////   1. Binarize the source and target wavelength images: enhance contrast,
////      convert to 8-bit, apply a 5x5 high-pass convolution, then keep only
////      pixels with intensity 250-255 as "spots".
////   2. AND the two binary masks to get spots present in both channels.
////   3. Count mask pixels (histogram bin 255) and report bleed-through
////      percentages relative to the source and to the target image.

print("\\Clear");


// Parameters setting
source_image="merFISH_02_007_01_wavelength_561.TIFF"; // change parameter here
target_image="merFISH_02_007_01_wavelength_647.TIFF"; // change parameter here
suffixe=".TIFF"; // file extension, stripped below to build output names


print("----------------------------------------------------------------");
//dir = getArgument;
dir=getDirectory("mouse"); // open a browser, allow you to choose input directory
//dir = "yourlocaldir/bleed_throught_validate/raw/";
// ex: dir="/Users/htran/Documents/storage_tmp/merfish_XP2059/bleed_throught_validate/raw/";
if (dir=="")
exit ("No argument!");

print("Working dir: "+dir+"\n");

// Outputs go to a sibling "results" folder next to the input folder.
results_dir=File.getParent(dir)+"/results/";
if(!File.exists(results_dir))
File.mkdir(results_dir);


// Output base names are the input file names without the extension.
short_name_source = substring(source_image,0,lastIndexOf(source_image,suffixe));
short_name_target = substring(target_image,0,lastIndexOf(target_image,suffixe));
IJ.log(short_name_source);
IJ.log(short_name_target);

// Source image first
print("Loading image: "+source_image);
open(dir+source_image);
selectWindow(source_image);
run("Enhance Contrast", "saturated=0.35");
// Reduce to 8-bit so the 250-255 threshold below is meaningful.
if (bitDepth > 8) {run("8-bit");}
//run("Median...", "radius=2"); // in case you see lots of noises detected as signals
// 5x5 zero-sum kernel (center 24, surround -1): a high-pass filter that
// sharpens point-like spots before thresholding.
run("Convolve...", "text1=[-1 -1 -1 -1 -1\n-1 -1 -1 -1 -1\n-1 -1 24 -1 -1\n-1 -1 -1 -1 -1\n-1 -1 -1 -1 -1\n] normalize");

////only signals with intensity values from 250 to 255 are considered as signals, from my observation of spots and noise in images.
////You can use other thresholds, this macro just give an estimation, not provide accurate results for publication.
setThreshold(250, 255, "raw");
//setThreshold(250, 255);
setOption("BlackBackground", true);
run("Convert to Mask");
run("Grays");

selectWindow(source_image);
bin_source=short_name_source+"_BINARY"; //can open zip file from ImageJ to have tif format
saveAs("ZIP", results_dir+bin_source+".zip");
print("Deconvolution done!");


// Now the target image, same binarization steps.
print("Loading image: "+target_image);
open(dir+target_image);
selectWindow(target_image);
run("Enhance Contrast", "saturated=0.35");
if (bitDepth > 8) {run("8-bit");}

// ATTENTION: in case you see lots of noises detected as signals, because this image contains large amount of noises --> need to use median filter here
// If you don't see lots of noise, please comment median filter.
run("Median...", "radius=2");

run("Convolve...", "text1=[-1 -1 -1 -1 -1\n-1 -1 -1 -1 -1\n-1 -1 24 -1 -1\n-1 -1 -1 -1 -1\n-1 -1 -1 -1 -1\n] normalize");
setThreshold(250, 255, "raw");
//setThreshold(250, 255);
setOption("BlackBackground", true);
run("Convert to Mask");
run("Grays");

selectWindow(target_image);
bin_target=short_name_target+"_BINARY"; //can open zip file from ImageJ to have tif format
saveAs("ZIP",results_dir+bin_target+".zip");
print("Deconvolution done!");


// histogram here

//// First, extracting signals that are overlapped in source and target images
// Pixel-wise AND of the two binary masks; saveAs above renamed the windows
// to "<name>.tif".
imageCalculator("AND create", bin_source+".tif", bin_target+".tif");
selectWindow("Result of "+bin_source+".tif");
output_image="bleedthrought_signals"; //can open zip file from ImageJ to have tif format
saveAs("ZIP",results_dir+output_image+".zip");


//Counting number of spots in source image
// NOTE(review): counts[255] is the number of white (255) pixels in the mask;
// calling this "spots" assumes roughly one pixel per spot — confirm.
selectWindow(bin_source+".tif");
nBins = 256;
getHistogram(values, counts, nBins);
source_spots=counts[255]; // number of spots
IJ.log("Source image is: "+source_image);
IJ.log("Number of spots in source image is: "+source_spots);

//Counting number of spots in bleedthrought image
selectWindow(output_image+".tif");
getHistogram(values, counts, nBins);
bleedthrought_spots=counts[255]; // number of spots
IJ.log("Number of spots that bleed throught other wavelength channel is: "+bleedthrought_spots);

//Counting number of spots in target image
selectWindow(bin_target+".tif");
getHistogram(values, counts, nBins);
target_spots=counts[255]; // number of spots
IJ.log("Target image is: "+target_image);
IJ.log("Number of spots in target image is: "+target_spots);

// Bleed-through relative to the source image.
pct_bleed=100*bleedthrought_spots/source_spots;
IJ.log("Percentage of bleed throught is: "+pct_bleed);

// Bleed-through relative to the target image.
// NOTE(review): this log message is identical to the previous one although
// the value is relative to the target — consider distinguishing the text.
pct_bleed_target=100*bleedthrought_spots/target_spots;
IJ.log("Percentage of bleed throught is: "+pct_bleed_target);

// Save results into a csv file
setResult("source_img", 0, source_image);
setResult("target_img", 0, target_image);
setResult("pct_bleedthrought_source", 0, pct_bleed);
setResult("pct_bleedthrought_target", 0, pct_bleed_target);
updateResults();

selectWindow("Results");
saveAs("Results",results_dir+"bleed_throught_report.csv");
selectWindow("Results");
run("Close");
print("Save output into the folder: "+results_dir);

run("Close All");

print("Completed");
print("----------------------------------------------------------------");
data/imagej_macro/bleed_throught_validate/raw/merFISH_02_007_01_wavelength_561.TIFF ADDED

Git LFS Details

  • SHA256: 224c7e7ed6eb8e4a3dc655412cab88971a255dcc72d43433a0fe746656f20260
  • Pointer size: 132 Bytes
  • Size of remote file: 5.17 MB
data/imagej_macro/bleed_throught_validate/raw/merFISH_02_007_01_wavelength_647.TIFF ADDED

Git LFS Details

  • SHA256: 8d1ab714c0baf8900a2b8e2f6e5d76b2a3308f16ed301567b7636ee55d40225f
  • Pointer size: 132 Bytes
  • Size of remote file: 5.17 MB
data/imagej_macro/bleed_throught_validate/results/bleed_throught_report.csv ADDED
@@ -0,0 +1,2 @@
 
 
 
1
+ ,source_img,target_img,pct_bleedthrought_source,pct_bleedthrought_target
2
+ 1,merFISH_02_007_01_wavelength_561.TIFF,merFISH_02_007_01_wavelength_647.TIFF,0.258,17.386
data/imagej_macro/bleed_throught_validate/results/bleedthrought_signals.zip ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:169ff3492e5cff7155db973c062af8d630354b5898c2609de67e47438628b843
3
+ size 3099
data/imagej_macro/bleed_throught_validate/results/merFISH_02_007_01_wavelength_561_BINARY.zip ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:7cf64e0aa94e2cd897db6da10ae9e84cd84c700b8539e7f97fc947b84ef10712
3
+ size 72243
data/imagej_macro/bleed_throught_validate/results/merFISH_02_007_01_wavelength_647_BINARY.zip ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:81d81d26f2259ef6cb28be1943b649b131f70f8683b25cb57c3b5bc9eeaf202d
3
+ size 3953
data/imagej_macro/blur_detector/dataset1/merFISH_01_025_05.TIFF ADDED

Git LFS Details

  • SHA256: 87d350e4df369288588e2e235342641861845a827a7893a9e3bb21d6a4715b05
  • Pointer size: 131 Bytes
  • Size of remote file: 500 kB
data/imagej_macro/blur_detector/dataset1/merFISH_08_025_05.TIFF ADDED

Git LFS Details

  • SHA256: b165e1ea8c361cad39638197ffccb95ab68de06223b6bf86e8ed9b186e811ead
  • Pointer size: 131 Bytes
  • Size of remote file: 500 kB
data/imagej_macro/blur_detector/dataset2/merFISH_05_025_05.TIFF ADDED

Git LFS Details

  • SHA256: a7ab993b728c287b4506bfe2792f724fa8bca5632c47e88aba807c3b317d27cb
  • Pointer size: 131 Bytes
  • Size of remote file: 500 kB
data/imagej_macro/blur_detector/dataset2/merFISH_06_025_05.TIFF ADDED

Git LFS Details

  • SHA256: d753615bc135dd70608e3bd09fbca6caa56e18eb97797f9bb26ff527b677378d
  • Pointer size: 131 Bytes
  • Size of remote file: 500 kB
data/imagej_macro/blur_detector/detecting_blur_image.R ADDED
@@ -0,0 +1,85 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ suppressPackageStartupMessages({
2
+ # library(tidyverse)
3
+ library(dplyr)
4
+ library(ggplot2)
5
+ # library(igraph)
6
+ # library(ggraph)
7
+ library(data.table)
8
+ library(RColorBrewer)
9
+ })
10
+
11
+
12
# Read the per-image histogram CSVs in `input_dir` and summarise each image:
# the percentage of pixels counted as signal and the number of over-exposed
# pixels (the 'greater_30' bin).  Returns one tibble row per histogram file.
# NOTE(review): assumes each CSV has columns `intensity_val` and `counts`
# — confirm against the macro that writes these files.
read_input_data <- function(input_dir, total_pixels){
  fns <- list.files(input_dir)
  fns <- fns[grepl('.csv',fns)]  # keep only csv files (pattern is an unanchored regex)
  print("Number of csv histogram files: ")
  print(length(fns))
  # Pixels with intensity <= this threshold are treated as background.
  good_signals_intensity_thrs <- 5
  summary_stat <- tibble::tibble()
  for(fn in fns){
    df <- data.table::fread(paste0(input_dir, fn))
    # Count of over-exposed pixels, stored under the literal label 'greater_30'.
    nbOverExp <- df %>%
      dplyr::filter(intensity_val=='greater_30') %>%
      dplyr::pull(counts)
    # Total signal pixels: numeric intensity bins above the background threshold.
    total_counts <- df %>%
      dplyr::filter(intensity_val!='greater_30')%>%
      dplyr::mutate(intensity_val=as.numeric(intensity_val)) %>%
      dplyr::filter(intensity_val>good_signals_intensity_thrs)%>%
      dplyr::summarise(total_counts=sum(counts)) %>%
      dplyr::pull(total_counts)

    stat <- tibble::tibble(image_fn=fn,
               pct_signals=round(100*total_counts/total_pixels,2),
               nb_overExp=nbOverExp)
    summary_stat <- dplyr::bind_rows(summary_stat, stat)


  }
  return(summary_stat)
}
40
+
41
get_stat <- function(summary_stat, datatag, save_dir, total_pixels = 1608 * 1608){
  # Flag outlier FOVs from the per-image summary statistics and write them to
  # <save_dir><datatag>_outliers.csv.
  #
  # Args:
  #   summary_stat: tibble with columns image_fn, pct_signals, nb_overExp
  #                 (as produced by read_input_data).
  #   datatag:      dataset label used in the output file name.
  #   save_dir:     output directory; must end with '/' (paste0 is used).
  #   total_pixels: pixels per image. FIX: the original read `total_pixels`
  #                 from the global environment; it is now an explicit
  #                 parameter whose default (1608 x 1608) matches the driver
  #                 script, so existing three-argument calls behave the same.
  outliers <- tibble::tibble()
  summary_stat$pct_overExp <- round(100*summary_stat$nb_overExp/total_pixels,3)

  ## FOVs with an unusually high fraction of signal pixels (top 5%).
  outliers_top_thrs <- 0.95
  outliers_top_FOVs <- summary_stat %>%
    dplyr::filter(pct_signals > quantile(pct_signals, outliers_top_thrs))
  print('Amount of outliers with large number of signals: ')
  print(dim(outliers_top_FOVs))
  outliers_top_FOVs$desc <- 'high_pct_signals'
  outliers <- dplyr::bind_rows(outliers, outliers_top_FOVs)

  ## FOVs with unusually low signal (bottom 5%).
  outliers_bottom_thrs <- 0.05
  outliers_bottom_FOVs <- summary_stat %>%
    dplyr::filter(pct_signals < quantile(pct_signals, outliers_bottom_thrs))
  print('Amount of outliers with low signals: ')
  print(dim(outliers_bottom_FOVs))
  outliers_bottom_FOVs$desc <- 'low_pct_signals'
  outliers <- dplyr::bind_rows(outliers, outliers_bottom_FOVs)

  ## FOVs whose fraction of over-exposed pixels suggests an imaging artifact.
  artifact_thres <- 0.3
  outliers_artifacts <- summary_stat %>%
    dplyr::filter(pct_overExp>=artifact_thres)
  outliers_artifacts$desc <- 'contain_artifacts'
  outliers <- dplyr::bind_rows(outliers, outliers_artifacts)
  print('Amount of artifacts: ')
  print(dim(outliers_artifacts))
  data.table::fwrite(outliers, paste0(save_dir, datatag, '_outliers.csv'))

  # Also return the outlier table (invisibly) so callers can inspect it
  # without relying on the CSV; the original returned nothing useful.
  invisible(outliers)
}
78
+
79
+
80
## Driver: summarise all histogram CSVs for one dataset and write the
## outlier report next to the input files.
total_pixels = 1608 * 1608  # images are 1608 x 1608 pixels
datatag <- 'XP2509'
input_dir <- '/Users/htran/Documents/merfish_temp_storage/testing/results/'  # must end with '/'
summary_stat <- read_input_data(input_dir, total_pixels)
save_dir <- input_dir
get_stat(summary_stat, datatag, save_dir)
data/imagej_macro/blur_detector/execute_blur_detector_dataset1.sh ADDED
@@ -0,0 +1,59 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
#!/bin/sh

# Batch-run the blur-detector ImageJ macro on dataset1, logging to a file.
# batch macro input_filename
# Ref: page 20: https://imagej.nih.gov/ij/docs/macro_reference_guide.pdf

## Configuration MacOS
imagej_dir="/Applications/ImageJ.app" ## MacOS ImageJ java run folder
macro_dir="/Users/hoatran/Documents/BCCRC_projects/merfish/datasets_reduced_size/blur_detector/"
project_dir="/Users/hoatran/Documents/BCCRC_projects/merfish/datasets_reduced_size/blur_detector/"

series="dataset1/"
task="blur_detector1"

## ImageJ / Fiji can be installed to Applications, or put into any folder on your drive.
# imagej_dir="/Applications/Fiji.app"              ## MacOS Fiji java run folder
# imagej_dir="/Users/htran/Downloads/Fiji.app/"    ## MacOS Fiji java run folder - example

## ImageJ executable jar; in general the file name is ij.jar
## (Fiji ships a versioned jar, e.g. jars/ij-1.53q.jar).
imagej_exe_file="${imagej_dir}/Contents/Java/ij.jar"

macro_fn="${macro_dir}macro_blur_detector.ijm"

log_file="${macro_dir}${task}.log"
echo "$log_file"
# Append stdout/stderr of everything below to the log file.
# FIX: the original ran `... && tail $log_file` after exec, which only
# re-appended the log's own tail back into itself; dropped.
exec >> "$log_file" 2>&1

input_dir="${project_dir}${series}"
# FIX: use printf; echo "\n" escape handling is not portable under #!/bin/sh.
printf '__________________________________\n'
printf 'Input directory is:\n'
printf '%s\n' "$input_dir"
printf 'Blur detector\n'

## You can raise the heap (-Xmx), ex: 20000m or 30000m, so the program runs faster.
## Requires a Java runtime (jre) installed on your computer; otherwise use the
## jre bundled with Fiji, e.g.:
# "${imagej_dir}java/macosx/adoptopenjdk-8.jdk/jre/Contents/Home/bin/java" -Xmx20000m -jar "$imagej_exe_file" -ijpath "$imagej_dir/" -batch "$macro_fn" "$input_dir"
java -Xmx10000m -jar "$imagej_exe_file" -ijpath "$imagej_dir/" -batch "$macro_fn" "$input_dir"

## Linux background mode: use xvfb-run on a headless server (graphical env);
## on a local computer the plain java command is enough:
# xvfb-run -a java -Xmx15000m -jar "$imagej_dir/ij.jar" -ijpath "$imagej_dir/" -batch "$macro_fn" "$input_dir"

# FIX: the original printed "Nucleus Segmentation Completed!" -- a copy-paste
# leftover; this script performs blur detection.
printf 'Blur detection completed!\n'
printf '__________________________________\n'
data/imagej_macro/blur_detector/execute_blur_detector_dataset2.sh ADDED
@@ -0,0 +1,59 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
#!/bin/sh

# Batch-run the blur-detector ImageJ macro on dataset2, logging to a file.
# batch macro input_filename
# Ref: page 20: https://imagej.nih.gov/ij/docs/macro_reference_guide.pdf

## Configuration MacOS
imagej_dir="/Applications/ImageJ.app" ## MacOS ImageJ java run folder
macro_dir="/Users/hoatran/Documents/BCCRC_projects/merfish/datasets_reduced_size/blur_detector/"
project_dir="/Users/hoatran/Documents/BCCRC_projects/merfish/datasets_reduced_size/blur_detector/"

series="dataset2/"
task="blur_detector2"

## ImageJ / Fiji can be installed to Applications, or put into any folder on your drive.
# imagej_dir="/Applications/Fiji.app"              ## MacOS Fiji java run folder
# imagej_dir="/Users/htran/Downloads/Fiji.app/"    ## MacOS Fiji java run folder - example

## ImageJ executable jar; in general the file name is ij.jar
## (Fiji ships a versioned jar, e.g. jars/ij-1.53q.jar).
imagej_exe_file="${imagej_dir}/Contents/Java/ij.jar"

macro_fn="${macro_dir}macro_blur_detector.ijm"

log_file="${macro_dir}${task}.log"
echo "$log_file"
# Append stdout/stderr of everything below to the log file.
# FIX: the original ran `... && tail $log_file` after exec, which only
# re-appended the log's own tail back into itself; dropped.
exec >> "$log_file" 2>&1

input_dir="${project_dir}${series}"
# FIX: use printf; echo "\n" escape handling is not portable under #!/bin/sh.
printf '__________________________________\n'
printf 'Input directory is:\n'
printf '%s\n' "$input_dir"
printf 'Blur detector\n'

## You can raise the heap (-Xmx), ex: 20000m or 30000m, so the program runs faster.
## Requires a Java runtime (jre) installed on your computer; otherwise use the
## jre bundled with Fiji, e.g.:
# "${imagej_dir}java/macosx/adoptopenjdk-8.jdk/jre/Contents/Home/bin/java" -Xmx20000m -jar "$imagej_exe_file" -ijpath "$imagej_dir/" -batch "$macro_fn" "$input_dir"
java -Xmx10000m -jar "$imagej_exe_file" -ijpath "$imagej_dir/" -batch "$macro_fn" "$input_dir"

## Linux background mode: use xvfb-run on a headless server (graphical env);
## on a local computer the plain java command is enough:
# xvfb-run -a java -Xmx15000m -jar "$imagej_dir/ij.jar" -ijpath "$imagej_dir/" -batch "$macro_fn" "$input_dir"

# FIX: the original printed "Nucleus Segmentation Completed!" -- a copy-paste
# leftover; this script performs blur detection.
printf 'Blur detection completed!\n'
printf '__________________________________\n'