-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathpipeline.py
executable file
·311 lines (253 loc) · 8.14 KB
/
pipeline.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
#!/usr/bin/env python3
"""Pipeline for parsing DICOM and i-contour files.

Part 1 pairs each DICOM image with its i-contour mask and iterates the
pairs; Part 2 feeds the pairs out in batches loaded in worker processes.
"""
# Set to True for verbose output
DEBUG = False
# Absolute or relative path to data directory
DATA_DIR = 'final_data'
# Name of csv file containing links between dicoms and contours
LINK_FNAME = 'link.csv'
# Name of column in link file containing DICOM names
DICOM_KEY = 'patient_id'
# Name of column in link file containing contour names
CONTOUR_KEY = 'original_id'
# Alpha transparency of mask overlaid on DICOM
MASK_OVERLAY_ALPHA = 0.25
# Number of image/target pairs to load per batch
BATCH_SIZE = 8
import argparse
import csv
import numpy as np
import logging
import multiprocessing as mp
import sys
import time
from matplotlib import pyplot as plt
from os import listdir, getpid
from os.path import isfile, join
from pprint import pformat
from random import shuffle, seed
# Project-local parsers (see parsing.py).
from parsing import parse_contour_file, parse_dicom_file, poly_to_mask
# Module-wide logger; verbosity is controlled by the DEBUG flag above.
_log_format = '%(asctime)s : %(name)s : %(levelname)s : %(message)s'
logging.basicConfig(format=_log_format)
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG if DEBUG else logging.INFO)
# Derived data sub-directories for the two file kinds.
DICOM_DIR = join(DATA_DIR, 'dicoms')
CONTOUR_DIR = join(DATA_DIR, 'contourfiles')
#########
# Part 1
#########
def get_dicom_mask_tups(
    dicom_dir=DICOM_DIR,
    contour_dir=CONTOUR_DIR,
):
    """Yield (image, mask) tuples for every linked dicom/contour pair.

    Resolves the dicom/contour path pairings first, then lazily loads and
    yields each pair in order.
    """
    pairs = _get_dicom_contour_path_tups(dicom_dir, contour_dir)
    for pair in _load_dicom_contour_paths(pairs):
        yield pair
#########
# Part 2
#########
class BatchFeeder(object):
    """Load batches one by one in a separate process.

    On construction the full (shuffled) list of dicom/contour path pairs is
    split into batches of at most `batch_size` pairs, and loading of the
    first batch is started immediately.  `get_next_batch` then overlaps
    loading of the next batch with consumption of the current one.
    """

    def __init__(self,
                 dicom_dir=DICOM_DIR,
                 contour_dir=CONTOUR_DIR,
                 batch_size=BATCH_SIZE,
                 ):
        self._batch_num = 0
        # Manager queue so worker processes can hand loaded batches back.
        self._q = mp.Manager().Queue()
        path_tups = _get_dicom_contour_path_tups(
            dicom_dir, contour_dir, randomize=True)
        # BUGFIX: np.array_split(a, n) splits into n chunks, not chunks of
        # size n.  BATCH_SIZE is documented as "pairs per batch", so compute
        # the number of batches (ceil division) and split into that many.
        n_batches = max(1, -(-len(path_tups) // batch_size))
        self._path_tups_chunked = np.array_split(path_tups, n_batches)
        # pre-load first batch
        self._have_next_batch = self._load_next_batch()

    def _load_batch(self, path_tups):
        '''Load the files at the given paths (runs in a worker process).'''
        pid = getpid()
        logger.debug('worker %d loading batch of len %s' % (pid, len(path_tups)))
        gen = _load_dicom_contour_paths(path_tups)
        images, targets = [], []
        for image, target in gen:
            images.append(image)
            targets.append(target)
        logger.debug('worker %s batch loaded' % pid)
        # Hand the stacked batch back to the parent via the shared queue.
        self._q.put((np.array(images), np.array(targets)))

    def _load_next_batch(self):
        '''Start a process to load the next batch; return True if more remain.'''
        if self._batch_num < len(self._path_tups_chunked):
            path_tups = self._path_tups_chunked[self._batch_num]
            p = mp.Process(target=self._load_batch, args=(path_tups,))
            p.start()
            self._batch_num += 1
        return self._batch_num < len(self._path_tups_chunked)

    def get_next_batch(self):
        '''Start loading next batch, and yield previously loaded batch.'''
        while self._have_next_batch:
            self._have_next_batch = self._load_next_batch()
            yield self._q.get()
        # we should have one more result waiting to be yielded
        yield self._q.get()
        # BUGFIX: removed `raise StopIteration` -- under PEP 479 (Python
        # 3.7+) raising StopIteration inside a generator becomes a
        # RuntimeError; a generator signals exhaustion by returning.
###################
# Helper functions
###################
def _get_cid_by_did(
    link_path,
    dicom_key=DICOM_KEY,
    contour_key=CONTOUR_KEY,
):
    '''Read links between dicoms and contours'''
    # Build the dicom-id -> contour-id map in one pass over the csv; a
    # repeated dicom id overwrites the earlier row, same as the original.
    with open(link_path, 'r') as f:
        cid_by_did = {
            row[dicom_key]: row[contour_key]
            for row in csv.DictReader(f)
        }
    logger.debug('cid_by_did: %s' % cid_by_did)
    return cid_by_did
def _get_path_by_id(base_path, get_id_func):
    '''Return a dict of id: path for regular files under the given base path.

    Files whose name cannot be parsed by get_id_func, and files producing a
    duplicate id, are skipped with a warning.
    '''
    logger.debug('base_path: %s' % base_path)
    path_by_id = {}
    for f in listdir(base_path):
        f_path = join(base_path, f)
        # Skip directories and other non-regular entries.
        if not isfile(f_path):
            continue
        try:
            _id = get_id_func(f)
        # BUGFIX: narrowed from a bare `except:`, which also swallowed
        # KeyboardInterrupt/SystemExit.  get_id_func is an arbitrary
        # callable, so Exception is as narrow as we can safely go.
        except Exception:
            # BUGFIX: logger.warn is a deprecated alias of logger.warning.
            logger.warning('Unable to parse id for file %s' % f)
            continue
        logger.debug('id: %s, f_path: %s' % (_id, f_path))
        if _id in path_by_id:
            logger.warning('duplicate _id %s' % _id)
            continue
        path_by_id[_id] = f_path
    return path_by_id
def _get_dicom_contour_path_tups(
    dicom_dir,
    contour_dir,
    link_path=join(DATA_DIR, LINK_FNAME),
    randomize=False,
    random_seed=None,
):
    '''Returns a list of dicom/contour path tuples.

    Pairs are formed per patient (sorted by dicom id for determinism) from
    the slice ids common to the dicom and i-contour directories.  When
    `randomize` is set the result is shuffled, optionally seeded with
    `random_seed` (note: this seeds the global `random` state).
    '''
    cid_by_did = _get_cid_by_did(link_path)
    path_tups = []
    # IMPROVED: iterate sorted items directly instead of sorting the keys
    # and looking each one up again.
    for d_id, c_id in sorted(cid_by_did.items()):
        # read dicom paths; filenames look like "<n>.dcm"
        dicom_path_base = join(dicom_dir, d_id)
        dicom_path_by_id = _get_path_by_id(
            dicom_path_base,
            lambda f: int(f.split('.')[0])
        )
        # read contour paths; slice number is the third dash-separated field
        contour_path_base = join(contour_dir, c_id, 'i-contours')
        contour_path_by_id = _get_path_by_id(
            contour_path_base,
            lambda f: int(f.split('-')[2])
        )
        # pair up common ids
        dicom_ids = set(dicom_path_by_id.keys())
        contour_ids = set(contour_path_by_id.keys())
        # IMPROVED: sorted() accepts any iterable -- the intermediate list()
        # was redundant.
        common_ids = sorted(dicom_ids.intersection(contour_ids))
        logger.debug('common_ids: %s' % common_ids)
        for _id in common_ids:
            dicom_path = dicom_path_by_id[_id]
            contour_path = contour_path_by_id[_id]
            path_tups.append((dicom_path, contour_path))
    # shuffle
    if randomize:
        if random_seed is not None:
            logger.debug('setting random seed: %s' % random_seed)
            seed(random_seed)
        shuffle(path_tups)
    logger.debug('path_tups:\n%s' % pformat(path_tups))
    return path_tups
def _make_masked_dicom(i, dicom, mask, show=False, save=False):
    '''Render DICOM, mask and overlay side by side; optionally show/save.

    No-op unless at least one of `show`/`save` is set.  Saved files are
    named tmp_%04d.png using the given index `i`.
    '''
    if not (show or save):
        return
    # BUGFIX: start from a clean figure each call -- repeated plt.subplot()
    # calls otherwise keep stacking images and titles onto the same axes and
    # grow memory across many invocations.
    plt.clf()
    ax = plt.subplot(1, 3, 1)
    plt.imshow(dicom)
    ax.set_title('DICOM')
    ax = plt.subplot(1, 3, 2)
    plt.imshow(mask)
    ax.set_title('Mask')
    ax = plt.subplot(1, 3, 3)
    plt.imshow(dicom)
    plt.imshow(mask, alpha=MASK_OVERLAY_ALPHA)
    ax.set_title('Overlay')
    if show:
        logger.info('Displaying masked dicom %s' % i)
        plt.show()
    if save:
        filename = 'tmp_%04d.png' % i
        logger.info('Saving masked dicom %s' % filename)
        plt.savefig(filename)
def _load_dicom_contour_paths(path_tups):
    '''Returns an iterator over image/mask tuples loaded from the given paths'''
    # IMPROVED: dropped the unused enumerate index.
    for dicom_path, contour_path in path_tups:
        # read dicom data
        logger.debug('loading dicom_path: %s' % dicom_path)
        dcm_dict = parse_dicom_file(dicom_path)
        dicom = dcm_dict['pixel_data']
        # BUGFIX: numpy shape is (rows, cols) == (height, width); the
        # original unpacked it as `width, height`, silently transposing the
        # mask for any non-square image (harmless only while images are
        # square).  NOTE(review): assumes poly_to_mask(poly, width, height)
        # builds a width-by-height image -- confirm against parsing.py.
        height, width = dicom.shape
        # read contour data
        logger.debug('loading contour_path: %s' % contour_path)
        coords_list = parse_contour_file(contour_path)
        mask = poly_to_mask(coords_list, width, height)
        yield (dicom, mask)
def run_part_1(show_masked_dicoms=False, save_masked_dicoms=False):
    '''Iterate every dicom/mask pair, logging shapes and optionally rendering.'''
    for idx, (img, msk) in enumerate(get_dicom_mask_tups()):
        logger.info('iteration %d, dicom.shape: %s, mask.shape: %s' % (
            idx, img.shape, msk.shape))
        _make_masked_dicom(
            idx, img, msk,
            show=show_masked_dicoms,
            save=save_masked_dicoms)
def run_part_2(show_masked_dicoms=False, save_masked_dicoms=False):
    '''Consume batches from a BatchFeeder, optionally rendering each pair.'''
    batch_feeder = BatchFeeder()
    # BUGFIX: keep a running pair counter.  The original used i*N + j with
    # the *current* batch's N, which repeats indices (and overwrites saved
    # image files) whenever batches have unequal sizes.
    pair_idx = 0
    for i, (dicom, mask) in enumerate(batch_feeder.get_next_batch()):
        logger.info('iteration %d, dicom.shape: %s, mask.shape: %s' % (
            i, dicom.shape, mask.shape))
        for j in range(dicom.shape[0]):
            _make_masked_dicom(pair_idx, dicom[j, :, :], mask[j, :, :],
                               show=show_masked_dicoms,
                               save=save_masked_dicoms)
            pair_idx += 1
        # simulate training
        logger.info('training...')
        time.sleep(1)
def main():
    '''Parse command-line flags and dispatch to part 1 and/or part 2.'''
    parser = argparse.ArgumentParser()
    parser.add_argument('-a', help='Run part 1', action='store_true')
    parser.add_argument('-b', help='Run part 2', action='store_true')
    parser.add_argument('-s', help='Show masked dicoms', action='store_true')
    parser.add_argument('-w', help='Write masked dicoms to disk', action='store_true')
    args = parser.parse_args()
    # With no arguments at all, print usage and bail out.
    if len(sys.argv) == 1:
        parser.print_help()
        sys.exit()
    if args.a:
        run_part_1(args.s, args.w)
    if args.b:
        run_part_2(args.s, args.w)
# Script entry point: only run when executed directly, not when imported.
if __name__ == '__main__':
    main()