diff --git a/.gitignore b/.gitignore index 0010388..0a34032 100644 --- a/.gitignore +++ b/.gitignore @@ -1,9 +1,4 @@ -*.pickle -*.ipynb_checkpoints* -*.pyc -*.pkl -*.log -*.png -*.jpg -__pycache__/ - +**__pycache__ +**/__pycache__/* +.idea/** +**/experiments/* diff --git a/CodeDoc.odt b/CodeDoc.odt deleted file mode 100644 index 880f0c7..0000000 Binary files a/CodeDoc.odt and /dev/null differ diff --git a/assets/annotations.png b/assets/annotations.png new file mode 100644 index 0000000..bf615eb Binary files /dev/null and b/assets/annotations.png differ diff --git a/assets/baseline_figure.png b/assets/baseline_figure.png new file mode 100644 index 0000000..2e6f71c Binary files /dev/null and b/assets/baseline_figure.png differ diff --git a/assets/hist_example.png b/assets/hist_example.png new file mode 100644 index 0000000..26ccea9 Binary files /dev/null and b/assets/hist_example.png differ diff --git a/assets/prediction_pipeline.png b/assets/prediction_pipeline.png new file mode 100644 index 0000000..1ed3a03 Binary files /dev/null and b/assets/prediction_pipeline.png differ diff --git a/assets/regrcnn.png b/assets/regrcnn.png new file mode 100644 index 0000000..93a15d3 Binary files /dev/null and b/assets/regrcnn.png differ diff --git a/assets/retu_figure.png b/assets/retu_figure.png new file mode 100644 index 0000000..cb1348f Binary files /dev/null and b/assets/retu_figure.png differ diff --git a/assets/teaser.png b/assets/teaser.png new file mode 100644 index 0000000..6b36da5 Binary files /dev/null and b/assets/teaser.png differ diff --git a/assets/toy_readme.png b/assets/toy_readme.png new file mode 100644 index 0000000..a0c61b6 Binary files /dev/null and b/assets/toy_readme.png differ diff --git a/assets/train_gen.png b/assets/train_gen.png new file mode 100644 index 0000000..f575dd1 Binary files /dev/null and b/assets/train_gen.png differ diff --git a/assets/wcs_hists.png b/assets/wcs_hists.png new file mode 100644 index 0000000..4565a57 Binary files /dev/null and b/assets/wcs_hists.png differ diff --git a/assets/wcs_readme.png b/assets/wcs_readme.png new file mode 100644 index 0000000..99384e1 Binary files /dev/null and b/assets/wcs_readme.png differ diff --git a/assets/wcs_sketch.png b/assets/wcs_sketch.png new file mode 100644 index 0000000..919d1ef Binary files /dev/null and b/assets/wcs_sketch.png differ diff --git a/assets/wcs_text.png b/assets/wcs_text.png new file mode 100644 index 0000000..75764a5 Binary files /dev/null and b/assets/wcs_text.png differ diff --git a/code_optim/code_optim.py b/code_optim/code_optim.py deleted file mode 100644 index 2702b3c..0000000 --- a/code_optim/code_optim.py +++ /dev/null @@ -1,328 +0,0 @@ -""" -Created at 04/02/19 13:50 -@author: gregor -""" -import plotting as plg - -import sys -import os -import pickle -import json, socket, subprocess, time, threading - -import numpy as np -import pandas as pd -import torch -from collections import OrderedDict -from matplotlib.lines import Line2D - -import utils.exp_utils as utils -import utils.model_utils as mutils -from predictor import Predictor -from evaluator import Evaluator - - -""" -Need to start this script as sudo for background logging thread to work (needs to set niceness<0) -""" - - -def measure_train_batch_loading(logger, batch_gen, iters=1, warm_up=20, is_val=False, out_dir=None): - torch.cuda.empty_cache() - timer_key = "val_fw" if is_val else "train_fw" - for i in range(warm_up): - batch = next(batch_gen) - print("\rloaded warm-up batch {}/{}".format(i+1, warm_up), end="", flush=True) - 
sysmetrics_start_ix = len(logger.sysmetrics.index) - for i in range(iters): - logger.time(timer_key) - batch = next(batch_gen) - print("\r{} batch {} loading took {:.3f}s.".format("val" if is_val else "train", i+1, - logger.time(timer_key)), end="", flush=True) - print("Total avg fw {:.2f}s".format(logger.get_time(timer_key)/iters)) - if out_dir is not None: - assert len(logger.sysmetrics[sysmetrics_start_ix:-1]) > 0, "train loading: empty df" - logger.sysmetrics[sysmetrics_start_ix:-1].to_pickle(os.path.join( - out_dir,"{}_loading.pickle".format("val" if is_val else "train"))) - return logger.sysmetrics[sysmetrics_start_ix:-1] - - -def measure_RPN(logger, net, batch, iters=1, warm_up=20, out_dir=None): - torch.cuda.empty_cache() - data = torch.from_numpy(batch["data"]).float().cuda() - fpn_outs = net.fpn(data) - rpn_feature_maps = [fpn_outs[i] for i in net.cf.pyramid_levels] - - for i in range(warm_up): - layer_outputs = [net.rpn(p_feats) for p_feats in rpn_feature_maps] - print("\rfinished warm-up batch {}/{}".format(i+1, warm_up), end="", flush=True) - sysmetrics_start_ix = len(logger.sysmetrics.index) - for i in range(iters): - logger.time("RPN_fw") - layer_outputs = [net.rpn(p_feats) for p_feats in rpn_feature_maps] - print("\r{} batch took {:.3f}s.".format("RPN", logger.time("RPN_fw")), end="", flush=True) - print("Total avg fw {:.2f}s".format(logger.get_time("RPN_fw")/iters)) - - if out_dir is not None: - assert len(logger.sysmetrics[sysmetrics_start_ix:-1])>0, "six {}, sysm ix {}".format(sysmetrics_start_ix, logger.sysmetrics.index) - logger.sysmetrics[sysmetrics_start_ix:-1].to_pickle(os.path.join(out_dir,"RPN_msrmts.pickle")) - return logger.sysmetrics[sysmetrics_start_ix:-1] - -def measure_FPN(logger, net, batch, iters=1, warm_up=20, out_dir=None): - torch.cuda.empty_cache() - data = torch.from_numpy(batch["data"]).float().cuda() - for i in range(warm_up): - outputs = net.fpn(data) - print("\rfinished warm-up batch {}/{}".format(i+1, warm_up), end="", flush=True) - sysmetrics_start_ix = len(logger.sysmetrics.index) - for i in range(iters): - logger.time("FPN_fw") - outputs = net.fpn(data) - #print("in mean thread", logger.sysmetrics.index) - print("\r{} batch took {:.3f}s.".format("FPN", logger.time("FPN_fw")), end="", flush=True) - print("Total avg fw {:.2f}s".format(logger.get_time("FPN_fw")/iters)) - - if out_dir is not None: - assert len(logger.sysmetrics[sysmetrics_start_ix:-1])>0, "six {}, sysm ix {}".format(sysmetrics_start_ix, logger.sysmetrics.index) - logger.sysmetrics[sysmetrics_start_ix:-1].to_pickle(os.path.join(out_dir,"FPN_msrmts.pickle")) - return logger.sysmetrics[sysmetrics_start_ix:-1] - -def measure_forward(logger, net, batch, iters=1, warm_up=20, out_dir=None): - torch.cuda.empty_cache() - data = torch.from_numpy(batch["data"]).float().cuda() - for i in range(warm_up): - outputs = net.forward(data) - print("\rfinished warm-up batch {}/{}".format(i+1, warm_up), end="", flush=True) - sysmetrics_start_ix = len(logger.sysmetrics.index) - for i in range(iters): - logger.time("net_fw") - outputs = net.forward(data) - print("\r{} batch took {:.3f}s.".format("forward", logger.time("net_fw")), end="", flush=True) - print("Total avg fw {:.2f}s".format(logger.get_time("net_fw")/iters)) - if out_dir is not None: - assert len(logger.sysmetrics[sysmetrics_start_ix:-1]) > 0, "fw: empty df" - logger.sysmetrics[sysmetrics_start_ix:-1].to_pickle(os.path.join(out_dir,"fw_msrmts.pickle")) - return logger.sysmetrics[sysmetrics_start_ix:-1].copy() - -def 
measure_train_forward(logger, net, batch, iters=1, warm_up=20, is_val=False, out_dir=None): - torch.cuda.empty_cache() - timer_key = "val_fw" if is_val else "train_fw" - optimizer = torch.optim.Adam(net.parameters(), lr=cf.learning_rate[0], weight_decay=cf.weight_decay) - for i in range(warm_up): - results_dict = net.train_forward(batch) - print("\rfinished warm-up batch {}/{}".format(i+1, warm_up), end="", flush=True) - sysmetrics_start_ix = len(logger.sysmetrics.index) - for i in range(iters): - logger.time(timer_key) - if not is_val: - optimizer.zero_grad() - results_dict = net.train_forward(batch, is_validation=is_val) - #results_dict["torch_loss"] *= torch.rand(1).cuda() - if not is_val: - results_dict["torch_loss"].backward() - optimizer.step() - print("\r{} batch took {:.3f}s.".format("val" if is_val else "train", logger.time(timer_key)), end="", flush=True) - print("Total avg fw {:.2f}s".format(logger.get_time(timer_key)/iters)) - if out_dir is not None: - assert len(logger.sysmetrics[sysmetrics_start_ix:-1]) > 0, "train_fw: empty df" - logger.sysmetrics[sysmetrics_start_ix:-1].to_pickle(os.path.join( - out_dir,"{}_msrmts.pickle".format("val_fw" if is_val else "train_fwbw"))) - return logger.sysmetrics[sysmetrics_start_ix:-1].copy() - -def measure_train_fw_incl_batch_gen(logger, net, batch_gen, iters=1, warm_up=20, is_val=False, out_dir=None): - torch.cuda.empty_cache() - timer_key = "val_fw" if is_val else "train_fw" - for i in range(warm_up): - batch = next(batch_gen) - results_dict = net.train_forward(batch) - print("\rfinished warm-up batch {}/{}".format(i+1, warm_up), end="", flush=True) - sysmetrics_start_ix = len(logger.sysmetrics.index) - for i in range(iters): - logger.time(timer_key) - batch = next(batch_gen) - results_dict = net.train_forward(batch, is_validation=is_val) - if not is_val: - results_dict["torch_loss"].backward() - print("\r{} batch took {:.3f}s.".format("val" if is_val else "train", logger.time(timer_key)), end="", flush=True) - print("Total avg fw {:.2f}s".format(logger.get_time(timer_key)/iters)) - if out_dir is not None: - assert len(logger.sysmetrics[sysmetrics_start_ix:-1]) > 0, "train_fw incl batch: empty df" - logger.sysmetrics[sysmetrics_start_ix:-1].to_pickle(os.path.join( - out_dir,"{}_incl_batch_msrmts.pickle".format("val_fw" if is_val else "train_fwbw"))) - return logger.sysmetrics[sysmetrics_start_ix:-1] - - - -def measure_train_backward(cf, logger, net, batch, iters=1, warm_up=20, out_dir=None): - torch.cuda.empty_cache() - optimizer = torch.optim.Adam(net.parameters(), lr=cf.learning_rate[0], weight_decay=cf.weight_decay) - results_dict = net.train_forward(batch, is_validation=False) - loss = results_dict["torch_loss"] - for i in range(warm_up): - loss.backward(retain_graph=True) - print("\rfinished warm-up batch {}/{}".format(i + 1, warm_up), end="", flush=True) - sysmetrics_start_ix = len(logger.sysmetrics.index) - for i in range(iters): - logger.time("train_bw") - optimizer.zero_grad() - loss.backward(retain_graph=True) - optimizer.step() - print("\r{} bw batch {} took {:.3f}s.".format("train", i+1, logger.time("train_bw")), end="", flush=True) - print("Total avg bw {:.2f}s".format(logger.get_time("train_bw") / iters)) - if out_dir is not None: - assert len(logger.sysmetrics[sysmetrics_start_ix:-1]) > 0, "train_bw: empty df" - logger.sysmetrics[sysmetrics_start_ix:-1].to_pickle(os.path.join(out_dir,"train_bw.pickle")) - return logger.sysmetrics[sysmetrics_start_ix:-1] - - - -def measure_test_forward(logger, net, batch, iters=1, 
return_masks=False): - torch.cuda.empty_cache() - for i in range(iters): - logger.time("test_fw") - results_dict = net.test_forward(batch, return_masks=return_masks) - print("\rtest batch took {:.3f}s.".format(logger.time("test_fw")), end="", flush=True) - print("Total avg test fw {:.2f}s".format(logger.get_time('test_fw')/iters)) - - -def perform_measurements(args, iters=20): - - cf = utils.prep_exp(args.dataset_name, args.exp_dir, args.server_env, is_training=True, use_stored_settings=False) - - cf.exp_dir = args.exp_dir - - # pid = 1624 - # cf.fold = find_pid_in_splits(pid) - cf.fold = 0 - cf.merge_2D_to_3D_preds = False - cf.fold_dir = os.path.join(cf.exp_dir, 'fold_{}'.format(cf.fold)) - - logger = utils.get_logger(cf.exp_dir, sysmetrics_interval=0.5) - model = utils.import_module('model', cf.model_path) - net = model.net(cf, logger).cuda() - test_predictor = Predictor(cf, None, logger, mode='test') - #cf.p_batchbalance = 0 - #cf.do_aug = False - batch_gens = data_loader.get_train_generators(cf, logger) - train_gen, val_gen = batch_gens['train'], batch_gens['val_sampling'] - test_gen = data_loader.get_test_generator(cf, logger)['test'] - weight_paths = [os.path.join(cf.fold_dir, '{}_best_params.pth'.format(rank)) for rank in - test_predictor.epoch_ranking] - - try: - pids = test_gen.dataset_pids - except: - pids = test_gen.generator.dataset_pids - print("pids in test set: ", pids) - pid = pids[0] - assert pid in pids - pid = "285" - - model_name = cf.model - - results_dir = "/home/gregor/Documents/medicaldetectiontoolkit/code_optim/"+model_name - os.makedirs(results_dir, exist_ok=True) - print("Model: {}.".format(model_name)) - #gpu_logger = utils.Nvidia_GPU_Logger() - #gpu_logger.start(interval=0.1) - #measure_train_batch_loading(logger, train_gen, iters=iters, out_dir=results_dir) - #measure_train_batch_loading(logger, val_gen, iters=iters, is_val=True, out_dir=results_dir) - #measure_RPN(logger, net, next(train_gen), iters=iters, out_dir=results_dir) - #measure_FPN(logger, net, next(train_gen), iters=iters, out_dir=results_dir) - #measure_forward(logger, net, next(train_gen), iters=iters, out_dir=results_dir) - measure_train_forward(logger, net, next(train_gen), iters=iters, out_dir=results_dir) #[['global_step', 'gpu_utilization (%)']] - #measure_train_forward(logger, net, next(val_gen), iters=iters, is_val=True, out_dir=results_dir) - #measure_train_fw_incl_batch_gen(logger, net, train_gen, iters=iters, out_dir=results_dir) - #measure_train_fw_incl_batch_gen(logger, net, val_gen, iters=iters, is_val=True, out_dir=results_dir) - #measure_train_backward(cf, logger, net, next(train_gen), iters=iters, out_dir=results_dir) - #measure_test_forward(logger, net, next(test_gen), iters=iters, return_masks=cf.return_masks_in_test) - - return results_dir, iters - -def plot_folder(cf, ax, results_dir, iters, markers='o', offset=(+0.01, -4)): - point_renaming = {"FPN_msrmts": ["FPN.forward", (offset[0], -4)], "fw_msrmts": "net.forward", - "train_bw": "backward+optimizer", - "train_fw_msrmts": "net.train_forward", - "train_fw_incl_batch": "train_fw+batch", "RPN_msrmts": "RPN.forward", - "train_fwbw_msrmts": ["train_fw+bw", (offset[0], +2)], - "val_fw_msrmts": ["val_fw", (offset[0], -4)], - "train_fwbw_incl_batch_msrmts": ["train_fw+bw+batchload", (offset[0], +2)], - "train_fwbw_incl_batch_aug_msrmts": ["train_fw+bw+batchload+aug", (-0.2, +2)], - "val_fw_incl_batch_msrmts": ["val_fw+batchload", (offset[0], -4)], - "val_loading": ["val_load", (-0.06, -4)], - "train_loading_wo_bal_fg_aug": 
["train_load_w/o_bal,fg,aug", (offset[0], 2)], - "train_loading_wo_balancing": ["train_load_w/o_balancing", (-0.05, 2)], - "train_loading_wo_aug": ["train_load_w/o_aug", (offset[0], 2)], - "train_loading_wo_bal_fg": ["train_load_w/o_bal,fg", (offset[0], -4)], - "train_loading": ["train_load", (+0.01, -1.3)] - } - dfs = OrderedDict() - for file in os.listdir(results_dir): - if os.path.splitext(file)[-1]==".pickle": - dfs[file.split(os.sep)[-1].split(".")[0]] = pd.read_pickle(os.path.join(results_dir,file)) - - - for i, (name, df) in enumerate(dfs.items()): - time = (df["rel_time"].iloc[-1] - df["rel_time"].iloc[0])/iters - gpu_u = df["gpu_utilization (%)"].values.astype(int).mean() - - color = cf.color_palette[i%len(cf.color_palette)] - ax.scatter(time, gpu_u, color=color, marker=markers) - if name in point_renaming.keys(): - name = point_renaming[name] - if isinstance(name, list): - offset = name[1] - name = name[0] - ax.text(time+offset[0], gpu_u+offset[1], name, color=color) - -def analyze_measurements(cf, results_dir, iters, title=""): - fig, ax = plg.plt.subplots(1, 1) - - settings = [(results_dir, iters, 'o'), (os.path.join(results_dir, "200iters_pre_optim"), 200, 'v', (-0.08, 2)), - (os.path.join(results_dir, "200iters_after_optim"), 200, 'o')] - for args in settings: - plot_folder(cf, ax, *args) - labels = ["after optim", "pre optim"] - handles = [Line2D([0], [0], marker=settings[i][2], label=labels[i], color="w", markerfacecolor=cf.black, markersize=10) - for i in range(len(settings[:2]))] - plg.plt.legend(handles=handles, loc="best") - ax.set_xlim(0,ax.get_xlim()[1]*1.05) - ax.set_ylim(0, 100) - ax.set_ylabel("Mean GPU Utilization (%)") - ax.set_xlabel("Runtime (s)") - plg.plt.title(title+"GPU utilization vs Method Runtime\nMean Over {} Iterations".format(iters)) - - major_ticks = np.arange(0, 101, 10) - minor_ticks = np.arange(0, 101, 5) - ax.set_yticks(major_ticks) - ax.set_yticks(minor_ticks, minor=True) - ax.grid(which='minor', alpha=0.2) - ax.grid(which='major', alpha=0.5) - - - plg.plt.savefig(os.path.join(results_dir, "measurements.png")) - - - - return - - -if __name__=="__main__": - class Args(): - def __init__(self): - self.dataset_name = "datasets/prostate" - self.exp_dir = "datasets/prostate/experiments/dev" - self.server_env = False - - - args = Args() - - sys.path.append(args.dataset_name) - import data_loader - from configs import Configs - cf = configs(args.server_env) - iters = 200 - results_dir, iters = perform_measurements(args, iters=iters) - results_dir = "/home/gregor/Documents/medicaldetectiontoolkit/code_optim/" + cf.model - analyze_measurements(cf, results_dir, iters=iters, title=cf.model+": ") - - diff --git a/datasets/cityscapes/configs.py b/datasets/cityscapes/configs.py deleted file mode 100644 index ed2cdab..0000000 --- a/datasets/cityscapes/configs.py +++ /dev/null @@ -1,434 +0,0 @@ -__author__ = '' -#credit Paul F. 
Jaeger - -######################### -# Example Config # -######################### - -import os -import sys - -import numpy as np -from collections import namedtuple - -sys.path.append('../') -from default_configs import DefaultConfigs - -class Configs(DefaultConfigs): - - def __init__(self, server_env=None): - super(Configs, self).__init__(server_env) - - self.dim = 2 - - ######################### - # I/O # - ######################### - - self.data_sourcedir = "/mnt/HDD2TB/Documents/data/cityscapes/cs_20190715/" - if server_env: - #self.source_dir = '/home/ramien/medicaldetectiontoolkit/' - self.data_sourcedir = '/datasets/data_ramien/cityscapes/cs_20190715_npz/' - #self.data_sourcedir = "/mnt/HDD2TB/Documents/data/cityscapes/cs_6c_inst_only/" - - self.datapath = "leftImg8bit/" - self.targetspath = "gtFine/" - - self.cities = {'train':['dusseldorf', 'aachen', 'bochum', 'cologne', 'erfurt', - 'hamburg', 'hanover', 'jena', 'krefeld', 'monchengladbach', - 'strasbourg', 'stuttgart', 'tubingen', 'ulm', 'weimar', - 'zurich'], - 'val':['frankfurt', 'munster'], - 'test':['bremen', 'darmstadt', 'lindau'] } - self.set_splits = ["train", "val", "test"] # for training and val, mixed up - # test cities are not held out - - self.info_dict_name = 'city_info.pkl' - self.info_dict_path = os.path.join(self.data_sourcedir, self.info_dict_name) - self.config_path = os.path.realpath(__file__) - self.backbone_path = 'models/backbone.py' - - # one out of ['mrcnn', 'retina_net', 'retina_unet', 'detection_unet', 'detection_fpn']. - self.model = 'retina_unet' - self.model_path = 'models/{}.py'.format(self.model if not 'retina' in self.model else 'retina_net') - self.model_path = os.path.join(self.source_dir, self.model_path) - - self.select_prototype_subset = None - - ######################### - # Preprocessing # - ######################### - self.prepro = { - - 'data_dir': '/mnt/HDD2TB/Documents/data/cityscapes_raw/', #raw files (input), needs to end with "/" - 'targettype': "gtFine_instanceIds", - 'set_splits': ["train", "val", "test"], - - 'img_target_size': np.array([256, 512])*4, #y,x - - 'output_directory': self.data_sourcedir, - - 'center_of_mass_crop': True, #not implemented - #'pre_crop_size': , #z,y,x - 'normalization': {'percentiles':[1., 99.]},#not implemented - 'interpolation': 'nearest', #not implemented - - 'info_dict_path': self.info_dict_path, - - 'npz_dir' : self.data_sourcedir[:-1]+"_npz" #if not None: convert to npz, copy data here - } - - ######################### - # Architecture # - ######################### - # 'class', 'regression', 'regression_ken_gal' - # 'class': standard object classification per roi, pairwise combinable with each of below tasks. - # 'class' is only option implemented for CityScapes data set. 
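As an illustration of the task options listed in the comment above (hypothetical here, since Cityscapes only ships class targets): a dataset that also provides regression targets could combine tasks as

    self.prediction_tasks = ['class', 'regression']

whereas this Cityscapes config keeps only the class task, as set on the following line.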
- self.prediction_tasks = ['class',] - self.start_filts = 52 - self.end_filts = self.start_filts * 4 - self.res_architecture = 'resnet101' # 'resnet101' , 'resnet50' - self.weight_init = None # 'kaiming', 'xavier' or None for pytorch default - self.norm = 'instance_norm' # 'batch_norm' # one of 'None', 'instance_norm', 'batch_norm' - self.relu = 'relu' - - ######################### - # Data Loader # - ######################### - - self.seed = 17 - self.n_workers = 16 if server_env else os.cpu_count() - - self.batch_size = 8 - self.n_cv_splits = 10 #at least 2 (train, val) - - self.num_classes = None #set below #for instance classification (excl background) - self.num_seg_classes = None #set below #incl background - - self.create_bounding_box_targets = True - self.class_specific_seg = True - - self.channels = [0,1,2] - self.pre_crop_size = self.prepro['img_target_size'] # y,x - self.crop_margin = [10,10] #has to be smaller than respective patch_size//2 - self.patch_size_2D = [256, 512] #self.pre_crop_size #would be better to save as tuple since should not be altered - self.patch_size_3D = self.patch_size_2D + [1] - self.patch_size = self.patch_size_2D - - self.balance_target = "class_targets" - # ratio of fully random patients drawn during batch generation - # resulting batch random count is rounded down to closest integer - self.batch_random_ratio = 0.2 - - self.observables_patient = [] - self.observables_rois = [] - - ######################### - # Data Augmentation # - ######################### - #the angle rotations are implemented incorrectly in batchgenerators! in 2D, - #the x-axis angle controls the z-axis angle. - self.do_aug = True - self.da_kwargs = { - 'mirror': True, - 'mirror_axes': (1,), #image axes, (batch and channel are ignored, i.e., actual tensor dims are +2) - 'random_crop': True, - 'rand_crop_dist': (self.patch_size[0] / 2., self.patch_size[1] / 2.), - 'do_elastic_deform': True, - 'alpha': (0., 1000.), - 'sigma': (28., 30.), - 'do_rotation': True, - 'angle_x': (-np.pi / 8., np.pi / 8.), - 'angle_y': (0.,0.), - 'angle_z': (0.,0.), - 'do_scale': True, - 'scale': (0.6, 1.4), - 'border_mode_data': 'constant', - 'gamma_range': (0.6, 1.4) - } - - ################################# - # Schedule / Selection / Optim # - ################################# - #mrcnn paper: ~2.56m samples seen during coco-dataset training - self.num_epochs = 400 - self.num_train_batches = 600 - - self.do_validation = True - # decide whether to validate on entire patient volumes (like testing) or sampled patches (like training) - # the former is morge accurate, while the latter is faster (depending on volume size) - self.val_mode = 'val_sampling' # one of 'val_sampling', 'val_patient' - # if 'all' iterates over entire val_set once. 
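For scale, a quick check of the schedule above: 400 epochs x 600 train batches x batch size 8 = ~1.92M samples seen per fold, i.e. the same order of magnitude as the ~2.56M COCO samples quoted in the comment on num_epochs.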
- self.num_val_batches = "all" # for val_sampling - - self.save_n_models = 3 - self.min_save_thresh = 1 # in epochs - self.model_selection_criteria = {"human_ap": 1., "vehicle_ap": 0.9} - self.warm_up = 0 - - self.learning_rate = [5*1e-4] * self.num_epochs - self.dynamic_lr_scheduling = True #with scheduler set in exec - self.lr_decay_factor = 0.5 - self.scheduling_patience = int(self.num_epochs//10) - self.weight_decay = 1e-6 - self.clip_norm = None # number or None - - ######################### - # Colors and Legends # - ######################### - self.plot_frequency = 5 - - #colors - self.color_palette = [self.red, self.blue, self.green, self.orange, self.aubergine, - self.yellow, self.gray, self.cyan, self.black] - - #legends - Label = namedtuple( 'Label' , [ - 'name' , # The identifier of this label, e.g. 'car', 'person', ... . - # We use them to uniquely name a class - 'ppId' , # An integer ID that is associated with this label. - # The IDs are used to represent the label in ground truth images - # An ID of -1 means that this label does not have an ID and thus - # is ignored when creating ground truth images (e.g. license plate). - # Do not modify these IDs, since exactly these IDs are expected by the - # evaluation server. - 'id' , # Feel free to modify these IDs as suitable for your method. - # Max value is 255! - 'category' , # The name of the category that this label belongs to - 'categoryId' , # The ID of this category. Used to create ground truth images - # on category level. - 'hasInstances', # Whether this label distinguishes between single instances or not - 'ignoreInEval', # Whether pixels having this class as ground truth label are ignored - # during evaluations or not - 'color' , # The color of this label - ] ) - segLabel = namedtuple( "segLabel", ["name", "id", "color"]) - boxLabel = namedtuple( 'boxLabel', [ "name", "color"]) - - self.labels = [ - # name ppId id category catId hasInstances ignoreInEval color - Label( 'ignore' , 0 , 0 , 'void' , 0 , False , True , ( 0., 0., 0., 1.) ), - Label( 'ego vehicle' , 1 , 0 , 'void' , 0 , False , True , ( 0., 0., 0., 1.) ), - Label( 'rectification border' , 2 , 0 , 'void' , 0 , False , True , ( 0., 0., 0., 1.) ), - Label( 'out of roi' , 3 , 0 , 'void' , 0 , False , True , ( 0., 0., 0., 1.) ), - Label( 'static' , 4 , 0 , 'void' , 0 , False , True , ( 0., 0., 0., 1.) ), - Label( 'dynamic' , 5 , 0 , 'void' , 0 , False , True , (0.44, 0.29, 0., 1.) ), - Label( 'ground' , 6 , 0 , 'void' , 0 , False , True , ( 0.32, 0., 0.32, 1.) ), - Label( 'road' , 7 , 0 , 'flat' , 1 , False , False , (0.5, 0.25, 0.5, 1.) ), - Label( 'sidewalk' , 8 , 0 , 'flat' , 1 , False , False , (0.96, 0.14, 0.5, 1.) ), - Label( 'parking' , 9 , 0 , 'flat' , 1 , False , True , (0.98, 0.67, 0.63, 1.) ), - Label( 'rail track' , 10 , 0 , 'flat' , 1 , False , True , ( 0.9, 0.59, 0.55, 1.) ), - Label( 'building' , 11 , 0 , 'construction' , 2 , False , False , ( 0.27, 0.27, 0.27, 1.) ), - Label( 'wall' , 12 , 0 , 'construction' , 2 , False , False , (0.4,0.4,0.61, 1.) ), - Label( 'fence' , 13 , 0 , 'construction' , 2 , False , False , (0.75,0.6,0.6, 1.) ), - Label( 'guard rail' , 14 , 0 , 'construction' , 2 , False , True , (0.71,0.65,0.71, 1.) ), - Label( 'bridge' , 15 , 0 , 'construction' , 2 , False , True , (0.59,0.39,0.39, 1.) ), - Label( 'tunnel' , 16 , 0 , 'construction' , 2 , False , True , (0.59,0.47, 0.35, 1.) ), - Label( 'pole' , 17 , 0 , 'object' , 3 , False , False , (0.6,0.6,0.6, 1.) 
), - Label( 'polegroup' , 18 , 0 , 'object' , 3 , False , True , (0.6,0.6,0.6, 1.) ), - Label( 'traffic light' , 19 , 0 , 'object' , 3 , False , False , (0.98,0.67, 0.12, 1.) ), - Label( 'traffic sign' , 20 , 0 , 'object' , 3 , False , False , (0.86,0.86, 0., 1.) ), - Label( 'vegetation' , 21 , 0 , 'nature' , 4 , False , False , (0.42,0.56, 0.14, 1.) ), - Label( 'terrain' , 22 , 0 , 'nature' , 4 , False , False , (0.6, 0.98,0.6, 1.) ), - Label( 'sky' , 23 , 0 , 'sky' , 5 , False , False , (0.27,0.51,0.71, 1.) ), - Label( 'person' , 24 , 1 , 'human' , 6 , True , False , (0.86, 0.08, 0.24, 1.) ), - Label( 'rider' , 25 , 1 , 'human' , 6 , True , False , (1., 0., 0., 1.) ), - Label( 'car' , 26 , 2 , 'vehicle' , 7 , True , False , ( 0., 0.,0.56, 1.) ), - Label( 'truck' , 27 , 2 , 'vehicle' , 7 , True , False , ( 0., 0., 0.27, 1.) ), - Label( 'bus' , 28 , 2 , 'vehicle' , 7 , True , False , ( 0., 0.24,0.39, 1.) ), - Label( 'caravan' , 29 , 2 , 'vehicle' , 7 , True , True , ( 0., 0., 0.35, 1.) ), - Label( 'trailer' , 30 , 2 , 'vehicle' , 7 , True , True , ( 0., 0.,0.43, 1.) ), - Label( 'train' , 31 , 2 , 'vehicle' , 7 , True , False , ( 0., 0.31,0.39, 1.) ), - Label( 'motorcycle' , 32 , 2 , 'vehicle' , 7 , True , False , ( 0., 0., 0.9, 1.) ), - Label( 'bicycle' , 33 , 2 , 'vehicle' , 7 , True , False , (0.47, 0.04, 0.13, 1.) ), - Label( 'license plate' , -1 , 0 , 'vehicle' , 7 , False , True , ( 0., 0., 0.56, 1.) ), - Label( 'background' , -1 , 0 , 'void' , 0 , False , True , ( 0., 0., 0.0, 0.) ), - Label( 'vehicle' , 33 , 2 , 'vehicle' , 7 , True , False , (*self.aubergine, 1.) ), - Label( 'human' , 25 , 1 , 'human' , 6 , True , False , (*self.blue, 1.) ) - ] - # evtl problem: class-ids (trainIds) don't start with 0 for the first class, 0 is bg. - #WONT WORK: class ids need to start at 0 (excluding bg!) and be consecutively numbered - - self.ppId2id = { label.ppId : label.id for label in self.labels} - self.class_id2label = { label.id : label for label in self.labels} - self.class_cmap = {label.id : label.color for label in self.labels} - self.class_dict = {label.id : label.name for label in self.labels if label.id!=0} - #c_dict: only for evaluation, remove bg class. - - self.box_type2label = {label.name : label for label in self.box_labels} - self.box_color_palette = {label.name:label.color for label in self.box_labels} - - if self.class_specific_seg: - self.seg_labels = [label for label in self.class_id2label.values()] - else: - self.seg_labels = [ - # name id color - segLabel( "bg" , 0, (1.,1.,1.,0.) ), - segLabel( "fg" , 1, (*self.orange, .8)) - ] - - self.seg_id2label = {label.id : label for label in self.seg_labels} - self.cmap = {label.id : label.color for label in self.seg_labels} - - self.plot_prediction_histograms = True - self.plot_stat_curves = False - self.has_colorchannels = True - self.plot_class_ids = True - - self.num_classes = len(self.class_dict) - self.num_seg_classes = len(self.seg_labels) - - ######################### - # Testing # - ######################### - - self.test_aug_axes = None #None or list: choices are 2,3,(2,3) - self.held_out_test_set = False - self.max_test_patients = 'all' # 'all' for all - self.report_score_level = ['rois',] # choose list from 'patient', 'rois' - self.patient_class_of_interest = 1 - - self.metrics = ['ap', 'dice'] - self.ap_match_ious = [0.1] # threshold(s) for considering a prediction as true positive - # aggregation method for test and val_patient predictions. 
- # wbc = weighted box clustering as in https://arxiv.org/pdf/1811.08661.pdf, - # nms = standard non-maximum suppression, or None = no clustering - self.clustering = 'wbc' - # iou thresh (exclusive!) for regarding two preds as concerning the same ROI - self.clustering_iou = 0.1 # has to be larger than desired possible overlap iou of model predictions - - self.min_det_thresh = 0.06 - self.merge_2D_to_3D_preds = False - - self.n_test_plots = 1 #per fold and rankself.ap_match_ious = [0.1] #threshold(s) for considering a prediction as true positive - self.test_n_epochs = self.save_n_models - - - ######################### - # shared model settings # - ######################### - - # max number of roi candidates to identify per image and class (slice in 2D, volume in 3D) - self.n_roi_candidates = 100 - - ######################### - # Add model specifics # - ######################### - - {'mrcnn': self.add_mrcnn_configs, 'retina_net': self.add_mrcnn_configs, 'retina_unet': self.add_mrcnn_configs - }[self.model]() - - def add_mrcnn_configs(self): - - self.scheduling_criterion = max(self.model_selection_criteria, key=self.model_selection_criteria.get) - self.scheduling_mode = 'min' if "loss" in self.scheduling_criterion else 'max' - - # number of classes for network heads: n_foreground_classes + 1 (background) - self.head_classes = self.num_classes + 1 - - # seg_classes here refers to the first stage classifier (RPN) reallY? - - # feed +/- n neighbouring slices into channel dimension. set to None for no context. - self.n_3D_context = None - - - self.frcnn_mode = False - - self.detect_while_training = True - # disable the re-sampling of mask proposals to original size for speed-up. - # since evaluation is detection-driven (box-matching) and not instance segmentation-driven (iou-matching), - # mask outputs are optional. - self.return_masks_in_train = True - self.return_masks_in_val = True - self.return_masks_in_test = True - - # feature map strides per pyramid level are inferred from architecture. anchor scales are set accordingly. - self.backbone_strides = {'xy': [4, 8, 16, 32], 'z': [1, 2, 4, 8]} - # anchor scales are chosen according to expected object sizes in data set. Default uses only one anchor scale - # per pyramid level. (outer list are pyramid levels (corresponding to BACKBONE_STRIDES), inner list are scales per level.) - self.rpn_anchor_scales = {'xy': [[4], [8], [16], [32]], 'z': [[1], [2], [4], [8]]} - # choose which pyramid levels to extract features from: P2: 0, P3: 1, P4: 2, P5: 3. - self.pyramid_levels = [0, 1, 2, 3] - # number of feature maps in rpn. typically lowered in 3D to save gpu-memory. - self.n_rpn_features = 512 if self.dim == 2 else 64 - - # anchor ratios and strides per position in feature maps. - self.rpn_anchor_ratios = [0.5, 1., 2.] - self.rpn_anchor_stride = 1 - # Threshold for first stage (RPN) non-maximum suppression (NMS): LOWER == HARDER SELECTION - self.rpn_nms_threshold = 0.7 - - # loss sampling settings. - self.rpn_train_anchors_per_image = 8 - self.train_rois_per_image = 10 # per batch_instance - self.roi_positive_ratio = 0.5 - self.anchor_matching_iou = 0.8 - - # k negative example candidates are drawn from a pool of size k*shem_poolsize (stochastic hard-example mining), - # where k<=#positive examples. 
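A minimal sketch of the stochastic hard-example mining described in the comment above (illustrative only, not the toolkit's own implementation; the function name shem_select and the 1-D per-anchor loss tensor neg_losses are assumptions): draw a random pool of n_positives * shem_poolsize negative candidates and keep the highest-loss ones.

import torch

def shem_select(neg_losses: torch.Tensor, n_positives: int, shem_poolsize: int = 3) -> torch.Tensor:
    # random candidate pool of size k * shem_poolsize, capped at the number of available negatives
    pool_size = min(n_positives * shem_poolsize, neg_losses.numel())
    pool_ix = torch.randperm(neg_losses.numel())[:pool_size]
    # keep the k hardest (highest-loss) negatives from that pool, with k <= number of positives
    k = min(n_positives, pool_size)
    hard_in_pool = torch.topk(neg_losses[pool_ix], k).indices
    return pool_ix[hard_in_pool]  # indices into the original negative set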
- self.shem_poolsize = 3 - - self.pool_size = (7, 7) if self.dim == 2 else (7, 7, 3) - self.mask_pool_size = (14, 14) if self.dim == 2 else (14, 14, 5) - self.mask_shape = (28, 28) if self.dim == 2 else (28, 28, 10) - - self.rpn_bbox_std_dev = np.array([0.1, 0.1, 0.1, 0.2, 0.2, 0.2]) - self.bbox_std_dev = np.array([0.1, 0.1, 0.1, 0.2, 0.2, 0.2]) - self.window = np.array([0, 0, self.patch_size[0], self.patch_size[1], 0, self.patch_size_3D[2]]) - self.scale = np.array([self.patch_size[0], self.patch_size[1], self.patch_size[0], self.patch_size[1], - self.patch_size_3D[2], self.patch_size_3D[2]]) # y1,x1,y2,x2,z1,z2 - - if self.dim == 2: - self.rpn_bbox_std_dev = self.rpn_bbox_std_dev[:4] - self.bbox_std_dev = self.bbox_std_dev[:4] - self.window = self.window[:4] - self.scale = self.scale[:4] - - self.plot_y_max = 1.5 - self.n_plot_rpn_props = 5 # per batch_instance (slice in 2D / patient in 3D) - - # pre-selection in proposal-layer (stage 1) for NMS-speedup. applied per batch element. - self.pre_nms_limit = 3000 - - # n_proposals to be selected after NMS per batch element. too high numbers blow up memory if "detect_while_training" is True, - # since proposals of the entire batch are forwarded through second stage as one "batch". - self.roi_batch_size = 2500 - self.post_nms_rois_training = 500 - self.post_nms_rois_inference = 500 - - # Final selection of detections (refine_detections) - self.model_max_instances_per_batch_element = 50 # per batch element and class. - self.detection_nms_threshold = 1e-5 # needs to be > 0, otherwise all predictions are one cluster. - self.model_min_confidence = 0.05 # iou for nms in box refining (directly after heads), should be >0 since ths>=x in mrcnn.py - - if self.dim == 2: - self.backbone_shapes = np.array( - [[int(np.ceil(self.patch_size[0] / stride)), - int(np.ceil(self.patch_size[1] / stride))] - for stride in self.backbone_strides['xy']]) - else: - self.backbone_shapes = np.array( - [[int(np.ceil(self.patch_size[0] / stride)), - int(np.ceil(self.patch_size[1] / stride)), - int(np.ceil(self.patch_size[2] / stride_z))] - for stride, stride_z in zip(self.backbone_strides['xy'], self.backbone_strides['z'] - )]) - - if self.model == 'retina_net' or self.model == 'retina_unet': - # implement extra anchor-scales according to https://arxiv.org/abs/1708.02002 - self.rpn_anchor_scales['xy'] = [[ii[0], ii[0] * (2 ** (1 / 3)), ii[0] * (2 ** (2 / 3))] for ii in - self.rpn_anchor_scales['xy']] - self.rpn_anchor_scales['z'] = [[ii[0], ii[0] * (2 ** (1 / 3)), ii[0] * (2 ** (2 / 3))] for ii in - self.rpn_anchor_scales['z']] - self.n_anchors_per_pos = len(self.rpn_anchor_ratios) * 3 - - self.n_rpn_features = 256 if self.dim == 2 else 64 - - # pre-selection of detections for NMS-speedup. per entire batch. 
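Since several settings above (rpn_nms_threshold, detection_nms_threshold, pre_nms_limit) refer to non-maximum suppression, here is a minimal greedy NMS sketch for orientation (illustrative only, not the toolkit's own implementation; boxes are assumed as y1, x1, y2, x2 as used elsewhere in this config). It also shows why a lower threshold means harder selection: every box whose IoU with an already kept box exceeds the threshold is suppressed.

import numpy as np

def nms_sketch(boxes: np.ndarray, scores: np.ndarray, thresh: float) -> list:
    y1, x1, y2, x2 = boxes[:, 0], boxes[:, 1], boxes[:, 2], boxes[:, 3]
    areas = (y2 - y1) * (x2 - x1)
    order = scores.argsort()[::-1]  # process boxes from highest to lowest score
    keep = []
    while order.size > 0:
        i = order[0]
        keep.append(int(i))
        # IoU of the current box with all remaining, lower-scored boxes
        yy1 = np.maximum(y1[i], y1[order[1:]])
        xx1 = np.maximum(x1[i], x1[order[1:]])
        yy2 = np.minimum(y2[i], y2[order[1:]])
        xx2 = np.minimum(x2[i], x2[order[1:]])
        inter = np.maximum(0., yy2 - yy1) * np.maximum(0., xx2 - xx1)
        iou = inter / (areas[i] + areas[order[1:]] - inter)
        order = order[1:][iou <= thresh]  # lower thresh -> more overlaps count as duplicates
    return keep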
- self.pre_nms_limit = 10000 if self.dim == 2 else 30000 - - # anchor matching iou is lower than in Mask R-CNN according to https://arxiv.org/abs/1708.02002 - self.anchor_matching_iou = 0.5 - - if self.model == 'retina_unet': - self.operate_stride1 = True \ No newline at end of file diff --git a/datasets/cityscapes/data_loader.py b/datasets/cityscapes/data_loader.py deleted file mode 100644 index 01a1a45..0000000 --- a/datasets/cityscapes/data_loader.py +++ /dev/null @@ -1,452 +0,0 @@ -import sys -sys.path.append('../') #works on cluster indep from where sbatch job is started -import plotting as plg - -import warnings -import os -import time -import pickle - - -import numpy as np -import pandas as pd -from PIL import Image as pil - -import torch -import torch.utils.data - -# batch generator tools from https://github.com/MIC-DKFZ/batchgenerators -from batchgenerators.transforms.spatial_transforms import MirrorTransform as Mirror -from batchgenerators.transforms.abstract_transforms import Compose -from batchgenerators.dataloading.multi_threaded_augmenter import MultiThreadedAugmenter -from batchgenerators.transforms.spatial_transforms import SpatialTransform -from batchgenerators.transforms.crop_and_pad_transforms import CenterCropTransform -from batchgenerators.transforms.color_transforms import GammaTransform -#from batchgenerators.transforms.utility_transforms import ConvertSegToBoundingBoxCoordinates - - -sys.path.append(os.path.dirname(os.path.realpath(__file__))) - -import utils.exp_utils as utils -import utils.dataloader_utils as dutils -from utils.dataloader_utils import ConvertSegToBoundingBoxCoordinates - -from configs import Configs -cf= configs() - - -warnings.filterwarnings("ignore", message="This figure includes Axes.*") - - -def load_obj(file_path): - with open(file_path, 'rb') as handle: - return pickle.load(handle) - -def save_to_npy(arr_out, array): - np.save(arr_out+".npy", array) - print("Saved binary .npy-file to {}".format(arr_out)) - return arr_out+".npy" - -def shape_small_first(shape): - if len(shape)<=2: #no changing dimensions if channel-dim is missing - return shape - smallest_dim = np.argmin(shape) - if smallest_dim!=0: #assume that smallest dim is color channel - new_shape = np.array(shape) #to support mask indexing - new_shape = (new_shape[smallest_dim], - *new_shape[(np.arange(len(shape),dtype=int)!=smallest_dim)]) - return new_shape - else: - return shape - -class Dataset(dutils.Dataset): - def __init__(self, cf, logger=None, subset_ids=None, data_sourcedir=None): - super(Dataset, self).__init__(cf, data_sourcedir=data_sourcedir) - - info_dict = load_obj(cf.info_dict_path) - - if subset_ids is not None: - img_ids = subset_ids - if logger is None: - print('subset: selected {} instances from df'.format(len(pids))) - else: - logger.info('subset: selected {} instances from df'.format(len(pids))) - else: - img_ids = list(info_dict.keys()) - - #evtly copy data from data_rootdir to data_dir - if cf.server_env and not hasattr(cf, "data_dir"): - file_subset = [info_dict[img_id]['img'][:-3]+"*" for img_id in img_ids] - file_subset+= [info_dict[img_id]['seg'][:-3]+"*" for img_id in img_ids] - file_subset+= [cf.info_dict_path] - self.copy_data(cf, file_subset=file_subset) - cf.data_dir = self.data_dir - - img_paths = [os.path.join(self.data_dir, info_dict[img_id]['img']) for img_id in img_ids] - seg_paths = [os.path.join(self.data_dir, info_dict[img_id]['seg']) for img_id in img_ids] - - # load all subject files - self.data = {} - for i, img_id in enumerate(img_ids): - 
subj_data = {'img_id':img_id} - subj_data['img'] = img_paths[i] - subj_data['seg'] = seg_paths[i] - if 'class' in self.cf.prediction_tasks: - subj_data['class_targets'] = np.array(info_dict[img_id]['roi_classes']) - else: - subj_data['class_targets'] = np.ones_like(np.array(info_dict[img_id]['roi_classes'])) - - self.data[img_id] = subj_data - - cf.roi_items = cf.observables_rois[:] - cf.roi_items += ['class_targets'] - if 'regression' in cf.prediction_tasks: - cf.roi_items += ['regression_targets'] - - self.set_ids = list(self.data.keys()) - - self.df = None - -class BatchGenerator(dutils.BatchGenerator): - """ - create the training/validation batch generator. Randomly sample batch_size patients - from the data set, (draw a random slice if 2D), pad-crop them to equal sizes and merge to an array. - :param data: data dictionary as provided by 'load_dataset' - :param img_modalities: list of strings ['adc', 'b1500'] from config - :param batch_size: number of patients to sample for the batch - :param pre_crop_size: equal size for merging the patients to a single array (before the final random-crop in data aug.) - :return dictionary containing the batch data / seg / pids as lists; the augmenter will later concatenate them into an array. - """ - def __init__(self, cf, data, n_batches=None, sample_pids_w_replace=True): - super(BatchGenerator, self).__init__(cf, data, n_batches) - self.dataset_length = len(self._data) - self.cf = cf - - self.sample_pids_w_replace = sample_pids_w_replace - self.eligible_pids = list(self._data.keys()) - - self.chans = cf.channels if cf.channels is not None else np.index_exp[:] - assert hasattr(self.chans, "__iter__"), "self.chans has to be list-like to maintain dims when slicing" - - self.p_fg = 0.5 - self.empty_samples_max_ratio = 0.33 - self.random_count = int(cf.batch_random_ratio * cf.batch_size) - - self.balance_target_distribution(plot=sample_pids_w_replace) - self.stats = {"roi_counts" : np.zeros((len(self.unique_ts),), dtype='uint32'), "empty_samples_count" : 0} - - def generate_train_batch(self): - #everything done in here is per batch - #print statements in here get confusing due to multithreading - if self.sample_pids_w_replace: - # fully random patients - batch_patient_ids = list(np.random.choice(self.dataset_pids, size=self.random_count, replace=False)) - # target-balanced patients - batch_patient_ids += list(np.random.choice( - self.dataset_pids, size=self.batch_size - self.random_count, replace=False, p=self.p_probs)) - else: - batch_patient_ids = np.random.choice(self.eligible_pids, size=self.batch_size, replace=False) - if self.sample_pids_w_replace == False: - self.eligible_pids = [pid for pid in self.eligible_pids if pid not in batch_patient_ids] - if len(self.eligible_pids) < self.batch_size: - self.eligible_pids = self.dataset_pids - - batch_data, batch_segs, batch_class_targets = [], [], [] - # record roi count of classes in batch - batch_roi_counts, empty_samples_count = np.zeros((self.cf.num_classes,), dtype='uint32'), 0 - - for sample in range(self.batch_size): - - patient = self._data[batch_patient_ids[sample]] - - data = np.load(patient["img"], mmap_mode="r") - seg = np.load(patient['seg'], mmap_mode="r") - - (c,y,x) = data.shape - spatial_shp = data[0].shape - assert spatial_shp==seg.shape, "spatial shape incongruence betw. 
data {} and seg {}".format(spatial_shp, seg.shape) - - if np.any([spatial_shp[ix] < self.cf.pre_crop_size[ix] for ix in range(len(spatial_shp))]): - new_shape = [np.max([spatial_shp[ix], self.cf.pre_crop_size[ix]]) for ix in range(len(spatial_shp))] - data = dutils.pad_nd_image(data, (len(data), *new_shape)) - seg = dutils.pad_nd_image(seg, new_shape) - - #eventual cropping to pre_crop_size: with prob self.p_fg sample pixel from random ROI and shift center, - #if possible, to that pixel, so that img still contains ROI after pre-cropping - dim_cropflags = [spatial_shp[i] > self.cf.pre_crop_size[i] for i in range(len(spatial_shp))] - if np.any(dim_cropflags): - #sample crop center regardless of ROIs, not guaranteed to be empty - def get_cropped_centercoords(dim): - return np.random.randint(low=self.cf.pre_crop_size[dim]//2, - high=spatial_shp[dim] - self.cf.pre_crop_size[dim]//2) - - sample_seg_center = {} - for dim in np.where(dim_cropflags)[0]: - sample_seg_center[dim] = get_cropped_centercoords(dim) - min_ = int(sample_seg_center[dim] - self.cf.pre_crop_size[dim]//2) - max_ = int(sample_seg_center[dim] + self.cf.pre_crop_size[dim]//2) - data = np.take(data, indices=range(min_, max_), axis=dim+1) #+1 for channeldim - seg = np.take(seg, indices=range(min_, max_), axis=dim) - - batch_data.append(data) - batch_segs.append(seg[np.newaxis]) - - batch_class_targets.append(patient['class_targets']) - - for cl in range(self.cf.num_classes): - batch_roi_counts[cl] += np.count_nonzero(patient['class_targets'][np.unique(seg[seg>0]) - 1] == cl) - if not np.any(seg): - empty_samples_count += 1 - - batch = {'data': np.array(batch_data).astype('float32'), 'seg': np.array(batch_segs).astype('uint8'), - 'pid': batch_patient_ids, 'class_targets': np.array(batch_class_targets), - 'roi_counts': batch_roi_counts, 'empty_samples_count': empty_samples_count} - return batch - -class PatientBatchIterator(dutils.PatientBatchIterator): - """ - creates a val/test generator. Step through the dataset and return dictionaries per patient. - For Patching, shifts all patches into batch dimension. batch_tiling_forward will take care of exceeding batch dimensions. - - This iterator/these batches are not intended to go through MTaugmenter afterwards - """ - - def __init__(self, cf, data): - super(PatientBatchIterator, self).__init__(cf, data) - - self.patch_size = cf.patch_size - - self.patient_ix = 0 # running index over all patients in set - - def generate_train_batch(self, pid=None): - - if self.patient_ix == len(self.dataset_pids): - self.patient_ix = 0 - if pid is None: - pid = self.dataset_pids[self.patient_ix] # + self.thread_id - patient = self._data[pid] - batch_class_targets = np.array([patient['class_targets']]) - - data = np.load(patient["img"], mmap_mode="r")[np.newaxis] - seg = np.load(patient['seg'], mmap_mode="r")[np.newaxis, np.newaxis] - (b, c, y, x) = data.shape - spatial_shp = data.shape[2:] - assert spatial_shp == seg.shape[2:], "spatial shape incongruence betw. 
data {} and seg {}".format(spatial_shp, - seg.shape) - if np.any([spatial_shp[ix] < self.cf.pre_crop_size[ix] for ix in range(len(spatial_shp))]): - new_shape = [np.max([spatial_shp[ix], self.cf.pre_crop_size[ix]]) for ix in range(len(spatial_shp))] - data = dutils.pad_nd_image(data, (len(data), *new_shape)) - seg = dutils.pad_nd_image(seg, new_shape) - - batch = {'data': data, 'seg': seg, 'class_targets': batch_class_targets} - converter = ConvertSegToBoundingBoxCoordinates(self.cf.dim, self.cf.roi_items, False, self.cf.class_specific_seg) - batch = converter(**batch) - batch.update({'patient_bb_target': batch['bb_target'], - 'patient_class_targets': batch['class_targets'], - 'original_img_shape': data.shape, - 'pid': np.array([pid] * len(data))}) - - # eventual tiling into patches - spatial_shp = batch["data"].shape[2:] - if np.any([spatial_shp[ix] > self.patch_size[ix] for ix in range(len(spatial_shp))]): - patient_batch = batch - print("patientiterator produced patched batch!") - patch_crop_coords_list = dutils.get_patch_crop_coords(data[0], self.patch_size) - new_img_batch, new_seg_batch = [], [] - - for c in patch_crop_coords_list: - new_img_batch.append(data[:, c[0]:c[1], c[2]:c[3]]) - seg_patch = seg[:, c[0]:c[1], c[2]: c[3]] - new_seg_batch.append(seg_patch) - - shps = [] - for arr in new_img_batch: - shps.append(arr.shape) - - data = np.array(new_img_batch) # (patches, c, x, y, z) - seg = np.array(new_seg_batch) - batch_class_targets = np.repeat(batch_class_targets, len(patch_crop_coords_list), axis=0) - - patch_batch = {'data': data.astype('float32'), 'seg': seg.astype('uint8'), - 'class_targets': batch_class_targets, - 'pid': np.array([pid] * data.shape[0])} - patch_batch['patch_crop_coords'] = np.array(patch_crop_coords_list) - patch_batch['patient_bb_target'] = patient_batch['patient_bb_target'] - patch_batch['patient_class_targets'] = patient_batch['patient_class_targets'] - patch_batch['patient_data'] = patient_batch['data'] - patch_batch['patient_seg'] = patient_batch['seg'] - patch_batch['original_img_shape'] = patient_batch['original_img_shape'] - - converter = ConvertSegToBoundingBoxCoordinates(self.cf.dim, self.cf.roi_items, False, self.cf.class_specific_seg) - patch_batch = converter(**patch_batch) - batch = patch_batch - - self.patient_ix += 1 - if self.patient_ix == len(self.dataset_pids): - self.patient_ix = 0 - - return batch - -def create_data_gen_pipeline(cf, patient_data, do_aug=True, sample_pids_w_replace=True): - """ - create mutli-threaded train/val/test batch generation and augmentation pipeline. - :param patient_data: dictionary containing one dictionary per patient in the train/test subset - :param test_pids: (optional) list of test patient ids, calls the test generator. 
- :param do_aug: (optional) whether to perform data augmentation (training) or not (validation/testing) - :return: multithreaded_generator - """ - data_gen = BatchGenerator(cf, patient_data, sample_pids_w_replace=sample_pids_w_replace) - - my_transforms = [] - if do_aug: - if cf.da_kwargs["mirror"]: - mirror_transform = Mirror(axes=cf.da_kwargs['mirror_axes']) - my_transforms.append(mirror_transform) - spatial_transform = SpatialTransform(patch_size=cf.patch_size[:cf.dim], - patch_center_dist_from_border=cf.da_kwargs['rand_crop_dist'][:2], - do_elastic_deform=cf.da_kwargs['do_elastic_deform'], - alpha=cf.da_kwargs['alpha'], sigma=cf.da_kwargs['sigma'], - do_rotation=cf.da_kwargs['do_rotation'], angle_x=cf.da_kwargs['angle_x'], - angle_y=cf.da_kwargs['angle_y'], angle_z=cf.da_kwargs['angle_z'], - do_scale=cf.da_kwargs['do_scale'], scale=cf.da_kwargs['scale'], - random_crop=cf.da_kwargs['random_crop'], - border_mode_data=cf.da_kwargs['border_mode_data']) - my_transforms.append(spatial_transform) - gamma_transform = GammaTransform(gamma_range=cf.da_kwargs["gamma_range"], invert_image=False, - per_channel=False, retain_stats=False) - my_transforms.append(gamma_transform) - - else: - my_transforms.append(CenterCropTransform(crop_size=cf.patch_size[:cf.dim])) - - if cf.create_bounding_box_targets: - my_transforms.append(ConvertSegToBoundingBoxCoordinates(cf.dim, cf.roi_items, False, cf.class_specific_seg)) - #batch receives entry 'bb_target' w bbox coordinates as [y1,x1,y2,x2,z1,z2]. - #my_transforms.append(ConvertSegToOnehotTransform(classes=range(cf.num_seg_classes))) - all_transforms = Compose(my_transforms) - #MTAugmenter creates iterator from data iterator data_gen after applying the composed transform all_transforms - multithreaded_generator = MultiThreadedAugmenter(data_gen, all_transforms, num_processes=cf.n_workers, - seeds=np.random.randint(0,cf.n_workers*2,size=cf.n_workers)) - return multithreaded_generator - - -def get_train_generators(cf, logger, data_statistics=True): - """ - wrapper function for creating the training batch generator pipeline. returns the train/val generators - need to select cv folds on patient level, but be able to include both breasts of each patient. 
- """ - dataset = Dataset(cf) - dataset.init_FoldGenerator(cf.seed, cf.n_cv_splits) - dataset.generate_splits(check_file=os.path.join(cf.exp_dir, 'fold_ids.pickle')) - set_splits = dataset.fg.splits - - test_ids, val_ids = set_splits.pop(cf.fold), set_splits.pop(cf.fold - 1) - train_ids = np.concatenate(set_splits, axis=0) - - if cf.held_out_test_set: - train_ids = np.concatenate((train_ids, test_ids), axis=0) - test_ids = [] - - train_data = {k: v for (k, v) in dataset.data.items() if k in train_ids} - val_data = {k: v for (k, v) in dataset.data.items() if k in val_ids} - - logger.info("data set loaded with: {} train / {} val / {} test patients".format(len(train_ids), len(val_ids), - len(test_ids))) - if data_statistics: - dataset.calc_statistics(subsets={"train": train_ids, "val": val_ids, "test": test_ids}, - plot_dir=os.path.join(cf.plot_dir, "data_stats_fold_"+str(cf.fold))) - - batch_gen = {} - batch_gen['train'] = create_data_gen_pipeline(cf, train_data, do_aug=True) - batch_gen[cf.val_mode] = create_data_gen_pipeline(cf, val_data, do_aug=False, sample_pids_w_replace=False) - batch_gen['n_val'] = cf.num_val_batches if cf.num_val_batches!="all" else len(val_data) - - return batch_gen - -def get_test_generator(cf, logger): - """ - if get_test_generators is called multiple times in server env, every time of - Dataset initiation rsync will check for copying the data; this should be okay - since rsync will not copy if files already exist in destination. - """ - - if cf.held_out_test_set: - sourcedir = cf.test_data_sourcedir - test_ids = None - else: - sourcedir = None - with open(os.path.join(cf.exp_dir, 'fold_ids.pickle'), 'rb') as handle: - set_splits = pickle.load(handle) - test_ids = set_splits[cf.fold] - - - test_set = Dataset(cf, test_ids, data_sourcedir=sourcedir) - logger.info("data set loaded with: {} test patients".format(len(test_set.set_ids))) - batch_gen = {} - batch_gen['test'] = PatientBatchIterator(cf, test_set.data) - batch_gen['n_test'] = len(test_set.set_ids) if cf.max_test_patients=="all" else min(cf.max_test_patients, len(test_set.set_ids)) - - return batch_gen - -def main(): - total_stime = time.time() - times = {} - - CUDA = torch.cuda.is_available() - print("CUDA available: ", CUDA) - - - #cf.server_env = True - #cf.data_dir = "experiments/dev_data" - - cf.exp_dir = "experiments/dev/" - cf.plot_dir = cf.exp_dir+"plots" - os.makedirs(cf.exp_dir, exist_ok=True) - cf.fold = 0 - logger = utils.get_logger(cf.exp_dir) - - gens = get_train_generators(cf, logger) - train_loader = gens['train'] - - #for i in range(train_loader.dataset_length): - # print("batch", i) - stime = time.time() - ex_batch = next(train_loader) - # plg.view_batch(cf, ex_batch, out_file="experiments/dev/dev_extrainbatch.png", has_colorchannels=True, isRGB=True) - times["train_batch"] = time.time()-stime - - - val_loader = gens['val_sampling'] - stime = time.time() - ex_batch = next(val_loader) - times["val_batch"] = time.time()-stime - stime = time.time() - plg.view_batch(cf, ex_batch, out_file="experiments/dev/dev_exvalbatch.png", has_colorchannels=True, isRGB=True, show_gt_boxes=False) - times["val_plot"] = time.time()-stime - - test_loader = get_test_generator(cf, logger)["test"] - stime = time.time() - ex_batch = next(test_loader) - times["test_batch"] = time.time()-stime - #plg.view_batch(cf, ex_batch, out_file="experiments/dev/dev_expatientbatch.png", has_colorchannels=True, isRGB=True) - - print(ex_batch["data"].shape) - - - print("Times recorded throughout:") - for (k,v) in times.items(): - 
print(k, "{:.2f}".format(v)) - - mins, secs = divmod((time.time() - total_stime), 60) - h, mins = divmod(mins, 60) - t = "{:d}h:{:02d}m:{:02d}s".format(int(h), int(mins), int(secs)) - print("{} total runtime: {}".format(os.path.split(__file__)[1], t)) - - - -if __name__=="__main__": - start_time = time.time() - - main() - - print("Program runtime in s: ", '{:.2f}'.format(time.time()-start_time)) \ No newline at end of file diff --git a/datasets/cityscapes/preprocessing.py b/datasets/cityscapes/preprocessing.py deleted file mode 100644 index 56c8c20..0000000 --- a/datasets/cityscapes/preprocessing.py +++ /dev/null @@ -1,267 +0,0 @@ -import sys -import os -from multiprocessing import Pool -import time -import pickle - -import numpy as np - -from PIL import Image as pil -from matplotlib import pyplot as plt - -sys.path.append("../") -import data_manager as dmanager - -from configs import Configs -cf = configs() - - -""" -""" - -def load_obj(file_path): - with open(file_path, 'rb') as handle: - return pickle.load(handle) - -def save_obj(obj, path): - """Pickle a python object.""" - with open(path, 'wb') as f: - pickle.dump(obj, f, pickle.HIGHEST_PROTOCOL) - -def merge_labelids(target, cf=cf): - """relabel preprocessing id to training id according to config.labels - :param target: np.array hxw holding the annotation (labelids at pixel positions) - :cf: The configurations file - """ - for i in range(target.shape[0]): #Iterate over height. - for j in range(target.shape[1]): #Iterate over width - target[i][j] = cf.ppId2id[int(target[i][j])] - - return target - -def generate_detection_labels(target, cf=cf): - """labels suitable to be used with batchgenerators.ConvertSegToBoundingBoxCoordinates. - Flaw: cannot handle more than 2 segmentation classes (fg/bg). - --> seg-info is lost, but not interested in seg rn anyway. - :param target: expected as instanceIds img - The pixel values encode both, class and the individual instance. - The integer part of a division by 1000 of each ID provides the class ID, - as described in labels.py. The remainder is the instance ID. If a certain - annotation describes multiple instances, then the pixels have the regular - ID of that class. - """ - - unique_IDs = np.unique(target) - roi_classes = [] - - objs_in_img = 0 - for i, instanceID in enumerate(unique_IDs): - if instanceID > max(list(cf.ppId2id.keys())): - instance_classID = instanceID // 1000 - else: - # this is the group case (only class id assigned, no instance id) - instance_classID = instanceID - if cf.ppId2id[instance_classID]!=0: - #discard this whole sample since it has group instead of - #single instance annotations for a non-bg class - return None, None - - if cf.ppId2id[instance_classID]!=0: - #only pick reasonable objects, exclude road, sky, etc. 
- roi_classes.append(cf.ppId2id[instance_classID]) - objs_in_img+=1 #since 0 is bg - target[target==instanceID] = objs_in_img - else: - target[target==instanceID] = 0 - - return target, roi_classes - -class Preprocessor(): - - def __init__(self, cf, cities): - - self._cf = cf.prepro - - self.rootpath = cf.prepro['data_dir'] - self.set_splits = self._cf["set_splits"] - self.cities = cities - self.datapath = cf.datapath - self.targetspath = cf.targetspath - self.targettype = cf.prepro["targettype"] - - self.img_t_size = cf.prepro["img_target_size"] - self.target_t_size = self.img_t_size - - self.rootpath_out = cf.prepro["output_directory"] - - self.info_dict = {} - """info_dict: will hold {img_identifier: img_dict} with - img_dict = {id: img_identifier, img:img_path, seg:seg_path, - roi_classes:roiclasses} - """ - - def load_from_path_to_path(self, set_split, max_num=None): - """composes data and corresponding labels paths (to .png-files). - - assumes data tree structure: datapath-|-->city1-->img1.png,img2.png,... - |-->city2-->img1.png, ... - """ - data = [] - labels = [] - num=0 - for city in self.cities[set_split]: - path = os.path.join(self.rootpath, self.datapath, set_split, city) - lpath = os.path.join(self.rootpath,self.targetspath,set_split, city) - - files_in_dir = os.listdir(path) - for file in files_in_dir: - split = os.path.splitext(file) - if split[1].lower() == ".png": - num+=1 - filetag = file[:-(len(self.datapath)+3)] - data.append(os.path.join(path,file)) - labels.append(os.path.join(lpath,filetag+self.targettype+".png")) - - if num==max_num: - break - if num==max_num: - break - - return data, labels - - def prep_img(self, args): - """suited for multithreading. - :param args: (img_path, targ_path) - """ - - img_path, trg_path = args[0], args[1] - - img_rel_path = img_path[len(self.rootpath):] - trg_rel_path = trg_path[len(self.rootpath):] - - _path, img_name = os.path.split(img_path) - img_identifier = "".join(img_name.split("_")[:3]) - img_info_dict = {} #entry of img_identifier in full info_dict - - img, target = pil.open(img_path), pil.open(trg_path) - img, target = img.resize(self.img_t_size[::-1]), target.resize(self.target_t_size[::-1]) - img, target = np.array(img), np.array(target) #shapes y,x(,c) - img = np.transpose(img, axes=(2,0,1)) #shapes (c,)y,x - - target, roi_classes = generate_detection_labels(target) - if target is None: - return (img_identifier, target) - img_info_dict["roi_classes"] = roi_classes - - path = os.path.join(self.rootpath_out,*img_rel_path.split(os.path.sep)[:-1]) - os.makedirs(path, exist_ok=True) - - img_path = os.path.join(self.rootpath_out, img_rel_path[:-3]+"npy") - - #img.save(img_path) - img_info_dict["img"] = img_rel_path[:-3]+"npy" - np.save(img_path, img) - - path = os.path.join(self.rootpath_out,*trg_rel_path.split(os.path.sep)[:-1]) - os.makedirs(path, exist_ok=True) - t_path = os.path.join(self.rootpath_out, trg_rel_path)[:-3]+"npy" - #target.save(t_path) - img_info_dict["seg"] = trg_rel_path[:-3]+"npy" - np.save(t_path, target) - - print("\rSaved npy images and targets of shapes {}, {} to files\n {},\n {}". 
\ - format(img.shape, target.shape, img_path, t_path), flush=True, end="") - - return (img_identifier, img_info_dict) - - def prep_imgs(self, max_num=None, processes=4): - self.info_dict = {} - self.discarded = [] - os.makedirs(self.rootpath_out, exist_ok=True) - for set_split in self.set_splits: - data, targets = self.load_from_path_to_path(set_split, max_num=max_num) - - print(next(zip(data, targets))) - p = Pool(processes) - - img_info_dicts = p.map(self.prep_img, zip(data, targets)) - - p.close() - p.join() - - self.info_dict.update({id_:dict_ for (id_,dict_) in img_info_dicts if dict_ is not None}) - self.discarded += [id_ for (id_, dict_) in img_info_dicts if dict_ is None] - #list of samples discarded due to group instead of single instance annotation - - def finish(self): - total_items = len(self.info_dict)+len(self.discarded) - - print("\n\nSamples discarded: {}/{}={:.1f}%, identifiers:".format(len(self.discarded), - total_items, len(self.discarded)/total_items*100)) - for id_ in self.discarded: - print(id_) - - save_obj(self.info_dict, self._cf["info_dict_path"]) - - - def convert_copy_npz(self): - if not self._cf["npz_dir"]: - return - print("converting & copying to npz dir", self._cf['npz_dir']) - os.makedirs(self._cf['npz_dir'], exist_ok=True) - save_obj(self.info_dict, os.path.join(self._cf['npz_dir'], - self._cf['info_dict_path'].split("/")[-1])) - - dmanager.pack_dataset(self._cf["output_directory"], self._cf["npz_dir"], recursive=True, verbose=False) - - - def verification(self, max_num=None): - print("\n\n\nVerification\n") - for i, k in enumerate(self.info_dict): - if max_num is not None and i==max_num: - break - - subject = self.info_dict[k] - - seg = np.load(os.path.join(self.rootpath_out, subject["seg"])) - - #print("seg values", np.unique(seg)) - print("nr of objects", len(subject["roi_classes"])) - print("nr of objects should equal highest seg value, fulfilled?", - np.max(seg)==len(subject["roi_classes"])) - #print("roi_classes", subject["roi_classes"]) - - img = np.transpose(np.load(os.path.join(self.rootpath_out, subject["img"])), axes=(1,2,0)) - print("img shp", img.shape) - plt.imshow(img) - - -def main(): - #cf.set_splits = ["train"] - #cities = {'train':['dusseldorf'], 'val':['frankfurt']} #cf.cities - cities= cf.cities - - pp = Preprocessor(cf, cities) - pp.prep_imgs(max_num=None, processes=8) - pp.finish() - - #pp.convert_copy_npz() - - pp.verification(1) - - - - - - - return - -if __name__=="__main__": - stime = time.time() - - main() - - mins, secs = divmod((time.time() - stime), 60) - h, mins = divmod(mins, 60) - t = "{:d}h:{:02d}m:{:02d}s".format(int(h), int(mins), int(secs)) - print("Prepro program runtime: {}".format(t)) diff --git a/datasets/legacy/convert_folds_ids.py b/datasets/legacy/convert_folds_ids.py deleted file mode 100644 index ba16b34..0000000 --- a/datasets/legacy/convert_folds_ids.py +++ /dev/null @@ -1,148 +0,0 @@ -""" -Created at 28.05.19 16:46 -@author: gregor -""" - -import os -import sys -import subprocess - -import pickle -import numpy as np -import pandas as pd -from collections import OrderedDict - -import utils.exp_utils as utils - -def get_cf(dataset_name, exp_dir=""): - - cf_path = os.path.join('datasets', dataset_name, exp_dir, "configs.py") - cf_file = utils.import_module('configs', cf_path) - - return cf_file.Configs() - -def vector(item): - """ensure item is vector-like (list or array or tuple) - :param item: anything - """ - if not isinstance(item, (list, tuple, np.ndarray)): - item = [item] - return item - -def 
load_dataset(cf, subset_ixs=None): - """ - loads the dataset. if deployed in cloud also copies and unpacks the data to the working directory. - :param subset_ixs: subset indices to be loaded from the dataset. used e.g. for testing to only load the test folds. - :return: data: dictionary with one entry per patient (in this case per patient-breast, since they are treated as - individual images for training) each entry is a dictionary containing respective meta-info as well as paths to the preprocessed - numpy arrays to be loaded during batch-generation - """ - - p_df = pd.read_pickle(os.path.join(cf.pp_data_path, cf.input_df_name)) - - exclude_pids = ["0305a", "0447a"] # due to non-bg segmentation but bg mal label in nodules 5728, 8840 - p_df = p_df[~p_df.pid.isin(exclude_pids)] - - if cf.select_prototype_subset is not None: - prototype_pids = p_df.pid.tolist()[:cf.select_prototype_subset] - p_df = p_df[p_df.pid.isin(prototype_pids)] - logger.warning('WARNING: using prototyping data subset!!!') - if subset_ixs is not None: - subset_pids = [np.unique(p_df.pid.tolist())[ix] for ix in subset_ixs] - p_df = p_df[p_df.pid.isin(subset_pids)] - - print('subset: selected {} instances from df'.format(len(p_df))) - - pids = p_df.pid.tolist() - cf.data_dir = cf.pp_data_path - - - imgs = [os.path.join(cf.data_dir, '{}_img.npy'.format(pid)) for pid in pids] - segs = [os.path.join(cf.data_dir,'{}_rois.npz'.format(pid)) for pid in pids] - orig_class_targets = p_df['class_target'].tolist() - - data = OrderedDict() - for ix, pid in enumerate(pids): - data[pid] = {'data': imgs[ix], 'seg': segs[ix], 'pid': pid} - data[pid]['fg_slices'] = np.array(p_df['fg_slices'].tolist()[ix]) - if 'class' in cf.prediction_tasks: - # malignancy scores are binarized: (benign: 1-2 --> cl 1, malignant: 3-5 --> cl 2) - raise NotImplementedError - # todo need to consider bg - data[pid]['class_targets'] = np.array([ [2 if ii >= 3 else 1 for ii in four_fold_targs] for four_fold_targs in orig_class_targets[ix]]) - else: - data[pid]['class_targets'] = np.array([ [1 if ii>0 else 0 for ii in four_fold_targs] for four_fold_targs in orig_class_targets[ix]], dtype='uint8') - if any(['regression' in task for task in cf.prediction_tasks]): - data[pid]["regression_targets"] = np.array([ [vector(v) for v in four_fold_targs] for four_fold_targs in orig_class_targets[ix] ], dtype='float16') - data[pid]["rg_bin_targets"] = np.array([ [cf.rg_val_to_bin_id(v) for v in four_fold_targs] for four_fold_targs in data[pid]["regression_targets"]], dtype='uint8') - - cf.roi_items = cf.observables_rois[:] - cf.roi_items += ['class_targets'] - if any(['regression' in task for task in cf.prediction_tasks]): - cf.roi_items += ['regression_targets'] - cf.roi_items += ['rg_bin_targets'] - - return data - - -def get_patient_identifiers(cf, fold_lists): - - - all_data = load_dataset(cf) - all_pids_list = np.unique([v['pid'] for (k, v) in all_data.items()]) - - - verifier = [] #list of folds - for fold in range(cf.n_cv_splits): - train_ix, val_ix, test_ix, fold_nr = fold_lists[fold] - assert fold==fold_nr - test_ids = [all_pids_list[ix] for ix in test_ix] - for ix, arr in enumerate(verifier): - inter = np.intersect1d(test_ids, arr) - #print("intersect of fold {} with fold {}: {}".format(fold, ix, inter)) - assert len(inter)==0 - verifier.append(test_ids) - - - return verifier - -def convert_folds_ids(exp_dir): - import inference_analysis - cf = get_cf('lidc', exp_dir=exp_dir) - cf.exp_dir = exp_dir - with open(os.path.join(exp_dir, 'fold_ids.pickle'), 'rb') as f: - 
fids = pickle.load(f) - - pid_fold_splits = get_patient_identifiers(cf, fids) - - with open(os.path.join(exp_dir, 'fold_real_ids.pickle'), 'wb') as handle: - pickle.dump(pid_fold_splits, handle) - - - #inference_analysis.find_pid_in_splits('0811a', exp_dir=exp_dir) - return - - -def copy_to_new_exp_dir(old_dir, new_dir): - - - cp_ids = r"rsync {} {}".format(os.path.join(old_dir, 'fold_real_ids.pickle'), new_dir) - rn_ids = "mv {} {}".format(os.path.join(new_dir, 'fold_real_ids.pickle'), os.path.join(new_dir, 'fold_ids.pickle')) - cp_params = r"""rsync -a --include='*/' --include='*best_params.pth' --exclude='*' --prune-empty-dirs - {} {}""".format(old_dir, new_dir) - cp_ranking = r"""rsync -a --include='*/' --include='epoch_ranking.npy' --exclude='*' --prune-empty-dirs - {} {}""".format(old_dir, new_dir) - cp_results = r"""rsync -a --include='*/' --include='pred_results.pkl' --exclude='*' --prune-empty-dirs - {} {}""".format(old_dir, new_dir) - - for cmd in [cp_ids, rn_ids, cp_params, cp_ranking, cp_results]: - subprocess.call(cmd, shell=True) - print("Setup {} for inference with ids, params from {}".format(new_dir, old_dir)) - - - -if __name__=="__main__": - exp_dir = '/home/gregor/networkdrives/E132-Cluster-Projects/lidc_sa/experiments/ms12345_mrcnn3d_rgbin_bs8' - new_exp_dir = '/home/gregor/Documents/medicaldetectiontoolkit/datasets/lidc/experiments/ms12345_mrcnn3d_rgbin_copiedparams' - #convert_folds_ids(exp_dir) - copy_to_new_exp_dir(exp_dir, new_exp_dir) \ No newline at end of file diff --git a/datasets/lidc/annotated_xml_file_Mar 2010.rtf b/datasets/lidc/annotated_xml_file_Mar 2010.rtf new file mode 100644 index 0000000..c7a4b20 --- /dev/null +++ b/datasets/lidc/annotated_xml_file_Mar 2010.rtf @@ -0,0 +1,25171 @@ [RTF file body omitted: ~25,171 added lines of RTF markup, i.e. the font table and document preamble of the annotated example XML file]
diff --git a/datasets/lidc/analyze_dataset.py b/datasets/lidc/analyze_dataset.py deleted file mode 100644 index cc79b0c..0000000 --- a/datasets/lidc/analyze_dataset.py +++ /dev/null @@ -1,14 +0,0 @@ -""" -Created at 29/03/2019 19:20 -@author: gregor -""" - - - -if __name__ == "__main__": - - - - - - pass \ No newline at end of file
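Note on the fold-ID conversion removed above: fold_ids.pickle stores per-fold index tuples (train_ix, val_ix, test_ix, fold_nr), and get_patient_identifiers maps the test indices back to patient IDs while asserting that no patient appears in the test set of more than one fold. A minimal sketch of that disjointness check, with hypothetical fold_lists/all_pids arguments standing in for the repo's own loaders:

    import numpy as np

    def test_sets_disjoint(fold_lists, all_pids):
        # fold_lists: list of (train_ix, val_ix, test_ix, fold_nr) tuples as stored in fold_ids.pickle.
        # all_pids:   ordered list/array of unique patient identifiers that the indices refer to.
        seen = []  # test-set pids of folds checked so far
        for fold, (train_ix, val_ix, test_ix, fold_nr) in enumerate(fold_lists):
            assert fold == fold_nr, "fold entries are expected to be stored in order"
            test_pids = [all_pids[ix] for ix in test_ix]
            for prev_fold, prev_pids in enumerate(seen):
                overlap = np.intersect1d(test_pids, prev_pids)
                assert len(overlap) == 0, "folds {} and {} share test pids: {}".format(fold, prev_fold, overlap)
            seen.append(test_pids)
        return seen  # per-fold test pids, e.g. for dumping to fold_real_ids.pickle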
+{\f115\fnil\fcharset0\fprq2{\*\panose 00020308080206010101}Gloucester MT Extra Condensed;}{\f116\fnil\fcharset0\fprq2{\*\panose 00020205020503050203}Goudy Old Style;}{\f117\fnil\fcharset0\fprq2{\*\panose 00020b07060409020602}Haettenschweiler;} +{\f118\fnil\fcharset0\fprq2{\*\panose 0004040505050a020207}Harrington;}{\f119\fnil\fcharset0\fprq2{\*\panose 00020005030000000200}Helvetica Neue;}{\f120\fnil\fcharset0\fprq2{\*\panose 0002000a060000000200}Helvetica Neue Black Condensed;} +{\f121\fnil\fcharset0\fprq2{\*\panose 00020008060000000200}Helvetica Neue Bold Condensed;}{\f122\fnil\fcharset0\fprq2{\*\panose 00020004030000000200}Helvetica Neue Light;} +{\f123\fnil\fcharset0\fprq2{\*\panose 00020002060000000200}Helvetica Neue UltraLight;}{\f124\fnil\fcharset0\fprq2{\*\panose 00020005050000000200}Herculanum;}{\f125\fnil\fcharset0\fprq2{\*\panose 00020306020505060202}Hoefler Text;} +{\f126\fnil\fcharset2\fprq2{\*\panose 00000000000000000000}Hoefler Text Ornaments;}{\f127\fnil\fcharset0\fprq2{\*\panose 00020b08060309020502}Impact;}{\f128\fnil\fcharset0\fprq2{\*\panose 00040206050603030302}Imprint MT Shadow;} +{\f129\fnil\fcharset0\fprq2{\*\panose 00020005000000000000}InaiMathi;}{\f130\fnil\fcharset0\fprq2{\*\panose 00020005000000000000}Jazz LET;}{\f131\fnil\fcharset0\fprq2{\*\panose 00040307050d0c020207}Kino MT;} +{\f132\fnil\fcharset0\fprq2{\*\panose 00000000000000000000}Lucida Blackletter;}{\f133\fnil\fcharset0\fprq2{\*\panose 00020406020505050203}Lucida Bright;}{\f134\fnil\fcharset0\fprq2{\*\panose 00030101010101010101}Lucida Calligraphy;} +{\f135\fnil\fcharset0\fprq2{\*\panose 00020606020505050202}Lucida Fax;}{\f136\fnil\fcharset0\fprq2{\*\panose 00030101010101010101}Lucida Handwriting;}{\f137\fnil\fcharset0\fprq2{\*\panose 00020b06020305040202}Lucida Sans;} +{\f138\fnil\fcharset0\fprq2{\*\panose 00020b05090305040302}Lucida Sans Typewriter;}{\f139\fnil\fcharset0\fprq2{\*\panose 00020004000000000000}Marker Felt;}{\f140\fnil\fcharset0\fprq2{\*\panose 00030208020606020702}Matura MT Script Capitals;} +{\f141\fnil\fcharset0\fprq2{\*\panose 00030907020304070204}Mistral;}{\f142\fnil\fcharset0\fprq2{\*\panose 00020707040705050203}Modern No. 
20;}{\f143\fnil\fcharset0\fprq2{\*\panose 00000004000000000000}Mona Lisa Solid ITC TT;} +{\f144\fnil\fcharset0\fprq2{\*\panose 00020005000000000000}Monaco;}{\f145\fnil\fcharset0\fprq2{\*\panose 00030101010102010101}Monotype Corsiva;}{\f146\fnil\fcharset2\fprq2{\*\panose 00020005000000000000}MT Extra;} +{\f147\fnil\fcharset0\fprq2{\*\panose 00020b05040202030202}News Gothic MT;}{\f148\fnil\fcharset0\fprq2{\*\panose 00040506020807020202}Onyx;}{\f149\fnil\fcharset0\fprq2{\*\panose 00020005030600000200}Optima;} +{\f150\fnil\fcharset0\fprq2{\*\panose 0002000b030000000200}Optima ExtraBlack;}{\f151\fnil\fcharset0\fprq2{\*\panose 00020005000000000000}Palatino;}{\f152\fnil\fcharset0\fprq2{\*\panose 00020b06020402000203}Papyrus;} +{\f153\fnil\fcharset0\fprq2{\*\panose 00020005000000000000}Party LET;}{\f154\fnil\fcharset0\fprq2{\*\panose 00020205020605050208}Perpetua Titling MT;}{\f155\fnil\fcharset0\fprq2{\*\panose 00020200000000000000}Plantagenet Cherokee;} +{\f156\fnil\fcharset0\fprq2{\*\panose 00040506030a06020202}Playbill;}{\f157\fnil\fcharset0\fprq2{\*\panose 00000004000000000000}Princetown LET;}{\f158\fnil\fcharset0\fprq2{\*\panose 00020606030202050204}Rockwell;} +{\f159\fnil\fcharset0\fprq2{\*\panose 00020609030405050204}Rockwell Extra Bold;}{\f160\fnil\fcharset0\fprq2{\*\panose 00020005000000000000}Santa Fe LET;}{\f161\fnil\fcharset0\fprq2{\*\panose 00020005000000000000}Savoye LET;} +{\f162\fnil\fcharset0\fprq2{\*\panose 00000004000000000000}SchoolHouse Printed A;}{\f163\fnil\fcharset0\fprq2{\*\panose 00020d05020202040202}Skia;}{\f164\fnil\fcharset0\fprq2{\*\panose 00020006030800000900}Snell Roundhand;} +{\f165\fnil\fcharset0\fprq2{\*\panose 0002000a040900000900}Snell Roundhand Black;}{\f166\fnil\fcharset0\fprq2{\*\panose 00020008030900000900}Snell Roundhand Bold;}{\f167\fnil\fcharset0\fprq2{\*\panose 00040409050d08020204}Stencil;} +{\f168\fnil\fcharset0\fprq2{\*\panose 00000007000000000000}Stone Sans ITC TT-Bold;}{\f169\fnil\fcharset0\fprq2{\*\panose 00000006000000000000}Stone Sans ITC TT-Semi;}{\f170\fnil\fcharset0\fprq2{\*\panose 00000006000000000000}Stone Sans ITC TT-SemiIta;} +{\f171\fnil\fcharset0\fprq2{\*\panose 00000004000000000000}Synchro LET;}{\f172\fnil\fcharset0\fprq2{\*\panose 00020b06030202020202}Trebuchet MS;}{\f173\fnil\fcharset2\fprq2{\*\panose 00050301020105090607}Webdings;} +{\f174\fnil\fcharset0\fprq2{\*\panose 00020a0a070505050204}Wide Latin;}{\f175\fnil\fcharset2\fprq2{\*\panose 00050201020105070707}Wingdings 2;}{\f176\fnil\fcharset2\fprq2{\*\panose 00050401020108070707}Wingdings 3;} +{\f177\fnil\fcharset2\fprq2{\*\panose 00050201020107040206}Zapf Dingbats;}{\f178\fnil\fcharset0\fprq2{\*\panose 0003030300040707070c}Zapfino;}{\f179\fnil\fcharset88\fprq2{\*\panose 00020005000000000000}Courier CE;} +{\f180\fnil\fcharset88\fprq2{\*\panose 00020b05030304040402}Geneva CE;}{\f181\fnil\fcharset88\fprq2{\*\panose 00020005000000000000}Helvetica CE;}{\f182\fnil\fcharset88\fprq2{\*\panose 00020004000000000000}Krungthep;} +{\f183\fnil\fcharset88\fprq2{\*\panose 00020b06000405020202}Lucida Grande CE;}{\f184\fnil\fcharset88\fprq2{\*\panose 00020005000000000000}Monaco CE;}{\f185\fnil\fcharset88\fprq2{\*\panose 00000004000000000000}Silom;} +{\f186\fnil\fcharset88\fprq2{\*\panose 00020005000000000000}Times CE;}{\f187\fnil\fcharset89\fprq2{\*\panose 00000004000000000000}Ayuthaya;}{\f188\fnil\fcharset89\fprq2{\*\panose 00000004000000000000}Charcoal CY;} +{\f189\fnil\fcharset89\fprq2{\*\panose 00000004000000000000}Geneva CY;}{\f190\fnil\fcharset89\fprq2{\*\panose 
00000004000000000000}Helvetica CY;}{\f191\fnil\fcharset89\fprq2{\*\panose 00020b06000405020202}Lucida Grande CY;} +{\f192\fnil\fcharset89\fprq2{\*\panose 00020005000000000000}Monaco CY;}{\f193\fnil\fcharset89\fprq2{\*\panose 00000004000000000000}Sathu;}{\f194\fnil\fcharset89\fprq2{\*\panose 00000004000000000000}Thonburi;} +{\f195\fnil\fcharset89\fprq2{\*\panose 00020005000000000000}Times CY;}{\f196\fnil\fcharset81\fprq2{\*\panose 00020005000000000000}Apple LiGothic Medium;}{\f197\fnil\fcharset81\fprq2{\*\panose 00020005000000000000}Apple LiSung Light;} +{\f198\fnil\fcharset81\fprq2{\*\panose 00020106010001010101}BiauKai;}{\f199\fnil\fcharset81\fprq2 \u20791 _\u23435 _ Pro;}{\f200\fnil\fcharset81\fprq2 \u20791 _\u-24879 _ Pro;}{\f201\fnil\fcharset80\fprq2 GB18030 Bitmap;} +{\f202\fnil\fcharset80\fprq2{\*\panose 00020005000000000000}Hei;}{\f203\fnil\fcharset80\fprq2{\*\panose 00020005000000000000}Kai;}{\f204\fnil\fcharset80\fprq2 \u21326 _\u25991 _\u20223 _\u23435 _;} +{\f205\fnil\fcharset80\fprq2 \u21326 _\u25991 _\u-24879 _\u20307 _;}{\f206\fnil\fcharset80\fprq2 \u21326 _\u25991 _\u26999 _\u20307 _;}{\f207\fnil\fcharset80\fprq2 \u21326 _\u25991 _\u23435 _\u20307 _;} +{\f208\fnil\fcharset80\fprq2 \u21326 _\u25991 _\u32454 _\u-24879 _;}{\f209\fnil\fcharset78\fprq2 \u-211 _\u-205 _ \u-208 _\u12468 _\u12471 _\u12483 _\u12463 _;}{\f210\fnil\fcharset78\fprq2 \u-211 _\u-205 _ \u-208 _\u26126 _\u26397 _;} +{\f211\fnil\fcharset78\fprq2{\*\panose 00020b06000000000000}Osaka;}{\f212\fnil\fcharset78\fprq2 Osaka\u8722 _\u31561 _\u24133 _;}{\f213\fnil\fcharset78\fprq2 \u12498 _\u12521 _\u12462 _\u12494 _\u-30254 _\u12468 _ Pro W3;} +{\f214\fnil\fcharset78\fprq2 \u12498 _\u12521 _\u12462 _\u12494 _\u-30254 _\u12468 _ Pro W6;}{\f215\fnil\fcharset78\fprq2 \u12498 _\u12521 _\u12462 _\u12494 _\u-30254 _\u12468 _ Std W8;} +{\f216\fnil\fcharset78\fprq2 \u12498 _\u12521 _\u12462 _\u12494 _\u20024 _\u12468 _ Pro W4;}{\f217\fnil\fcharset78\fprq2 \u12498 _\u12521 _\u12462 _\u12494 _\u26126 _\u26397 _ Pro W3;} +{\f218\fnil\fcharset78\fprq2 \u12498 _\u12521 _\u12462 _\u12494 _\u26126 _\u26397 _ Pro W6;}{\f219\fnil\fcharset79\fprq2 #PC\u-17787 _\u-14224 _;}{\f220\fnil\fcharset79\fprq2 #\u-21119 _\u-16100 _\u-13132 _;} +{\f221\fnil\fcharset79\fprq2 #\u-10940 _\u-20944 _\u-13132 _;}{\f222\fnil\fcharset79\fprq2 #\u-10780 _\u-19236 _\u-18564 _\u-14472 _A;}{\f223\fnil\fcharset79\fprq2{\*\panose 00020005000000000000}AppleGothic;} +{\f224\fnil\fcharset79\fprq2{\*\panose 00020005000000000000}AppleMyungjo;}{\f225\fnil\fcharset79\fprq2 \u-21132 _\u-17988 _;}{\f226\fswiss\fcharset128\fprq2{\*\panose 020b0604020202020204}Arial Unicode MS{\*\falt Times};} +{\f227\fnil\fcharset0\fprq2{\*\panose 00000000000000000000}Bitstream Vera Sans{\*\falt Times New Roman};}{\f228\fnil\fcharset77\fprq0{\*\panose 00000000000000000000}LucidaGrande{\*\falt Lucida Grande};} +{\f229\fnil\fcharset129\fprq1{\*\panose 02030600000101010101}Batang{\*\falt \'a1\'a7IoUAA};}{\f230\fnil\fcharset134\fprq2{\*\panose 02010600030101010101}SimSun{\*\falt ???\'a1\'ec??};} +{\f231\fnil\fcharset136\fprq2{\*\panose 02010601000101010101}PMingLiU{\*\falt !Ps2OcuAe};}{\f232\fmodern\fcharset128\fprq1{\*\panose 020b0609070205080204}MS Gothic{\*\falt ?l?r ?S?V?b?N};} +{\f233\fmodern\fcharset129\fprq1{\*\panose 020b0600000101010101}Dotum{\*\falt \'a2\'aeIi\'a2\'aeE\'a2\'ae\'a9\'ad\'a2\'aeE?o};}{\f234\fmodern\fcharset134\fprq1{\*\panose 02010600030101010101}SimHei{\*\falt o?\'a1\'ec2?\'a1\'ec??};} +{\f235\fmodern\fcharset136\fprq1{\*\panose 
02010609000101010101}MingLiU{\*\falt 2OcuAe};}{\f236\froman\fcharset128\fprq1{\*\panose 02020609040305080305}Mincho{\*\falt ??\u129 _fc};} +{\f237\froman\fcharset129\fprq1{\*\panose 020b0600000101010101}Gulim{\*\falt \'a1\'cb\'a2\'e7\'a1\'a7u\'a2\'ae\'a1\'d7u\'a2\'aeE\'a2\'ae\'a9\'ad\'a1\'a7I\'a2\'aeA};}{\f238\froman\fcharset222\fprq2{\*\panose 02020603050405020304}Angsana New;} +{\f239\froman\fcharset222\fprq2{\*\panose 020b0304020202020204}Cordia New;}{\f240\fnil\fcharset0\fprq2{\*\panose 00000400000000000000}Mangal;}{\f241\fnil\fcharset0\fprq2{\*\panose 02000400000000000000}Latha;} +{\f242\froman\fcharset0\fprq2{\*\panose 010a0502050306030303}Sylfaen;}{\f243\fnil\fcharset0\fprq2{\*\panose 01010600010101010101}Vrinda;}{\f244\fnil\fcharset0\fprq2{\*\panose 02000500000000000000}Raavi;} +{\f245\fnil\fcharset0\fprq2{\*\panose 02000500000000000000}Shruti;}{\f246\froman\fcharset77\fprq2{\*\panose 00000400000000000000}Sendnya;}{\f247\fnil\fcharset0\fprq2{\*\panose 02000500000000000000}Gautami;} +{\f248\fnil\fcharset0\fprq2{\*\panose 00000400000000000000}Tunga;}{\f249\fscript\fcharset0\fprq2{\*\panose 00000000000000000000}Estrangelo Edessa;}{\f250\froman\fcharset0\fprq2{\*\panose 02020503030404060203}Kartika;} +{\f251\froman\fcharset0\fprq0{\*\panose 00000000000000000000}TimesNewRoman{\*\falt Times New Roman};}{\f252\fnil\fcharset2\fprq2{\*\panose 00000000000000000000}Marlett;}{\f253\fmodern\fcharset0\fprq1{\*\panose 020b0609040504020204}Lucida Console;} +{\f254\fswiss\fcharset0\fprq2{\*\panose 020b0602030504020204}Lucida Sans Unicode;}{\f255\fswiss\fcharset0\fprq2{\*\panose 020b0603020102020204}Franklin Gothic Medium;}{\f256\froman\fcharset0\fprq2{\*\panose 02040502050505030304}Palatino Linotype;} +{\f257\fnil\fcharset0\fprq2{\*\panose 00000000000000000000}MV Boli;}{\f258\fswiss\fcharset0\fprq2{\*\panose 020b0604020202020204}Microsoft Sans Serif;}{\f259\fswiss\fcharset0\fprq2{\*\panose 020b0503020202020204}Agency FB;} +{\f260\fdecor\fcharset0\fprq2{\*\panose 04020705040a02060702}Algerian;}{\f261\fswiss\fcharset128\fprq2{\*\panose 020b0604020202020204}@Arial Unicode MS;}{\f262\fswiss\fcharset0\fprq2{\*\panose 020e0602020502020306}Berlin Sans FB;} +{\f263\fdecor\fcharset0\fprq2{\*\panose 04020505051007020d02}Blackadder ITC;}{\f264\froman\fcharset0\fprq2{\*\panose 02070603080606020203}Bodoni MT;}{\f265\froman\fcharset0\fprq2{\*\panose 02070a03080606020203}Bodoni MT Black;} +{\f266\froman\fcharset0\fprq2{\*\panose 02070606080606020203}Bodoni MT Condensed;}{\f267\froman\fcharset0\fprq2{\*\panose 02070706080601050204}Bodoni MT Poster Compressed;}{\f268\fscript\fcharset0\fprq2{\*\panose 03070402050302030203}Bradley Hand ITC;} +{\f269\fdecor\fcharset0\fprq2{\*\panose 04040905080b02020502}Broadway;}{\f270\froman\fcharset0\fprq2{\*\panose 0207040306080b030204}Californian FB;}{\f271\froman\fcharset0\fprq2{\*\panose 020a0402060406010301}Castellar;} +{\f272\froman\fcharset0\fprq2{\*\panose 02030504050205020304}Centaur;}{\f273\fdecor\fcharset0\fprq2{\*\panose 04020404031007020602}Chiller;}{\f274\froman\fcharset0\fprq2{\*\panose 02020904090505020303}Elephant;} +{\f275\fswiss\fcharset0\fprq2{\*\panose 020b0907030504020204}Eras Bold ITC;}{\f276\fswiss\fcharset0\fprq2{\*\panose 020b0805030504020804}Eras Demi ITC;}{\f277\fswiss\fcharset0\fprq2{\*\panose 020b0402030504020804}Eras Light ITC;} +{\f278\fswiss\fcharset0\fprq2{\*\panose 020b0602030504020804}Eras Medium ITC;}{\f279\fdecor\fcharset0\fprq2{\*\panose 04060505060202020a04}Felix Titling;}{\f280\fscript\fcharset0\fprq2{\*\panose 03060902040502070203}Forte;} 
+{\f281\fswiss\fcharset0\fprq2{\*\panose 020b0503020102020204}Franklin Gothic Book;}{\f282\fswiss\fcharset0\fprq2{\*\panose 020b0703020102020204}Franklin Gothic Demi;}{\f283\fswiss\fcharset0\fprq2{\*\panose 020b0706030402020204}Franklin Gothic Demi Cond;} +{\f284\fswiss\fcharset0\fprq2{\*\panose 020b0903020102020204}Franklin Gothic Heavy;}{\f285\fswiss\fcharset0\fprq2{\*\panose 020b0606030402020204}Franklin Gothic Medium Cond;}{\f286\fscript\fcharset0\fprq2{\*\panose 030804020302050b0404}Freestyle Script;} +{\f287\fscript\fcharset0\fprq2{\*\panose 03020402040607040605}French Script MT;}{\f288\fdecor\fcharset0\fprq2{\*\panose 04040504061007020d02}Gigi;}{\f289\fswiss\fcharset0\fprq2{\*\panose 020b0902020104020203}Gill Sans MT Ext Condensed Bold;} +{\f290\fswiss\fcharset0\fprq2{\*\panose 020b0502020104020203}Gill Sans MT;}{\f291\fswiss\fcharset0\fprq2{\*\panose 020b0506020104020203}Gill Sans MT Condensed;}{\f292\fswiss\fcharset0\fprq2{\*\panose 020b0a06020104020203}Gill Sans Ultra Bold Condensed;} +{\f293\froman\fcharset0\fprq2{\*\panose 0202090407030b020401}Goudy Stout;}{\f294\fdecor\fcharset0\fprq2{\*\panose 04030604020f02020d02}Harlow Solid Italic;}{\f295\froman\fcharset0\fprq2{\*\panose 02040502050506030303}High Tower Text;} +{\f296\fdecor\fcharset0\fprq2{\*\panose 04090605060d06020702}Jokerman;}{\f297\fdecor\fcharset0\fprq2{\*\panose 04040403040a02020202}Juice ITC;}{\f298\fscript\fcharset0\fprq2{\*\panose 03050502040202030202}Kristen ITC;} +{\f299\fscript\fcharset0\fprq2{\*\panose 030304020206070d0d06}Kunstler Script;}{\f300\fnil\fcharset2\fprq2{\*\panose 05010100010000000000}MS Outlook;}{\f301\fdecor\fcharset0\fprq2{\*\panose 04030805050802020d02}Magneto;} +{\f302\fswiss\fcharset0\fprq2{\*\panose 020e0502030308020204}Maiandra GD;}{\f303\fdecor\fcharset0\fprq2{\*\panose 04020502070703030202}Niagara Engraved;}{\f304\fdecor\fcharset0\fprq2{\*\panose 04020502070702020202}Niagara Solid;} +{\f305\fmodern\fcharset0\fprq2{\*\panose 02010509020102010303}OCR A Extended;}{\f306\fscript\fcharset0\fprq2{\*\panose 03040902040508030806}Old English Text MT;}{\f307\fscript\fcharset0\fprq2{\*\panose 030303020206070c0b05}Palace Script MT;} +{\f308\fscript\fcharset0\fprq2{\*\panose 03040602040708040804}Parchment;}{\f309\froman\fcharset0\fprq2{\*\panose 02020502060401020303}Perpetua;}{\f310\froman\fcharset0\fprq2{\*\panose 02080502050505020702}Poor Richard;} +{\f311\fscript\fcharset0\fprq2{\*\panose 03060402040406080204}Pristina;}{\f312\fscript\fcharset0\fprq2{\*\panose 03070502040507070304}Rage Italic;}{\f313\fdecor\fcharset0\fprq2{\*\panose 04040805050809020602}Ravie;} +{\f314\froman\fcharset0\fprq2{\*\panose 02060603050405020104}Rockwell Condensed;}{\f315\fscript\fcharset0\fprq2{\*\panose 030604020304060b0204}Informal Roman;}{\f316\fscript\fcharset0\fprq2{\*\panose 03040602040607080904}Script MT Bold;} +{\f317\fdecor\fcharset0\fprq2{\*\panose 04020904020102020604}Showcard Gothic;}{\f318\fdecor\fcharset0\fprq2{\*\panose 04040a07060a02020202}Snap ITC;}{\f319\fswiss\fcharset0\fprq2{\*\panose 020b0602020104020603}Tw Cen MT;} +{\f320\fswiss\fcharset0\fprq2{\*\panose 020b0606020104020203}Tw Cen MT Condensed;}{\f321\fdecor\fcharset0\fprq2{\*\panose 04020404030d07020202}Tempus Sans ITC;}{\f322\fscript\fcharset0\fprq2{\*\panose 03070502030502020203}Viner Hand ITC;} +{\f323\fscript\fcharset0\fprq2{\*\panose 03020602050506090804}Vivaldi;}{\f324\fscript\fcharset0\fprq2{\*\panose 03050402040407070305}Vladimir Script;}{\f325\fswiss\fcharset0\fprq2{\*\panose 020e0802020502020306}Berlin Sans FB Demi;} 
+{\f326\fswiss\fcharset0\fprq2{\*\panose 020b0604030504040204}MS Reference Sans Serif;}{\f327\fnil\fcharset2\fprq2{\*\panose 05000500000000000000}MS Reference Specialty;} +{\f328\fswiss\fcharset0\fprq2{\*\panose 020b0803020202020204}Tw Cen MT Condensed Extra Bold;}{\f329\fnil\fcharset2\fprq2{\*\panose 05010101010101010101}Bookshelf Symbol 7;}{\f330\fswiss\fcharset0\fprq2{\*\panose 020b0503030403020204}Myriad Web Pro;} +{\f331\fswiss\fcharset0\fprq2{\*\panose 020b0506030403020204}Myriad Web Pro Condensed;}{\f332\fnil\fcharset129\fprq0{\*\panose 00000000000000000000}@Batang;}{\f333\fnil\fcharset0\fprq0{\*\panose 00000000000000000000}AdvTimes;} +{\f334\fnil\fcharset2\fprq2{\*\panose 05000400000000000000}ExtraS 1;}{\f335\froman\fcharset0\fprq0{\*\panose 00000000000000000000}sans-serif{\*\falt Times New Roman};}{\f336\fnil\fcharset134\fprq0{\*\panose 00000000000000000000}@SimSun;} +{\f337\fswiss\fcharset163\fprq2{\*\panose 00000000000000000000}Microsoft Sans Serif (Vietnames;}{\f338\fswiss\fcharset186\fprq2{\*\panose 00000000000000000000}Franklin Gothic Demi Cond Balti;} +{\f339\fswiss\fcharset161\fprq2{\*\panose 00000000000000000000}Franklin Gothic Medium Cond Gre;}{\f340\fswiss\fcharset186\fprq2{\*\panose 00000000000000000000}Franklin Gothic Medium Cond Bal;} +{\f341\fswiss\fcharset238\fprq2{\*\panose 00000000000000000000}Gill Sans Ultra Bold Condensed ;}{\f342\fswiss\fcharset163\fprq2{\*\panose 00000000000000000000}MS Reference Sans Serif (Vietna;} +{\f343\fswiss\fcharset238\fprq2{\*\panose 00000000000000000000}Tw Cen MT Condensed Extra Bold ;}{\f344\froman\fcharset238\fprq2{\*\panose 00000000000000000000}Times New Roman CE{\*\falt Times New Roman};} +{\f345\froman\fcharset204\fprq2{\*\panose 00000000000000000000}Times New Roman Cyr{\*\falt Times New Roman};}{\f346\froman\fcharset161\fprq2{\*\panose 00000000000000000000}Times New Roman Greek{\*\falt Times New Roman};} +{\f347\froman\fcharset162\fprq2{\*\panose 00000000000000000000}Times New Roman Tur{\*\falt Times New Roman};}{\f348\froman\fcharset177\fprq2{\*\panose 00000000000000000000}Times New Roman (Hebrew){\*\falt Times New Roman};} +{\f349\froman\fcharset178\fprq2{\*\panose 00000000000000000000}Times New Roman (Arabic){\*\falt Times New Roman};}{\f350\froman\fcharset186\fprq2{\*\panose 00000000000000000000}Times New Roman Baltic{\*\falt Times New Roman};} +{\f351\froman\fcharset163\fprq2{\*\panose 00000000000000000000}Times New Roman (Vietnamese){\*\falt Times New Roman};}{\f352\fswiss\fcharset238\fprq2{\*\panose 00000000000000000000}Arial CE{\*\falt Trebuchet MS};} +{\f353\fswiss\fcharset204\fprq2{\*\panose 00000000000000000000}Arial Cyr{\*\falt Trebuchet MS};}{\f354\fswiss\fcharset161\fprq2{\*\panose 00000000000000000000}Arial Greek{\*\falt Trebuchet MS};} +{\f355\fswiss\fcharset162\fprq2{\*\panose 00000000000000000000}Arial Tur{\*\falt Trebuchet MS};}{\f356\fswiss\fcharset177\fprq2{\*\panose 00000000000000000000}Arial (Hebrew){\*\falt Trebuchet MS};} +{\f357\fswiss\fcharset178\fprq2{\*\panose 00000000000000000000}Arial (Arabic){\*\falt Trebuchet MS};}{\f358\fswiss\fcharset186\fprq2{\*\panose 00000000000000000000}Arial Baltic{\*\falt Trebuchet MS};} +{\f359\fswiss\fcharset163\fprq2{\*\panose 00000000000000000000}Arial (Vietnamese){\*\falt Trebuchet MS};}{\f360\fmodern\fcharset238\fprq1{\*\panose 00000000000000000000}Courier New CE;} +{\f361\fmodern\fcharset204\fprq1{\*\panose 00000000000000000000}Courier New Cyr;}{\f362\fmodern\fcharset161\fprq1{\*\panose 00000000000000000000}Courier New 
Greek;}{\f363\fmodern\fcharset162\fprq1{\*\panose 00000000000000000000}Courier New Tur;} +{\f364\fmodern\fcharset177\fprq1{\*\panose 00000000000000000000}Courier New (Hebrew);}{\f365\fmodern\fcharset178\fprq1{\*\panose 00000000000000000000}Courier New (Arabic);} +{\f366\fmodern\fcharset186\fprq1{\*\panose 00000000000000000000}Courier New Baltic;}{\f367\fmodern\fcharset163\fprq1{\*\panose 00000000000000000000}Courier New (Vietnamese);}{\f368\fswiss\fcharset204\fprq2{\*\panose 00000000000000000000}Helvetica Cyr;} +{\f369\fswiss\fcharset161\fprq2{\*\panose 00000000000000000000}Helvetica Greek;}{\f370\fswiss\fcharset162\fprq2{\*\panose 00000000000000000000}Helvetica Tur;}{\f371\fswiss\fcharset177\fprq2{\*\panose 00000000000000000000}Helvetica (Hebrew);} +{\f372\fswiss\fcharset178\fprq2{\*\panose 00000000000000000000}Helvetica (Arabic);}{\f373\fswiss\fcharset186\fprq2{\*\panose 00000000000000000000}Helvetica Baltic;}{\f374\fswiss\fcharset163\fprq2{\*\panose 00000000000000000000}Helvetica (Vietnamese);} +{\f375\froman\fcharset238\fprq2{\*\panose 00000000000000000000}Century CE;}{\f376\froman\fcharset204\fprq2{\*\panose 00000000000000000000}Century Cyr;}{\f377\froman\fcharset161\fprq2{\*\panose 00000000000000000000}Century Greek;} +{\f378\froman\fcharset162\fprq2{\*\panose 00000000000000000000}Century Tur;}{\f379\froman\fcharset186\fprq2{\*\panose 00000000000000000000}Century Baltic;}{\f380\froman\fcharset238\fprq2{\*\panose 00000000000000000000}Sylfaen CE;} +{\f381\froman\fcharset204\fprq2{\*\panose 00000000000000000000}Sylfaen Cyr;}{\f382\froman\fcharset161\fprq2{\*\panose 00000000000000000000}Sylfaen Greek;}{\f383\froman\fcharset162\fprq2{\*\panose 00000000000000000000}Sylfaen Tur;} +{\f384\froman\fcharset186\fprq2{\*\panose 00000000000000000000}Sylfaen Baltic;}{\f385\fswiss\fcharset177\fprq2{\*\panose 00000000000000000000}Arial Unicode MS (Hebrew);} +{\f386\fswiss\fcharset178\fprq2{\*\panose 00000000000000000000}Arial Unicode MS (Arabic);}{\f387\fswiss\fcharset163\fprq2{\*\panose 00000000000000000000}Arial Unicode MS (Vietnamese);} +{\f388\fswiss\fcharset222\fprq2{\*\panose 00000000000000000000}Arial Unicode MS (Thai);}{\f389\fswiss\fcharset238\fprq2{\*\panose 00000000000000000000}Tahoma CE;}{\f390\fswiss\fcharset204\fprq2{\*\panose 00000000000000000000}Tahoma Cyr;} +{\f391\fswiss\fcharset161\fprq2{\*\panose 00000000000000000000}Tahoma Greek;}{\f392\fswiss\fcharset162\fprq2{\*\panose 00000000000000000000}Tahoma Tur;}{\f393\fswiss\fcharset177\fprq2{\*\panose 00000000000000000000}Tahoma (Hebrew);} +{\f394\fswiss\fcharset178\fprq2{\*\panose 00000000000000000000}Tahoma (Arabic);}{\f395\fswiss\fcharset186\fprq2{\*\panose 00000000000000000000}Tahoma Baltic;}{\f396\fswiss\fcharset163\fprq2{\*\panose 00000000000000000000}Tahoma (Vietnamese);} +{\f397\fswiss\fcharset222\fprq2{\*\panose 00000000000000000000}Tahoma (Thai);}{\f398\fswiss\fcharset238\fprq2{\*\panose 00000000000000000000}Calibri CE;}{\f399\fswiss\fcharset204\fprq2{\*\panose 00000000000000000000}Calibri Cyr;} +{\f400\fswiss\fcharset161\fprq2{\*\panose 00000000000000000000}Calibri Greek;}{\f401\fswiss\fcharset162\fprq2{\*\panose 00000000000000000000}Calibri Tur;}{\f402\fswiss\fcharset186\fprq2{\*\panose 00000000000000000000}Calibri Baltic;} +{\f403\froman\fcharset238\fprq2{\*\panose 00000000000000000000}Cambria CE;}{\f404\froman\fcharset204\fprq2{\*\panose 00000000000000000000}Cambria Cyr;}{\f405\froman\fcharset161\fprq2{\*\panose 00000000000000000000}Cambria Greek;} +{\f406\froman\fcharset162\fprq2{\*\panose 00000000000000000000}Cambria 
Tur;}{\f407\froman\fcharset186\fprq2{\*\panose 00000000000000000000}Cambria Baltic;}{\f408\fswiss\fcharset238\fprq2{\*\panose 00000000000000000000}Arial Narrow CE;} +{\f409\fswiss\fcharset204\fprq2{\*\panose 00000000000000000000}Arial Narrow Cyr;}{\f410\fswiss\fcharset161\fprq2{\*\panose 00000000000000000000}Arial Narrow Greek;}{\f411\fswiss\fcharset162\fprq2{\*\panose 00000000000000000000}Arial Narrow Tur;} +{\f412\fswiss\fcharset186\fprq2{\*\panose 00000000000000000000}Arial Narrow Baltic;}{\f413\froman\fcharset204\fprq2{\*\panose 00000000000000000000}Times Cyr;}{\f414\froman\fcharset161\fprq2{\*\panose 00000000000000000000}Times Greek;} +{\f415\froman\fcharset162\fprq2{\*\panose 00000000000000000000}Times Tur;}{\f416\froman\fcharset177\fprq2{\*\panose 00000000000000000000}Times (Hebrew);}{\f417\froman\fcharset178\fprq2{\*\panose 00000000000000000000}Times (Arabic);} +{\f418\froman\fcharset186\fprq2{\*\panose 00000000000000000000}Times Baltic;}{\f419\froman\fcharset163\fprq2{\*\panose 00000000000000000000}Times (Vietnamese);}{\f420\froman\fcharset238\fprq2{\*\panose 00000000000000000000}Georgia CE;} +{\f421\froman\fcharset204\fprq2{\*\panose 00000000000000000000}Georgia Cyr;}{\f422\froman\fcharset161\fprq2{\*\panose 00000000000000000000}Georgia Greek;}{\f423\froman\fcharset162\fprq2{\*\panose 00000000000000000000}Georgia Tur;} +{\f424\froman\fcharset186\fprq2{\*\panose 00000000000000000000}Georgia Baltic;}{\f425\fswiss\fcharset238\fprq2{\*\panose 00000000000000000000}Century Gothic CE;}{\f426\fswiss\fcharset204\fprq2{\*\panose 00000000000000000000}Century Gothic Cyr;} +{\f427\fswiss\fcharset161\fprq2{\*\panose 00000000000000000000}Century Gothic Greek;}{\f428\fswiss\fcharset162\fprq2{\*\panose 00000000000000000000}Century Gothic Tur;}{\f429\fswiss\fcharset186\fprq2{\*\panose 00000000000000000000}Century Gothic Baltic;} +{\f430\fmodern\fcharset238\fprq1{\*\panose 00000000000000000000}Lucida Console CE;}{\f431\fmodern\fcharset204\fprq1{\*\panose 00000000000000000000}Lucida Console Cyr;}{\f432\fmodern\fcharset161\fprq1{\*\panose 00000000000000000000}Lucida Console Greek;} +{\f433\fmodern\fcharset162\fprq1{\*\panose 00000000000000000000}Lucida Console Tur;}{\f434\fswiss\fcharset238\fprq2{\*\panose 00000000000000000000}Lucida Sans Unicode CE;} +{\f435\fswiss\fcharset204\fprq2{\*\panose 00000000000000000000}Lucida Sans Unicode Cyr;}{\f436\fswiss\fcharset161\fprq2{\*\panose 00000000000000000000}Lucida Sans Unicode Greek;} +{\f437\fswiss\fcharset162\fprq2{\*\panose 00000000000000000000}Lucida Sans Unicode Tur;}{\f438\fswiss\fcharset177\fprq2{\*\panose 00000000000000000000}Lucida Sans Unicode (Hebrew);} +{\f439\fswiss\fcharset238\fprq2{\*\panose 00000000000000000000}Verdana CE;}{\f440\fswiss\fcharset204\fprq2{\*\panose 00000000000000000000}Verdana Cyr;}{\f441\fswiss\fcharset161\fprq2{\*\panose 00000000000000000000}Verdana Greek;} +{\f442\fswiss\fcharset162\fprq2{\*\panose 00000000000000000000}Verdana Tur;}{\f443\fswiss\fcharset186\fprq2{\*\panose 00000000000000000000}Verdana Baltic;}{\f444\fswiss\fcharset163\fprq2{\*\panose 00000000000000000000}Verdana (Vietnamese);} +{\f445\fswiss\fcharset238\fprq2{\*\panose 00000000000000000000}Arial Black CE;}{\f446\fswiss\fcharset204\fprq2{\*\panose 00000000000000000000}Arial Black Cyr;}{\f447\fswiss\fcharset161\fprq2{\*\panose 00000000000000000000}Arial Black Greek;} +{\f448\fswiss\fcharset162\fprq2{\*\panose 00000000000000000000}Arial Black Tur;}{\f449\fswiss\fcharset186\fprq2{\*\panose 00000000000000000000}Arial Black 
Baltic;}{\f450\fscript\fcharset238\fprq2{\*\panose 00000000000000000000}Comic Sans MS CE;} +{\f451\fscript\fcharset204\fprq2{\*\panose 00000000000000000000}Comic Sans MS Cyr;}{\f452\fscript\fcharset161\fprq2{\*\panose 00000000000000000000}Comic Sans MS Greek;}{\f453\fscript\fcharset162\fprq2{\*\panose 00000000000000000000}Comic Sans MS Tur;} +{\f454\fscript\fcharset186\fprq2{\*\panose 00000000000000000000}Comic Sans MS Baltic;}{\f455\fswiss\fcharset238\fprq2{\*\panose 00000000000000000000}Impact CE;}{\f456\fswiss\fcharset204\fprq2{\*\panose 00000000000000000000}Impact Cyr;} +{\f457\fswiss\fcharset161\fprq2{\*\panose 00000000000000000000}Impact Greek;}{\f458\fswiss\fcharset162\fprq2{\*\panose 00000000000000000000}Impact Tur;}{\f459\fswiss\fcharset186\fprq2{\*\panose 00000000000000000000}Impact Baltic;} +{\f460\fswiss\fcharset238\fprq2{\*\panose 00000000000000000000}Franklin Gothic Medium CE;}{\f461\fswiss\fcharset204\fprq2{\*\panose 00000000000000000000}Franklin Gothic Medium Cyr;} +{\f462\fswiss\fcharset161\fprq2{\*\panose 00000000000000000000}Franklin Gothic Medium Greek;}{\f463\fswiss\fcharset162\fprq2{\*\panose 00000000000000000000}Franklin Gothic Medium Tur;} +{\f464\fswiss\fcharset186\fprq2{\*\panose 00000000000000000000}Franklin Gothic Medium Baltic;}{\f465\froman\fcharset238\fprq2{\*\panose 00000000000000000000}Palatino Linotype CE;} +{\f466\froman\fcharset204\fprq2{\*\panose 00000000000000000000}Palatino Linotype Cyr;}{\f467\froman\fcharset161\fprq2{\*\panose 00000000000000000000}Palatino Linotype Greek;} +{\f468\froman\fcharset162\fprq2{\*\panose 00000000000000000000}Palatino Linotype Tur;}{\f469\froman\fcharset186\fprq2{\*\panose 00000000000000000000}Palatino Linotype Baltic;} +{\f470\froman\fcharset163\fprq2{\*\panose 00000000000000000000}Palatino Linotype (Vietnamese);}{\f471\fswiss\fcharset238\fprq2{\*\panose 00000000000000000000}Trebuchet MS CE;} +{\f472\fswiss\fcharset204\fprq2{\*\panose 00000000000000000000}Trebuchet MS Cyr;}{\f473\fswiss\fcharset161\fprq2{\*\panose 00000000000000000000}Trebuchet MS Greek;}{\f474\fswiss\fcharset162\fprq2{\*\panose 00000000000000000000}Trebuchet MS Tur;} +{\f475\fswiss\fcharset186\fprq2{\*\panose 00000000000000000000}Trebuchet MS Baltic;}{\f476\fswiss\fcharset238\fprq2{\*\panose 00000000000000000000}Microsoft Sans Serif CE;} +{\f477\fswiss\fcharset204\fprq2{\*\panose 00000000000000000000}Microsoft Sans Serif Cyr;}{\f478\fswiss\fcharset161\fprq2{\*\panose 00000000000000000000}Microsoft Sans Serif Greek;} +{\f479\fswiss\fcharset162\fprq2{\*\panose 00000000000000000000}Microsoft Sans Serif Tur;}{\f480\fswiss\fcharset177\fprq2{\*\panose 00000000000000000000}Microsoft Sans Serif (Hebrew);} +{\f481\fswiss\fcharset178\fprq2{\*\panose 00000000000000000000}Microsoft Sans Serif (Arabic);}{\f482\fswiss\fcharset186\fprq2{\*\panose 00000000000000000000}Microsoft Sans Serif Baltic;} +{\f483\fswiss\fcharset222\fprq2{\*\panose 00000000000000000000}Microsoft Sans Serif (Thai);}{\f484\fswiss\fcharset0\fprq2{\*\panose 00000000000000000000}@Arial Unicode MS Western;} +{\f485\fswiss\fcharset238\fprq2{\*\panose 00000000000000000000}@Arial Unicode MS CE;}{\f486\fswiss\fcharset204\fprq2{\*\panose 00000000000000000000}@Arial Unicode MS Cyr;} +{\f487\fswiss\fcharset161\fprq2{\*\panose 00000000000000000000}@Arial Unicode MS Greek;}{\f488\fswiss\fcharset162\fprq2{\*\panose 00000000000000000000}@Arial Unicode MS Tur;} +{\f489\fswiss\fcharset177\fprq2{\*\panose 00000000000000000000}@Arial Unicode MS (Hebrew);}{\f490\fswiss\fcharset178\fprq2{\*\panose 
00000000000000000000}@Arial Unicode MS (Arabic);} +{\f491\fswiss\fcharset186\fprq2{\*\panose 00000000000000000000}@Arial Unicode MS Baltic;}{\f492\fswiss\fcharset163\fprq2{\*\panose 00000000000000000000}@Arial Unicode MS (Vietnamese);} +{\f493\fswiss\fcharset222\fprq2{\*\panose 00000000000000000000}@Arial Unicode MS (Thai);}{\f494\froman\fcharset162\fprq2{\*\panose 00000000000000000000}Bodoni MT Poster Compressed Tur;} +{\f495\froman\fcharset238\fprq2{\*\panose 00000000000000000000}Book Antiqua CE;}{\f496\froman\fcharset204\fprq2{\*\panose 00000000000000000000}Book Antiqua Cyr;}{\f497\froman\fcharset161\fprq2{\*\panose 00000000000000000000}Book Antiqua Greek;} +{\f498\froman\fcharset162\fprq2{\*\panose 00000000000000000000}Book Antiqua Tur;}{\f499\froman\fcharset186\fprq2{\*\panose 00000000000000000000}Book Antiqua Baltic;}{\f500\froman\fcharset238\fprq2{\*\panose 00000000000000000000}Bookman Old Style CE;} +{\f501\froman\fcharset204\fprq2{\*\panose 00000000000000000000}Bookman Old Style Cyr;}{\f502\froman\fcharset161\fprq2{\*\panose 00000000000000000000}Bookman Old Style Greek;} +{\f503\froman\fcharset162\fprq2{\*\panose 00000000000000000000}Bookman Old Style Tur;}{\f504\froman\fcharset186\fprq2{\*\panose 00000000000000000000}Bookman Old Style Baltic;} +{\f505\froman\fcharset238\fprq2{\*\panose 00000000000000000000}Century Schoolbook CE;}{\f506\froman\fcharset204\fprq2{\*\panose 00000000000000000000}Century Schoolbook Cyr;} +{\f507\froman\fcharset161\fprq2{\*\panose 00000000000000000000}Century Schoolbook Greek;}{\f508\froman\fcharset162\fprq2{\*\panose 00000000000000000000}Century Schoolbook Tur;} +{\f509\froman\fcharset186\fprq2{\*\panose 00000000000000000000}Century Schoolbook Baltic;}{\f510\fswiss\fcharset238\fprq2{\*\panose 00000000000000000000}Franklin Gothic Book CE;} +{\f511\fswiss\fcharset204\fprq2{\*\panose 00000000000000000000}Franklin Gothic Book Cyr;}{\f512\fswiss\fcharset161\fprq2{\*\panose 00000000000000000000}Franklin Gothic Book Greek;} +{\f513\fswiss\fcharset162\fprq2{\*\panose 00000000000000000000}Franklin Gothic Book Tur;}{\f514\fswiss\fcharset186\fprq2{\*\panose 00000000000000000000}Franklin Gothic Book Baltic;} +{\f515\fswiss\fcharset238\fprq2{\*\panose 00000000000000000000}Franklin Gothic Demi CE;}{\f516\fswiss\fcharset204\fprq2{\*\panose 00000000000000000000}Franklin Gothic Demi Cyr;} +{\f517\fswiss\fcharset161\fprq2{\*\panose 00000000000000000000}Franklin Gothic Demi Greek;}{\f518\fswiss\fcharset162\fprq2{\*\panose 00000000000000000000}Franklin Gothic Demi Tur;} +{\f519\fswiss\fcharset186\fprq2{\*\panose 00000000000000000000}Franklin Gothic Demi Baltic;}{\f520\fswiss\fcharset238\fprq2{\*\panose 00000000000000000000}Franklin Gothic Demi Cond CE;} +{\f521\fswiss\fcharset204\fprq2{\*\panose 00000000000000000000}Franklin Gothic Demi Cond Cyr;}{\f522\fswiss\fcharset161\fprq2{\*\panose 00000000000000000000}Franklin Gothic Demi Cond Greek;} +{\f523\fswiss\fcharset162\fprq2{\*\panose 00000000000000000000}Franklin Gothic Demi Cond Tur;}{\f524\fswiss\fcharset238\fprq2{\*\panose 00000000000000000000}Franklin Gothic Heavy CE;} +{\f525\fswiss\fcharset204\fprq2{\*\panose 00000000000000000000}Franklin Gothic Heavy Cyr;}{\f526\fswiss\fcharset161\fprq2{\*\panose 00000000000000000000}Franklin Gothic Heavy Greek;} +{\f527\fswiss\fcharset162\fprq2{\*\panose 00000000000000000000}Franklin Gothic Heavy Tur;}{\f528\fswiss\fcharset186\fprq2{\*\panose 00000000000000000000}Franklin Gothic Heavy Baltic;} +{\f529\fswiss\fcharset238\fprq2{\*\panose 00000000000000000000}Franklin Gothic Medium 
Cond CE;}{\f530\fswiss\fcharset204\fprq2{\*\panose 00000000000000000000}Franklin Gothic Medium Cond Cyr;} +{\f531\fswiss\fcharset162\fprq2{\*\panose 00000000000000000000}Franklin Gothic Medium Cond Tur;}{\f532\froman\fcharset238\fprq2{\*\panose 00000000000000000000}Garamond CE;}{\f533\froman\fcharset204\fprq2{\*\panose 00000000000000000000}Garamond Cyr;} +{\f534\froman\fcharset161\fprq2{\*\panose 00000000000000000000}Garamond Greek;}{\f535\froman\fcharset162\fprq2{\*\panose 00000000000000000000}Garamond Tur;}{\f536\froman\fcharset186\fprq2{\*\panose 00000000000000000000}Garamond Baltic;} +{\f537\fswiss\fcharset238\fprq2{\*\panose 00000000000000000000}Gill Sans MT CE;}{\f538\fswiss\fcharset238\fprq2{\*\panose 00000000000000000000}Gill Sans MT Condensed CE;} +{\f539\fswiss\fcharset238\fprq2{\*\panose 00000000000000000000}Gill Sans Ultra Bold CE;}{\f540\fswiss\fcharset238\fprq2{\*\panose 00000000000000000000}Haettenschweiler CE;} +{\f541\fswiss\fcharset204\fprq2{\*\panose 00000000000000000000}Haettenschweiler Cyr;}{\f542\fswiss\fcharset161\fprq2{\*\panose 00000000000000000000}Haettenschweiler Greek;} +{\f543\fswiss\fcharset162\fprq2{\*\panose 00000000000000000000}Haettenschweiler Tur;}{\f544\fswiss\fcharset186\fprq2{\*\panose 00000000000000000000}Haettenschweiler Baltic;}{\f545\fscript\fcharset238\fprq2{\*\panose 00000000000000000000}Mistral CE;} +{\f546\fscript\fcharset204\fprq2{\*\panose 00000000000000000000}Mistral Cyr;}{\f547\fscript\fcharset161\fprq2{\*\panose 00000000000000000000}Mistral Greek;}{\f548\fscript\fcharset162\fprq2{\*\panose 00000000000000000000}Mistral Tur;} +{\f549\fscript\fcharset186\fprq2{\*\panose 00000000000000000000}Mistral Baltic;}{\f550\fscript\fcharset238\fprq2{\*\panose 00000000000000000000}Monotype Corsiva CE;}{\f551\fscript\fcharset204\fprq2{\*\panose 00000000000000000000}Monotype Corsiva Cyr;} +{\f552\fscript\fcharset161\fprq2{\*\panose 00000000000000000000}Monotype Corsiva Greek;}{\f553\fscript\fcharset162\fprq2{\*\panose 00000000000000000000}Monotype Corsiva Tur;} +{\f554\fscript\fcharset186\fprq2{\*\panose 00000000000000000000}Monotype Corsiva Baltic;}{\f555\fswiss\fcharset238\fprq2{\*\panose 00000000000000000000}Tw Cen MT CE;}{\f556\fswiss\fcharset238\fprq2{\*\panose 00000000000000000000}Tw Cen MT Condensed CE;} +{\f557\fswiss\fcharset238\fprq2{\*\panose 00000000000000000000}MS Reference Sans Serif CE;}{\f558\fswiss\fcharset204\fprq2{\*\panose 00000000000000000000}MS Reference Sans Serif Cyr;} +{\f559\fswiss\fcharset161\fprq2{\*\panose 00000000000000000000}MS Reference Sans Serif Greek;}{\f560\fswiss\fcharset162\fprq2{\*\panose 00000000000000000000}MS Reference Sans Serif Tur;} +{\f561\fswiss\fcharset186\fprq2{\*\panose 00000000000000000000}MS Reference Sans Serif Baltic;}{\f562\fswiss\fcharset238\fprq2{\*\panose 00000000000000000000}Myriad Web Pro CE;} +{\f563\fswiss\fcharset162\fprq2{\*\panose 00000000000000000000}Myriad Web Pro Tur;}{\f564\fswiss\fcharset186\fprq2{\*\panose 00000000000000000000}Myriad Web Pro Baltic;} +{\f565\fswiss\fcharset238\fprq2{\*\panose 00000000000000000000}Myriad Web Pro Condensed CE;}{\f566\fswiss\fcharset162\fprq2{\*\panose 00000000000000000000}Myriad Web Pro Condensed Tur;} +{\f567\fswiss\fcharset186\fprq2{\*\panose 00000000000000000000}Myriad Web Pro Condensed Baltic;}{\f568\fswiss\fcharset238\fprq2{\*\panose 00000000000000000000}Candara CE;}{\f569\fswiss\fcharset204\fprq2{\*\panose 00000000000000000000}Candara Cyr;} +{\f570\fswiss\fcharset161\fprq2{\*\panose 00000000000000000000}Candara 
Greek;}{\f571\fswiss\fcharset162\fprq2{\*\panose 00000000000000000000}Candara Tur;}{\f572\fswiss\fcharset186\fprq2{\*\panose 00000000000000000000}Candara Baltic;} +{\f573\fmodern\fcharset238\fprq1{\*\panose 00000000000000000000}Consolas CE;}{\f574\fmodern\fcharset204\fprq1{\*\panose 00000000000000000000}Consolas Cyr;}{\f575\fmodern\fcharset161\fprq1{\*\panose 00000000000000000000}Consolas Greek;} +{\f576\fmodern\fcharset162\fprq1{\*\panose 00000000000000000000}Consolas Tur;}{\f577\fmodern\fcharset186\fprq1{\*\panose 00000000000000000000}Consolas Baltic;}{\f578\froman\fcharset238\fprq2{\*\panose 00000000000000000000}Constantia CE;} +{\f579\froman\fcharset204\fprq2{\*\panose 00000000000000000000}Constantia Cyr;}{\f580\froman\fcharset161\fprq2{\*\panose 00000000000000000000}Constantia Greek;}{\f581\froman\fcharset162\fprq2{\*\panose 00000000000000000000}Constantia Tur;} +{\f582\froman\fcharset186\fprq2{\*\panose 00000000000000000000}Constantia Baltic;}{\f583\fswiss\fcharset238\fprq2{\*\panose 00000000000000000000}Corbel CE;}{\f584\fswiss\fcharset204\fprq2{\*\panose 00000000000000000000}Corbel Cyr;} +{\f585\fswiss\fcharset161\fprq2{\*\panose 00000000000000000000}Corbel Greek;}{\f586\fswiss\fcharset162\fprq2{\*\panose 00000000000000000000}Corbel Tur;}{\f587\fswiss\fcharset186\fprq2{\*\panose 00000000000000000000}Corbel Baltic;} +{\f750\froman\fcharset238\fprq2 Cambria Math CE;}{\f751\froman\fcharset204\fprq2 Cambria Math Cyr;}{\f753\froman\fcharset161\fprq2 Cambria Math Greek;}{\f754\froman\fcharset162\fprq2 Cambria Math Tur;}{\f755\froman\fcharset186\fprq2 Cambria Math Baltic;} +{\f758\fmodern\fcharset0\fprq1 ?l?r ??_fc{\*\falt ?l?r ??_fc};}{\f756\fmodern\fcharset238\fprq1 ?l?r ??_fc CE{\*\falt ?l?r ??_fc};}{\f757\fmodern\fcharset204\fprq1 ?l?r ??_fc Cyr{\*\falt ?l?r ??_fc};} +{\f759\fmodern\fcharset161\fprq1 ?l?r ??_fc Greek{\*\falt ?l?r ??_fc};}{\f760\fmodern\fcharset162\fprq1 ?l?r ??_fc Tur{\*\falt ?l?r ??_fc};}{\f761\fmodern\fcharset186\fprq1 ?l?r ??_fc Baltic{\*\falt ?l?r ??_fc};} +{\f1946\fswiss\fcharset0\fprq2 Times{\*\falt Times};}{\f1944\fswiss\fcharset238\fprq2 Times CE{\*\falt Times};}{\f1945\fswiss\fcharset204\fprq2 Times Cyr{\*\falt Times};}{\f1947\fswiss\fcharset161\fprq2 Times Greek{\*\falt Times};} +{\f1948\fswiss\fcharset162\fprq2 Times Tur{\*\falt Times};}{\f1949\fswiss\fcharset186\fprq2 Times Baltic{\*\falt Times};}{\f2322\fswiss\fcharset238\fprq2 Gill Sans MT Ext Condensed Bold CE;}}}}{\colortbl;\red0\green0\blue0;\red0\green0\blue255; +\red0\green255\blue255;\red0\green255\blue0;\red255\green0\blue255;\red255\green0\blue0;\red255\green255\blue0;\red255\green255\blue255;\red0\green0\blue128;\red0\green128\blue128;\red0\green128\blue0;\red128\green0\blue128;\red128\green0\blue0; +\red128\green128\blue0;\red128\green128\blue128;\red192\green192\blue192;}{\stylesheet{\ql \li0\ri0\widctlpar\wrapdefault\aspalpha\aspnum\faauto\adjustright\rin0\lin0\itap0 \rtlch\fcs1 \af0\afs24\alang1025 \ltrch\fcs0 +\fs24\lang1033\langfe1033\cgrid\langnp1033\langfenp1033 \snext0 \slink4095 Normal;}{\*\cs10 \additive \slink4095 \ssemihidden Default Paragraph Font;}{\* +\ts11\tsrowd\trftsWidthB3\trpaddl108\trpaddr108\trpaddfl3\trpaddft3\trpaddfb3\trpaddfr3\tblind0\tblindtype3\tscellwidthfts0\tsvertalt\tsbrdrt\tsbrdrl\tsbrdrb\tsbrdrr\tsbrdrdgl\tsbrdrdgr\tsbrdrh\tsbrdrv +\ql \li0\ri0\widctlpar\wrapdefault\aspalpha\aspnum\faauto\adjustright\rin0\lin0\itap0 \rtlch\fcs1 \af0\afs20 \ltrch\fcs0 \fs20\lang1024\langfe1024\cgrid\langnp1024\langfenp1024 \snext11 \slink4095 \ssemihidden Normal 
+Please note: We recently (Fall 2008) discovered that, for a subset of cases, inconsistent rating systems were used among the 5 sites with regard to the spiculation and lobulation characteristics of lesions identified as nodules > 3 mm. The XML nodule characteristics data as it exists for some cases will be impacted by this error.
+We apologize for any inconvenience and we are in the process of correcting this situation.
+LIDC team
+Jan 2009
+
+	Version header
+
+	Links to the schema and xsd file
+
+	Beginning of the response header
+1.7
+	Version Number
+2000-01-01
+	Date of the original request to read the series
+00:00:00
+	Time of that request
+
+anonymous
+	Code for the site performing the read - set to anonymous
+Second unblinded read
+	Code for the reading task being requested (in this case, it is the second phase, unblinded read)
+
+2000-01-01
+	Date that the request was serviced (completed)
+00:00:00
+	Time that the request was serviced
+1 - Reading complete
+	Used for internal communication; only those files marked as "1 - Reading complete" will be part of public release.
+1.3.6.1.4.1.9328.50.3.1888
+	The DICOM field Series Instance UID (0020,000E)
+
+	End of response header
+
+A reading session consists of a set of markings done by a single reader at a single phase (either the blinded or unblinded phase; in these files only the unblinded read results are reported).
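As a rough illustration of how the header fields above might be read programmatically, here is a minimal Python sketch using xml.etree.ElementTree. The element names used below (Version, DateRequest, TimeRequest, TaskDescription, DateService, TimeService, ResponseDescription, SeriesInstanceUid) are assumptions inferred from the field descriptions above, not confirmed by this document; the linked schema/xsd is the authoritative reference.

import xml.etree.ElementTree as ET

def strip_namespace(tag):
    """Drop any '{namespace}' prefix so lookups work regardless of the xmlns used."""
    return tag.split('}', 1)[-1]

def read_response_header(xml_path):
    """Collect the response-header fields of one annotation file into a dict.

    Element names are assumed placeholders mirroring the annotated fields above.
    """
    root = ET.parse(xml_path).getroot()
    wanted = {"Version", "DateRequest", "TimeRequest", "TaskDescription",
              "DateService", "TimeService", "ResponseDescription", "SeriesInstanceUid"}
    header = {}
    for elem in root.iter():
        name = strip_namespace(elem.tag)
        if name in wanted:
            header[name] = (elem.text or "").strip()
    return header

# e.g. read_response_header("some_read.xml").get("SeriesInstanceUid")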
+3.12
+	Annotation version
+
+	Radiologist id - set to anonymous
+
+	Beginning of nodule marking information
+4983
+	Nodule identifier - unique id for the nodule
+
+	For a nodule > 3 mm, each reader is asked to subjectively assess the nodule's characteristics as described on the LIDC CDE page http://cdebrowser.nci.nih.gov/CDEBrowser/ and as follows:
+5
+	Radiologist assessment of nodule subtlety on a 1-5 scale
+	(1	extremely subtle
+	2
+	3
+	4
+	5	obvious)
+1
+	Radiologist assessment of nodule internal structure
+	(1	soft tissue
+	2	fluid
+	3	fat
+	4	air)
+6
+	Radiologist assessment of internal calcification of the nodule
+	(1	Popcorn	Popcorn Appearance
+	2	Laminated	Laminated Appearance
+	3	Solid	Solid Appearance
+	4	Non-Central	Non-Central Appearance
+	5	Central	Central Calcification
+	6	Absent)
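The reading-session/nodule nesting just described can be walked with a short generator. Again, readingSession, servicingRadiologistID, unblindedReadNodule and noduleID are assumed element names inferred from the annotations above; this is only a sketch, not the toolkit's loader.

import xml.etree.ElementTree as ET

def _local(elem):
    """Element name without any namespace prefix."""
    return elem.tag.split('}', 1)[-1]

def iter_nodules(xml_path):
    """Yield (reader_id, nodule_id, nodule_element) per marked nodule per reader."""
    root = ET.parse(xml_path).getroot()
    for session in (e for e in root.iter() if _local(e) == "readingSession"):
        reader = next((c.text for c in session if _local(c) == "servicingRadiologistID"), None)
        for nodule in (e for e in session.iter() if _local(e) == "unblindedReadNodule"):
            nodule_id = next((c.text for c in nodule.iter() if _local(c) == "noduleID"), None)
            yield reader, nodule_id, nodule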
4
    Radiologist assessment of the shape of the nodule in terms of its roundness/sphericity, with only 3 terms defined (1 = Linear, 3 = Ovoid, 5 = Round)
4
    Radiologist assessment of the nodule margin on a 1-5 scale, with only the extreme values explicitly defined (1 = Poorly defined, 5 = Sharp margin)
1
    Radiologist assessment of nodule lobulation on a 1-5 scale, with only the extreme values explicitly defined (1 = No lobulation, 5 = Marked lobulation)
1
    Radiologist assessment of nodule spiculation on a 1-5 scale, with only the extreme values explicitly defined (1 = No spiculation, 5 = Marked spiculation)
5
    Radiologist assessment of nodule internal texture, with only 3 terms defined (1 = Non-solid/Ground glass opacity, 3 = Part solid/Mixed, 5 = Solid texture)
5
    Radiologist subjective assessment of the likelihood of malignancy of this nodule, ASSUMING a 60-year-old male smoker (1 = Highly unlikely for cancer, 2 = Moderately unlikely for cancer, 3 = Indeterminate likelihood, 4 = Moderately suspicious for cancer, 5 = Highly suspicious for cancer)
End of characteristics section
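Each of the nine characteristics above is a small integer rating stored in its own child element of the nodule's characteristics block. A minimal sketch of collecting them per nodule, assuming the commonly used element names (subtlety, internalStructure, calcification, sphericity, margin, lobulation, spiculation, texture, malignancy), which the walkthrough implies but does not spell out:

# Assumed element names of the nine ratings, in the order they appear above.
CHARACTERISTIC_NAMES = (
    "subtlety", "internalStructure", "calcification", "sphericity",
    "margin", "lobulation", "spiculation", "texture", "malignancy",
)

def strip_ns(tag):
    return tag.split('}')[-1]

def read_characteristics(nodule_elem):
    """Collect one nodule's integer ratings into a dict, skipping ratings that are absent.

    nodule_elem: an xml.etree.ElementTree.Element holding one nodule read.
    """
    ratings = {}
    for elem in nodule_elem.iter():
        name = strip_ns(elem.tag)
        if name in CHARACTERISTIC_NAMES and elem.text and elem.text.strip():
            ratings[name] = int(elem.text)
    return ratings

For the example nodule above this would yield subtlety 5, internal structure 1, calcification 6, sphericity 4, margin 4, lobulation 1, spiculation 1, texture 5 and malignancy 5; nodules that were not assessed (the < 3 mm case) simply yield an empty dict.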
Beginning of the region of interest (roi) description
    A roi is a boundary describing either:
    (a) a single voxel for nodules < 3 mm, or
    (b) a complete contour of the nodule that describes the first voxel just outside the nodule.
    The format first specifies the z position (using imageZposition AND imageSOP_UID), then the coordinates of connected boundary points within each x-y plane.
    NOTE: the x-y plane convention is that (1,1) is the upper left.
-410.350006
    The third part of the tuplet in DICOM field (0020,0032), describing the z location of the slice on which the nodule is visualized and contoured
1.3.6.1.4.1.9328.50.3.1892
    DICOM field (0008,0018) of that location; note that this also specifies the name of the corresponding image file (1.3.6.1.4.1.9328.50.3.1892.dcm). The user can open that image file to see the image on which the nodule is visualized.
TRUE
    Element describing whether the following voxels are to be included (value = TRUE) or excluded (value = FALSE)
    Beginning of the edge map; the collection of these points makes up the boundary of the nodule
412
    x coordinate of the edge map point
312
    y coordinate of the edge map point
    End of the edge map point
    Next edge map point; note that this nodule has more than one point, indicating that it is a nodule > 3 mm
412
    x coordinate of the point
311
    y coordinate of the point
    End of the edge map point
    The remaining edge map points of this slice follow in the same format: (412, 310), (412, 309), (412, 308), (411, 307), (411, 306), (411, 305), (410, 305), (409, 306), (409, 307), (409, 308), (409, 309), (410, 310), (410, 311), (411, 312), (412, 312).
End of the roi definition for that z position (slice)
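Per slice, a roi therefore reduces to an imageZposition, an imageSOP_UID, an inclusion flag and a list of (x, y) edge map points. A minimal sketch of collecting these for one nodule, assuming the commonly used element names roi, inclusion, edgeMap, xCoord and yCoord for the pieces the walkthrough describes but does not name:

def strip_ns(tag):
    return tag.split('}')[-1]

def read_rois(nodule_elem):
    """Return (z position, SOP UID, inclusion flag, [(x, y), ...]) for every roi of one nodule.

    nodule_elem: an xml.etree.ElementTree.Element holding one nodule read.
    """
    rois = []
    for roi in nodule_elem.iter():
        if strip_ns(roi.tag) != "roi":                     # assumed element name
            continue
        fields = {strip_ns(child.tag): child for child in roi}
        z = float(fields["imageZposition"].text)
        sop_uid = fields["imageSOP_UID"].text.strip()
        include = fields["inclusion"].text.strip().upper() == "TRUE"   # assumed element name
        points = []
        for edge in roi:
            if strip_ns(edge.tag) == "edgeMap":            # assumed element name
                coords = {strip_ns(c.tag): int(c.text) for c in edge}
                points.append((coords["xCoord"], coords["yCoord"]))
        rois.append((z, sop_uid, include, points))
    return rois

For nodules < 3 mm the point list holds a single voxel; for larger nodules it traces the in-plane contour just outside the nodule, which a downstream pipeline can rasterise into a binary mask with any polygon-fill routine, grouping slices by imageSOP_UID or z position.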
    The roi continues on to another image:
-411.100006
    The third part of the tuplet in DICOM field (0020,0032), describing the z location of this slice on which the nodule is visualized and contoured
1.3.6.1.4.1.9328.50.3.1893
    DICOM field (0008,0018) of that location
TRUE
    (The edge map point listing for this slice is omitted here.)
    (The roi then continues in the same pattern over the remaining slices of this nodule, z positions -411.850006 down to -426.850006, image SOP UIDs 1.3.6.1.4.1.9328.50.3.1894 through 1.3.6.1.4.1.9328.50.3.1914, each with its own inclusion flag and edge map point listing.)
End of nodule

Beginning of the next nodule
4988
    Nodule id, etc.
    (Its characteristics values 4, 1, 6, 2, 5, 2, 1, 5, 4 and its roi edge maps for z positions -440.350006 down to -447.100006, image SOP UIDs 1.3.6.1.4.1.9328.50.3.1922 through 1.3.6.1.4.1.9328.50.3.1931, follow in the same format.)
4990
    (A further nodule id; its roi consists of a single edge map point (71, 250) on the slice at z position -480.850006, image SOP UID 1.3.6.1.4.1.9328.50.3.1932, i.e. the single-voxel form used for nodules < 3 mm.)

Non-Nodule > 3 mm
    These are objects that a radiologist observed but did not consider a candidate for a nodule; they are mainly meant to identify objects NOT to be considered for analysis by a CAD system.
Fields of one non-nodule record, shown with the example values from the file:

5002                          id of the non-nodule
-360.850006                   third component of DICOM field (0020,0032), Image Position (Patient): the z location of the slice on which the non-nodule is visualized and marked
1.3.6.1.4.1.9328.50.3.1934    DICOM field (0008,0018), the SOP Instance UID, of that slice
(locus)                       the locus element is unique to non-nodules and indicates that at least one point follows
296                           x coordinate of the non-nodule mark placed by the radiologist
331                           y coordinate of the non-nodule mark
(end of locus)
(end of this non-nodule)
(beginning of the next non-nodule, etc.)
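To turn this layout into something usable in code, a minimal parsing sketch follows. It is not part of the repository; the element names (nonNodule, nonNoduleID, imageZposition, imageSOP_UID, locus, xCoord, yCoord) are assumptions about the underlying XML schema, since the listing above only gives the field order and example values, and XML namespaces are simply stripped.

import xml.etree.ElementTree as ET

def _local(tag):
    # drop a namespace prefix such as '{http://...}locus' -> 'locus'
    return tag.split('}')[-1]

def parse_non_nodules(xml_path):
    """Collect all non-nodule marks from a LIDC-style annotation file.

    Returns a list of dicts with the fields described above: id, z location
    (third component of (0020,0032)), SOP Instance UID (0008,0018), and the
    (x, y) locus points. Tag names are assumed; adjust to the actual schema.
    """
    non_nodules = []
    for elem in ET.parse(xml_path).getroot().iter():
        if _local(elem.tag) != "nonNodule":
            continue
        rec = {"id": None, "z_pos": None, "sop_uid": None, "loci": []}
        for child in elem.iter():
            tag = _local(child.tag)
            if tag == "nonNoduleID":
                rec["id"] = child.text.strip()
            elif tag == "imageZposition":       # z location of the slice
                rec["z_pos"] = float(child.text)
            elif tag == "imageSOP_UID":         # DICOM (0008,0018) of that slice
                rec["sop_uid"] = child.text.strip()
            elif tag == "locus":
                point = {_local(c.tag): int(c.text) for c in child}
                rec["loci"].append((point.get("xCoord"), point.get("yCoord")))
        non_nodules.append(rec)
    return non_nodules

For the record above, this would return something like {'id': '5002', 'z_pos': -360.850006, 'sop_uid': '1.3.6.1.4.1.9328.50.3.1934', 'loci': [(296, 331)]}.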
[Remaining example annotation data omitted: further non-nodule records (ids 5003 and 4992-4996, each a z location, SOP Instance UID and a single x/y point); the per-slice ROI contours and nine characteristic ratings of Nodule 001 and Nodule 002; single-point marks Non-nodule 001 through Non-nodule 007; and another block of single-point marks and ROI contours (ids 1796-1800, 1775).]
317 +\par +\par +\par 413 +\par 317 +\par +\par +\par +\par -413.350006 +\par 1.3.6.1.4.1.9328.50.3.1896 +\par TRUE +\par +\par 415 +\par 317 +\par +\par +\par 415 +\par 316 +\par +\par +\par 416 +\par 315 +\par +\par +\par 416 +\par 314 +\par +\par +\par 416 +\par 313 +\par +\par +\par 417 +\par 312 +\par +\par +\par 417 +\par 311 +\par +\par +\par 417 +\par 310 +\par +\par +\par 417 +\par 309 +\par +\par +\par 417 +\par 308 +\par +\par +\par 417 +\par 307 +\par +\par +\par 417 +\par 306 +\par +\par +\par 417 +\par 305 +\par +\par +\par 417 +\par 304 +\par +\par +\par 417 +\par 303 +\par +\par +\par 416 +\par 302 +\par +\par +\par 415 +\par 302 +\par +\par +\par 414 +\par 302 +\par +\par +\par 413 +\par 302 +\par +\par +\par 412 +\par 301 +\par +\par +\par 411 +\par 301 +\par +\par +\par 410 +\par 302 +\par +\par +\par 409 +\par 301 +\par +\par +\par 408 +\par 301 +\par +\par +\par 407 +\par 301 +\par +\par +\par 406 +\par 302 +\par +\par +\par 405 +\par 303 +\par +\par +\par 405 +\par 304 +\par +\par +\par 404 +\par 305 +\par +\par +\par 404 +\par 306 +\par +\par +\par 404 +\par 307 +\par +\par +\par 404 +\par 308 +\par +\par +\par 404 +\par 309 +\par +\par +\par 404 +\par 310 +\par +\par +\par 404 +\par 311 +\par +\par +\par 404 +\par 312 +\par +\par +\par 405 +\par 313 +\par +\par +\par 406 +\par 314 +\par +\par +\par 407 +\par 315 +\par +\par +\par 408 +\par 316 +\par +\par +\par 409 +\par 316 +\par +\par +\par 410 +\par 317 +\par +\par +\par 411 +\par 317 +\par +\par +\par 412 +\par 317 +\par +\par +\par 413 +\par 317 +\par +\par +\par 414 +\par 317 +\par +\par +\par 415 +\par 317 +\par +\par +\par +\par -414.100006 +\par 1.3.6.1.4.1.9328.50.3.1897 +\par TRUE +\par +\par 413 +\par 319 +\par +\par +\par 414 +\par 318 +\par +\par +\par 415 +\par 317 +\par +\par +\par 416 +\par 316 +\par +\par +\par 417 +\par 315 +\par +\par +\par 417 +\par 314 +\par +\par +\par 418 +\par 313 +\par +\par +\par 418 +\par 312 +\par +\par +\par 418 +\par 311 +\par +\par +\par 418 +\par 310 +\par +\par +\par 418 +\par 309 +\par +\par +\par 418 +\par 308 +\par +\par +\par 417 +\par 307 +\par +\par +\par 417 +\par 306 +\par +\par +\par 417 +\par 305 +\par +\par +\par 417 +\par 304 +\par +\par +\par 417 +\par 303 +\par +\par +\par 416 +\par 302 +\par +\par +\par 415 +\par 302 +\par +\par +\par 414 +\par 302 +\par +\par +\par 413 +\par 302 +\par +\par +\par 412 +\par 302 +\par +\par +\par 411 +\par 303 +\par +\par +\par 410 +\par 302 +\par +\par +\par 409 +\par 301 +\par +\par +\par 408 +\par 301 +\par +\par +\par 407 +\par 301 +\par +\par +\par 406 +\par 301 +\par +\par +\par 405 +\par 302 +\par +\par +\par 404 +\par 303 +\par +\par +\par 404 +\par 304 +\par +\par +\par 404 +\par 305 +\par +\par +\par 404 +\par 306 +\par +\par +\par 404 +\par 307 +\par +\par +\par 403 +\par 308 +\par +\par +\par 403 +\par 309 +\par +\par +\par 403 +\par 310 +\par +\par +\par 403 +\par 311 +\par +\par +\par 404 +\par 312 +\par +\par +\par 404 +\par 313 +\par +\par +\par 405 +\par 314 +\par +\par +\par 406 +\par 315 +\par +\par +\par 407 +\par 315 +\par +\par +\par 408 +\par 316 +\par +\par +\par 409 +\par 317 +\par +\par +\par 410 +\par 317 +\par +\par +\par 411 +\par 318 +\par +\par +\par 412 +\par 319 +\par +\par +\par 413 +\par 319 +\par +\par +\par +\par -414.850006 +\par 1.3.6.1.4.1.9328.50.3.1898 +\par TRUE +\par +\par 413 +\par 320 +\par +\par +\par 414 +\par 319 +\par +\par +\par 414 +\par 318 +\par +\par +\par 415 +\par 317 +\par +\par +\par 416 +\par 316 +\par +\par +\par 417 +\par 315 +\par +\par +\par 417 +\par 314 
+\par +\par +\par 417 +\par 313 +\par +\par +\par 417 +\par 312 +\par +\par +\par 418 +\par 311 +\par +\par +\par 418 +\par 310 +\par +\par +\par 418 +\par 309 +\par +\par +\par 418 +\par 308 +\par +\par +\par 419 +\par 307 +\par +\par +\par 418 +\par 306 +\par +\par +\par 418 +\par 305 +\par +\par +\par 417 +\par 304 +\par +\par +\par 416 +\par 303 +\par +\par +\par 415 +\par 302 +\par +\par +\par 414 +\par 301 +\par +\par +\par 413 +\par 301 +\par +\par +\par 412 +\par 302 +\par +\par +\par 411 +\par 303 +\par +\par +\par 410 +\par 302 +\par +\par +\par 409 +\par 301 +\par +\par +\par 408 +\par 301 +\par +\par +\par 407 +\par 301 +\par +\par +\par 406 +\par 301 +\par +\par +\par 405 +\par 302 +\par +\par +\par 404 +\par 303 +\par +\par +\par 404 +\par 304 +\par +\par +\par 404 +\par 305 +\par +\par +\par 403 +\par 306 +\par +\par +\par 403 +\par 307 +\par +\par +\par 402 +\par 308 +\par +\par +\par 402 +\par 309 +\par +\par +\par 402 +\par 310 +\par +\par +\par 403 +\par 311 +\par +\par +\par 404 +\par 312 +\par +\par +\par 404 +\par 313 +\par +\par +\par 405 +\par 314 +\par +\par +\par 406 +\par 315 +\par +\par +\par 406 +\par 316 +\par +\par +\par 407 +\par 317 +\par +\par +\par 407 +\par 318 +\par +\par +\par 408 +\par 319 +\par +\par +\par 409 +\par 320 +\par +\par +\par 410 +\par 320 +\par +\par +\par 411 +\par 320 +\par +\par +\par 412 +\par 320 +\par +\par +\par 413 +\par 320 +\par +\par +\par +\par -415.600006 +\par 1.3.6.1.4.1.9328.50.3.1899 +\par TRUE +\par +\par 416 +\par 323 +\par +\par +\par 417 +\par 322 +\par +\par +\par 418 +\par 321 +\par +\par +\par 419 +\par 320 +\par +\par +\par 419 +\par 319 +\par +\par +\par 419 +\par 318 +\par +\par +\par 419 +\par 317 +\par +\par +\par 419 +\par 316 +\par +\par +\par 420 +\par 315 +\par +\par +\par 419 +\par 314 +\par +\par +\par 419 +\par 313 +\par +\par +\par 419 +\par 312 +\par +\par +\par 419 +\par 311 +\par +\par +\par 419 +\par 310 +\par +\par +\par 419 +\par 309 +\par +\par +\par 419 +\par 308 +\par +\par +\par 419 +\par 307 +\par +\par +\par 419 +\par 306 +\par +\par +\par 419 +\par 305 +\par +\par +\par 419 +\par 304 +\par +\par +\par 419 +\par 303 +\par +\par +\par 418 +\par 302 +\par +\par +\par 417 +\par 301 +\par +\par +\par 416 +\par 300 +\par +\par +\par 415 +\par 300 +\par +\par +\par 414 +\par 301 +\par +\par +\par 413 +\par 302 +\par +\par +\par 412 +\par 302 +\par +\par +\par 411 +\par 303 +\par +\par +\par 410 +\par 303 +\par +\par +\par 409 +\par 302 +\par +\par +\par 408 +\par 301 +\par +\par +\par 407 +\par 301 +\par +\par +\par 406 +\par 301 +\par +\par +\par 405 +\par 302 +\par +\par +\par 405 +\par 303 +\par +\par +\par 404 +\par 304 +\par +\par +\par 403 +\par 305 +\par +\par +\par 402 +\par 306 +\par +\par +\par 402 +\par 307 +\par +\par +\par 402 +\par 308 +\par +\par +\par 402 +\par 309 +\par +\par +\par 403 +\par 310 +\par +\par +\par 404 +\par 311 +\par +\par +\par 404 +\par 312 +\par +\par +\par 405 +\par 313 +\par +\par +\par 406 +\par 314 +\par +\par +\par 407 +\par 315 +\par +\par +\par 407 +\par 316 +\par +\par +\par 408 +\par 317 +\par +\par +\par 408 +\par 318 +\par +\par +\par 409 +\par 319 +\par +\par +\par 410 +\par 319 +\par +\par +\par 411 +\par 320 +\par +\par +\par 412 +\par 321 +\par +\par +\par 413 +\par 321 +\par +\par +\par 414 +\par 321 +\par +\par +\par 415 +\par 322 +\par +\par +\par 415 +\par 323 +\par +\par +\par 416 +\par 323 +\par +\par +\par +\par -416.350006 +\par 1.3.6.1.4.1.9328.50.3.1900 +\par TRUE +\par +\par 415 +\par 322 +\par +\par +\par 416 +\par 321 +\par +\par 
+\par 417 +\par 321 +\par +\par +\par 418 +\par 320 +\par +\par +\par 419 +\par 320 +\par +\par +\par 420 +\par 319 +\par +\par +\par 419 +\par 318 +\par +\par +\par 420 +\par 317 +\par +\par +\par 420 +\par 316 +\par +\par +\par 420 +\par 315 +\par +\par +\par 420 +\par 314 +\par +\par +\par 420 +\par 313 +\par +\par +\par 420 +\par 312 +\par +\par +\par 420 +\par 311 +\par +\par +\par 420 +\par 310 +\par +\par +\par 420 +\par 309 +\par +\par +\par 420 +\par 308 +\par +\par +\par 420 +\par 307 +\par +\par +\par 419 +\par 306 +\par +\par +\par 419 +\par 305 +\par +\par +\par 419 +\par 304 +\par +\par +\par 418 +\par 303 +\par +\par +\par 417 +\par 302 +\par +\par +\par 416 +\par 301 +\par +\par +\par 415 +\par 301 +\par +\par +\par 414 +\par 302 +\par +\par +\par 413 +\par 302 +\par +\par +\par 412 +\par 302 +\par +\par +\par 411 +\par 302 +\par +\par +\par 410 +\par 302 +\par +\par +\par 409 +\par 303 +\par +\par +\par 408 +\par 302 +\par +\par +\par 407 +\par 302 +\par +\par +\par 406 +\par 303 +\par +\par +\par 405 +\par 304 +\par +\par +\par 404 +\par 305 +\par +\par +\par 403 +\par 306 +\par +\par +\par 402 +\par 307 +\par +\par +\par 401 +\par 308 +\par +\par +\par 402 +\par 309 +\par +\par +\par 403 +\par 310 +\par +\par +\par 404 +\par 311 +\par +\par +\par 404 +\par 312 +\par +\par +\par 405 +\par 313 +\par +\par +\par 406 +\par 314 +\par +\par +\par 406 +\par 315 +\par +\par +\par 406 +\par 316 +\par +\par +\par 406 +\par 317 +\par +\par +\par 406 +\par 318 +\par +\par +\par 407 +\par 319 +\par +\par +\par 408 +\par 319 +\par +\par +\par 409 +\par 320 +\par +\par +\par 410 +\par 321 +\par +\par +\par 411 +\par 321 +\par +\par +\par 412 +\par 321 +\par +\par +\par 413 +\par 322 +\par +\par +\par 414 +\par 321 +\par +\par +\par 415 +\par 322 +\par +\par +\par +\par -417.100006 +\par 1.3.6.1.4.1.9328.50.3.1901 +\par TRUE +\par +\par 417 +\par 323 +\par +\par +\par 418 +\par 322 +\par +\par +\par 419 +\par 321 +\par +\par +\par 420 +\par 320 +\par +\par +\par 421 +\par 319 +\par +\par +\par 421 +\par 318 +\par +\par +\par 421 +\par 317 +\par +\par +\par 421 +\par 316 +\par +\par +\par 421 +\par 315 +\par +\par +\par 421 +\par 314 +\par +\par +\par 421 +\par 313 +\par +\par +\par 421 +\par 312 +\par +\par +\par 421 +\par 311 +\par +\par +\par 421 +\par 310 +\par +\par +\par 420 +\par 309 +\par +\par +\par 420 +\par 308 +\par +\par +\par 420 +\par 307 +\par +\par +\par 420 +\par 306 +\par +\par +\par 419 +\par 305 +\par +\par +\par 419 +\par 304 +\par +\par +\par 419 +\par 303 +\par +\par +\par 418 +\par 302 +\par +\par +\par 418 +\par 301 +\par +\par +\par 417 +\par 302 +\par +\par +\par 416 +\par 302 +\par +\par +\par 415 +\par 302 +\par +\par +\par 414 +\par 302 +\par +\par +\par 413 +\par 301 +\par +\par +\par 412 +\par 301 +\par +\par +\par 411 +\par 301 +\par +\par +\par 410 +\par 302 +\par +\par +\par 409 +\par 302 +\par +\par +\par 408 +\par 302 +\par +\par +\par 407 +\par 301 +\par +\par +\par 407 +\par 302 +\par +\par +\par 406 +\par 303 +\par +\par +\par 405 +\par 304 +\par +\par +\par 404 +\par 305 +\par +\par +\par 403 +\par 306 +\par +\par +\par 402 +\par 307 +\par +\par +\par 403 +\par 308 +\par +\par +\par 403 +\par 309 +\par +\par +\par 404 +\par 310 +\par +\par +\par 404 +\par 311 +\par +\par +\par 404 +\par 312 +\par +\par +\par 405 +\par 313 +\par +\par +\par 405 +\par 314 +\par +\par +\par 406 +\par 315 +\par +\par +\par 406 +\par 316 +\par +\par +\par 406 +\par 317 +\par +\par +\par 406 +\par 318 +\par +\par +\par 407 +\par 319 +\par +\par +\par 408 +\par 320 
+\par +\par +\par 409 +\par 320 +\par +\par +\par 410 +\par 321 +\par +\par +\par 411 +\par 322 +\par +\par +\par 412 +\par 322 +\par +\par +\par 413 +\par 322 +\par +\par +\par 414 +\par 322 +\par +\par +\par 415 +\par 323 +\par +\par +\par 416 +\par 323 +\par +\par +\par 417 +\par 323 +\par +\par +\par +\par -417.850006 +\par 1.3.6.1.4.1.9328.50.3.1902 +\par TRUE +\par +\par 418 +\par 324 +\par +\par +\par 418 +\par 323 +\par +\par +\par 419 +\par 322 +\par +\par +\par 420 +\par 321 +\par +\par +\par 421 +\par 320 +\par +\par +\par 421 +\par 319 +\par +\par +\par 422 +\par 318 +\par +\par +\par 422 +\par 317 +\par +\par +\par 423 +\par 316 +\par +\par +\par 423 +\par 315 +\par +\par +\par 423 +\par 314 +\par +\par +\par 423 +\par 313 +\par +\par +\par 423 +\par 312 +\par +\par +\par 422 +\par 311 +\par +\par +\par 422 +\par 310 +\par +\par +\par 421 +\par 309 +\par +\par +\par 421 +\par 308 +\par +\par +\par 421 +\par 307 +\par +\par +\par 420 +\par 306 +\par +\par +\par 420 +\par 305 +\par +\par +\par 419 +\par 304 +\par +\par +\par 418 +\par 303 +\par +\par +\par 418 +\par 302 +\par +\par +\par 417 +\par 302 +\par +\par +\par 416 +\par 302 +\par +\par +\par 415 +\par 302 +\par +\par +\par 414 +\par 302 +\par +\par +\par 413 +\par 301 +\par +\par +\par 412 +\par 301 +\par +\par +\par 411 +\par 301 +\par +\par +\par 410 +\par 302 +\par +\par +\par 409 +\par 302 +\par +\par +\par 408 +\par 302 +\par +\par +\par 407 +\par 302 +\par +\par +\par 406 +\par 303 +\par +\par +\par 406 +\par 304 +\par +\par +\par 405 +\par 305 +\par +\par +\par 404 +\par 306 +\par +\par +\par 404 +\par 307 +\par +\par +\par 404 +\par 308 +\par +\par +\par 404 +\par 309 +\par +\par +\par 404 +\par 310 +\par +\par +\par 404 +\par 311 +\par +\par +\par 404 +\par 312 +\par +\par +\par 405 +\par 313 +\par +\par +\par 405 +\par 314 +\par +\par +\par 406 +\par 315 +\par +\par +\par 406 +\par 316 +\par +\par +\par 406 +\par 317 +\par +\par +\par 406 +\par 318 +\par +\par +\par 407 +\par 319 +\par +\par +\par 408 +\par 320 +\par +\par +\par 409 +\par 321 +\par +\par +\par 410 +\par 321 +\par +\par +\par 411 +\par 322 +\par +\par +\par 412 +\par 322 +\par +\par +\par 413 +\par 323 +\par +\par +\par 414 +\par 323 +\par +\par +\par 415 +\par 323 +\par +\par +\par 416 +\par 323 +\par +\par +\par 417 +\par 324 +\par +\par +\par 418 +\par 324 +\par +\par +\par +\par -418.600006 +\par 1.3.6.1.4.1.9328.50.3.1903 +\par TRUE +\par +\par 418 +\par 323 +\par +\par +\par 419 +\par 322 +\par +\par +\par 420 +\par 321 +\par +\par +\par 420 +\par 320 +\par +\par +\par 420 +\par 319 +\par +\par +\par 421 +\par 318 +\par +\par +\par 421 +\par 317 +\par +\par +\par 422 +\par 316 +\par +\par +\par 422 +\par 315 +\par +\par +\par 422 +\par 314 +\par +\par +\par 422 +\par 313 +\par +\par +\par 422 +\par 312 +\par +\par +\par 422 +\par 311 +\par +\par +\par 422 +\par 310 +\par +\par +\par 421 +\par 309 +\par +\par +\par 421 +\par 308 +\par +\par +\par 421 +\par 307 +\par +\par +\par 421 +\par 306 +\par +\par +\par 420 +\par 305 +\par +\par +\par 420 +\par 304 +\par +\par +\par 419 +\par 303 +\par +\par +\par 418 +\par 302 +\par +\par +\par 417 +\par 301 +\par +\par +\par 416 +\par 302 +\par +\par +\par 415 +\par 302 +\par +\par +\par 414 +\par 302 +\par +\par +\par 413 +\par 301 +\par +\par +\par 412 +\par 301 +\par +\par +\par 411 +\par 301 +\par +\par +\par 410 +\par 302 +\par +\par +\par 409 +\par 302 +\par +\par +\par 408 +\par 303 +\par +\par +\par 407 +\par 303 +\par +\par +\par 406 +\par 304 +\par +\par +\par 405 +\par 305 +\par +\par 
+\par 405 +\par 306 +\par +\par +\par 405 +\par 307 +\par +\par +\par 404 +\par 308 +\par +\par +\par 404 +\par 309 +\par +\par +\par 404 +\par 310 +\par +\par +\par 405 +\par 311 +\par +\par +\par 405 +\par 312 +\par +\par +\par 405 +\par 313 +\par +\par +\par 405 +\par 314 +\par +\par +\par 405 +\par 315 +\par +\par +\par 406 +\par 316 +\par +\par +\par 406 +\par 317 +\par +\par +\par 407 +\par 318 +\par +\par +\par 407 +\par 319 +\par +\par +\par 408 +\par 320 +\par +\par +\par 409 +\par 321 +\par +\par +\par 410 +\par 321 +\par +\par +\par 411 +\par 322 +\par +\par +\par 412 +\par 322 +\par +\par +\par 413 +\par 323 +\par +\par +\par 414 +\par 323 +\par +\par +\par 415 +\par 323 +\par +\par +\par 416 +\par 323 +\par +\par +\par 417 +\par 323 +\par +\par +\par 418 +\par 323 +\par +\par +\par +\par -419.350006 +\par 1.3.6.1.4.1.9328.50.3.1904 +\par TRUE +\par +\par 417 +\par 323 +\par +\par +\par 418 +\par 322 +\par +\par +\par 419 +\par 321 +\par +\par +\par 420 +\par 320 +\par +\par +\par 420 +\par 319 +\par +\par +\par 421 +\par 318 +\par +\par +\par 421 +\par 317 +\par +\par +\par 422 +\par 316 +\par +\par +\par 422 +\par 315 +\par +\par +\par 423 +\par 314 +\par +\par +\par 423 +\par 313 +\par +\par +\par 423 +\par 312 +\par +\par +\par 423 +\par 311 +\par +\par +\par 423 +\par 310 +\par +\par +\par 423 +\par 309 +\par +\par +\par 423 +\par 308 +\par +\par +\par 423 +\par 307 +\par +\par +\par 423 +\par 306 +\par +\par +\par 423 +\par 305 +\par +\par +\par 422 +\par 304 +\par +\par +\par 422 +\par 303 +\par +\par +\par 421 +\par 302 +\par +\par +\par 420 +\par 301 +\par +\par +\par 419 +\par 300 +\par +\par +\par 418 +\par 300 +\par +\par +\par 417 +\par 301 +\par +\par +\par 416 +\par 301 +\par +\par +\par 415 +\par 302 +\par +\par +\par 414 +\par 301 +\par +\par +\par 413 +\par 301 +\par +\par +\par 412 +\par 301 +\par +\par +\par 411 +\par 301 +\par +\par +\par 410 +\par 302 +\par +\par +\par 409 +\par 302 +\par +\par +\par 408 +\par 303 +\par +\par +\par 407 +\par 304 +\par +\par +\par 406 +\par 304 +\par +\par +\par 405 +\par 305 +\par +\par +\par 405 +\par 306 +\par +\par +\par 405 +\par 307 +\par +\par +\par 404 +\par 308 +\par +\par +\par 404 +\par 309 +\par +\par +\par 404 +\par 310 +\par +\par +\par 404 +\par 311 +\par +\par +\par 404 +\par 312 +\par +\par +\par 404 +\par 313 +\par +\par +\par 404 +\par 314 +\par +\par +\par 404 +\par 315 +\par +\par +\par 404 +\par 316 +\par +\par +\par 405 +\par 317 +\par +\par +\par 405 +\par 318 +\par +\par +\par 406 +\par 319 +\par +\par +\par 407 +\par 320 +\par +\par +\par 408 +\par 321 +\par +\par +\par 409 +\par 321 +\par +\par +\par 410 +\par 322 +\par +\par +\par 411 +\par 323 +\par +\par +\par 412 +\par 323 +\par +\par +\par 413 +\par 323 +\par +\par +\par 414 +\par 323 +\par +\par +\par 415 +\par 323 +\par +\par +\par 416 +\par 323 +\par +\par +\par 417 +\par 323 +\par +\par +\par +\par -420.100006 +\par 1.3.6.1.4.1.9328.50.3.1905 +\par TRUE +\par +\par 417 +\par 323 +\par +\par +\par 418 +\par 322 +\par +\par +\par 418 +\par 321 +\par +\par +\par 419 +\par 320 +\par +\par +\par 419 +\par 319 +\par +\par +\par 420 +\par 318 +\par +\par +\par 421 +\par 317 +\par +\par +\par 421 +\par 316 +\par +\par +\par 422 +\par 315 +\par +\par +\par 423 +\par 314 +\par +\par +\par 423 +\par 313 +\par +\par +\par 423 +\par 312 +\par +\par +\par 423 +\par 311 +\par +\par +\par 424 +\par 310 +\par +\par +\par 424 +\par 309 +\par +\par +\par 424 +\par 308 +\par +\par +\par 423 +\par 307 +\par +\par +\par 423 +\par 306 +\par +\par +\par 422 
+\par 305 +\par +\par +\par 422 +\par 304 +\par +\par +\par 421 +\par 303 +\par +\par +\par 420 +\par 302 +\par +\par +\par 419 +\par 301 +\par +\par +\par 418 +\par 300 +\par +\par +\par 417 +\par 300 +\par +\par +\par 416 +\par 300 +\par +\par +\par 415 +\par 301 +\par +\par +\par 414 +\par 301 +\par +\par +\par 413 +\par 301 +\par +\par +\par 412 +\par 301 +\par +\par +\par 411 +\par 301 +\par +\par +\par 410 +\par 302 +\par +\par +\par 409 +\par 302 +\par +\par +\par 408 +\par 302 +\par +\par +\par 407 +\par 303 +\par +\par +\par 406 +\par 303 +\par +\par +\par 405 +\par 304 +\par +\par +\par 405 +\par 305 +\par +\par +\par 405 +\par 306 +\par +\par +\par 405 +\par 307 +\par +\par +\par 405 +\par 308 +\par +\par +\par 405 +\par 309 +\par +\par +\par 405 +\par 310 +\par +\par +\par 405 +\par 311 +\par +\par +\par 405 +\par 312 +\par +\par +\par 405 +\par 313 +\par +\par +\par 405 +\par 314 +\par +\par +\par 405 +\par 315 +\par +\par +\par 405 +\par 316 +\par +\par +\par 405 +\par 317 +\par +\par +\par 406 +\par 318 +\par +\par +\par 407 +\par 319 +\par +\par +\par 408 +\par 320 +\par +\par +\par 409 +\par 321 +\par +\par +\par 410 +\par 322 +\par +\par +\par 411 +\par 323 +\par +\par +\par 412 +\par 323 +\par +\par +\par 413 +\par 323 +\par +\par +\par 414 +\par 323 +\par +\par +\par 415 +\par 323 +\par +\par +\par 416 +\par 323 +\par +\par +\par 417 +\par 323 +\par +\par +\par +\par -420.850006 +\par 1.3.6.1.4.1.9328.50.3.1906 +\par TRUE +\par +\par 414 +\par 322 +\par +\par +\par 415 +\par 321 +\par +\par +\par 416 +\par 321 +\par +\par +\par 417 +\par 320 +\par +\par +\par 418 +\par 320 +\par +\par +\par 419 +\par 319 +\par +\par +\par 420 +\par 318 +\par +\par +\par 421 +\par 317 +\par +\par +\par 422 +\par 316 +\par +\par +\par 422 +\par 315 +\par +\par +\par 423 +\par 314 +\par +\par +\par 423 +\par 313 +\par +\par +\par 424 +\par 312 +\par +\par +\par 424 +\par 311 +\par +\par +\par 424 +\par 310 +\par +\par +\par 424 +\par 309 +\par +\par +\par 424 +\par 308 +\par +\par +\par 424 +\par 307 +\par +\par +\par 424 +\par 306 +\par +\par +\par 424 +\par 305 +\par +\par +\par 424 +\par 304 +\par +\par +\par 423 +\par 303 +\par +\par +\par 422 +\par 302 +\par +\par +\par 421 +\par 301 +\par +\par +\par 420 +\par 301 +\par +\par +\par 419 +\par 301 +\par +\par +\par 418 +\par 300 +\par +\par +\par 417 +\par 300 +\par +\par +\par 416 +\par 300 +\par +\par +\par 415 +\par 300 +\par +\par +\par 414 +\par 301 +\par +\par +\par 413 +\par 301 +\par +\par +\par 412 +\par 301 +\par +\par +\par 411 +\par 301 +\par +\par +\par 410 +\par 302 +\par +\par +\par 409 +\par 303 +\par +\par +\par 409 +\par 304 +\par +\par +\par 408 +\par 305 +\par +\par +\par 408 +\par 306 +\par +\par +\par 407 +\par 307 +\par +\par +\par 407 +\par 308 +\par +\par +\par 407 +\par 309 +\par +\par +\par 407 +\par 310 +\par +\par +\par 407 +\par 311 +\par +\par +\par 407 +\par 312 +\par +\par +\par 407 +\par 313 +\par +\par +\par 407 +\par 314 +\par +\par +\par 407 +\par 315 +\par +\par +\par 407 +\par 316 +\par +\par +\par 407 +\par 317 +\par +\par +\par 407 +\par 318 +\par +\par +\par 408 +\par 319 +\par +\par +\par 408 +\par 320 +\par +\par +\par 409 +\par 321 +\par +\par +\par 410 +\par 321 +\par +\par +\par 411 +\par 322 +\par +\par +\par 412 +\par 322 +\par +\par +\par 413 +\par 322 +\par +\par +\par 414 +\par 322 +\par +\par +\par +\par -421.600006 +\par 1.3.6.1.4.1.9328.50.3.1907 +\par TRUE +\par +\par 412 +\par 323 +\par +\par +\par 413 +\par 322 +\par +\par +\par 414 +\par 322 +\par +\par +\par 415 +\par 321 
+\par +\par +\par 416 +\par 321 +\par +\par +\par 417 +\par 320 +\par +\par +\par 418 +\par 319 +\par +\par +\par 419 +\par 318 +\par +\par +\par 420 +\par 317 +\par +\par +\par 421 +\par 316 +\par +\par +\par 421 +\par 315 +\par +\par +\par 421 +\par 314 +\par +\par +\par 422 +\par 313 +\par +\par +\par 422 +\par 312 +\par +\par +\par 423 +\par 311 +\par +\par +\par 423 +\par 310 +\par +\par +\par 423 +\par 309 +\par +\par +\par 422 +\par 308 +\par +\par +\par 422 +\par 307 +\par +\par +\par 422 +\par 306 +\par +\par +\par 421 +\par 305 +\par +\par +\par 420 +\par 304 +\par +\par +\par 420 +\par 303 +\par +\par +\par 420 +\par 302 +\par +\par +\par 419 +\par 301 +\par +\par +\par 418 +\par 301 +\par +\par +\par 417 +\par 300 +\par +\par +\par 416 +\par 300 +\par +\par +\par 415 +\par 300 +\par +\par +\par 414 +\par 300 +\par +\par +\par 413 +\par 300 +\par +\par +\par 412 +\par 300 +\par +\par +\par 411 +\par 301 +\par +\par +\par 410 +\par 301 +\par +\par +\par 410 +\par 302 +\par +\par +\par 410 +\par 303 +\par +\par +\par 409 +\par 304 +\par +\par +\par 409 +\par 305 +\par +\par +\par 409 +\par 306 +\par +\par +\par 408 +\par 307 +\par +\par +\par 408 +\par 308 +\par +\par +\par 408 +\par 309 +\par +\par +\par 407 +\par 310 +\par +\par +\par 407 +\par 311 +\par +\par +\par 407 +\par 312 +\par +\par +\par 407 +\par 313 +\par +\par +\par 407 +\par 314 +\par +\par +\par 407 +\par 315 +\par +\par +\par 407 +\par 316 +\par +\par +\par 407 +\par 317 +\par +\par +\par 407 +\par 318 +\par +\par +\par 407 +\par 319 +\par +\par +\par 407 +\par 320 +\par +\par +\par 407 +\par 321 +\par +\par +\par 408 +\par 322 +\par +\par +\par 409 +\par 322 +\par +\par +\par 410 +\par 323 +\par +\par +\par 411 +\par 323 +\par +\par +\par 412 +\par 323 +\par +\par +\par +\par -422.350006 +\par 1.3.6.1.4.1.9328.50.3.1908 +\par TRUE +\par +\par 421 +\par 320 +\par +\par +\par 421 +\par 319 +\par +\par +\par 421 +\par 318 +\par +\par +\par 422 +\par 317 +\par +\par +\par 423 +\par 316 +\par +\par +\par 423 +\par 315 +\par +\par +\par 423 +\par 314 +\par +\par +\par 423 +\par 313 +\par +\par +\par 422 +\par 312 +\par +\par +\par 421 +\par 311 +\par +\par +\par 420 +\par 310 +\par +\par +\par 420 +\par 309 +\par +\par +\par 420 +\par 308 +\par +\par +\par 420 +\par 307 +\par +\par +\par 420 +\par 306 +\par +\par +\par 420 +\par 305 +\par +\par +\par 420 +\par 304 +\par +\par +\par 419 +\par 303 +\par +\par +\par 418 +\par 302 +\par +\par +\par 417 +\par 301 +\par +\par +\par 416 +\par 302 +\par +\par +\par 415 +\par 301 +\par +\par +\par 414 +\par 301 +\par +\par +\par 413 +\par 301 +\par +\par +\par 412 +\par 302 +\par +\par +\par 411 +\par 303 +\par +\par +\par 410 +\par 304 +\par +\par +\par 410 +\par 305 +\par +\par +\par 409 +\par 306 +\par +\par +\par 409 +\par 307 +\par +\par +\par 409 +\par 308 +\par +\par +\par 409 +\par 309 +\par +\par +\par 409 +\par 310 +\par +\par +\par 409 +\par 311 +\par +\par +\par 409 +\par 312 +\par +\par +\par 409 +\par 313 +\par +\par +\par 409 +\par 314 +\par +\par +\par 409 +\par 315 +\par +\par +\par 409 +\par 316 +\par +\par +\par 409 +\par 317 +\par +\par +\par 409 +\par 318 +\par +\par +\par 410 +\par 319 +\par +\par +\par 411 +\par 319 +\par +\par +\par 412 +\par 320 +\par +\par +\par 413 +\par 320 +\par +\par +\par 414 +\par 320 +\par +\par +\par 415 +\par 320 +\par +\par +\par 416 +\par 320 +\par +\par +\par 417 +\par 320 +\par +\par +\par 418 +\par 319 +\par +\par +\par 419 +\par 318 +\par +\par +\par 420 +\par 319 +\par +\par +\par 421 +\par 320 +\par +\par +\par +\par 
-423.100006 +\par 1.3.6.1.4.1.9328.50.3.1909 +\par TRUE +\par +\par 416 +\par 319 +\par +\par +\par 417 +\par 318 +\par +\par +\par 418 +\par 318 +\par +\par +\par 419 +\par 317 +\par +\par +\par 420 +\par 316 +\par +\par +\par 421 +\par 315 +\par +\par +\par 422 +\par 314 +\par +\par +\par 421 +\par 313 +\par +\par +\par 421 +\par 312 +\par +\par +\par 421 +\par 311 +\par +\par +\par 420 +\par 310 +\par +\par +\par 420 +\par 309 +\par +\par +\par 420 +\par 308 +\par +\par +\par 420 +\par 307 +\par +\par +\par 420 +\par 306 +\par +\par +\par 420 +\par 305 +\par +\par +\par 419 +\par 304 +\par +\par +\par 418 +\par 303 +\par +\par +\par 417 +\par 303 +\par +\par +\par 416 +\par 303 +\par +\par +\par 415 +\par 303 +\par +\par +\par 414 +\par 303 +\par +\par +\par 413 +\par 304 +\par +\par +\par 412 +\par 305 +\par +\par +\par 411 +\par 306 +\par +\par +\par 411 +\par 307 +\par +\par +\par 410 +\par 308 +\par +\par +\par 410 +\par 309 +\par +\par +\par 409 +\par 310 +\par +\par +\par 409 +\par 311 +\par +\par +\par 408 +\par 312 +\par +\par +\par 408 +\par 313 +\par +\par +\par 409 +\par 314 +\par +\par +\par 410 +\par 314 +\par +\par +\par 411 +\par 314 +\par +\par +\par 412 +\par 314 +\par +\par +\par 413 +\par 315 +\par +\par +\par 413 +\par 316 +\par +\par +\par 413 +\par 317 +\par +\par +\par 414 +\par 318 +\par +\par +\par 415 +\par 319 +\par +\par +\par 416 +\par 319 +\par +\par +\par +\par -423.850006 +\par 1.3.6.1.4.1.9328.50.3.1910 +\par TRUE +\par +\par 417 +\par 320 +\par +\par +\par 418 +\par 319 +\par +\par +\par 419 +\par 318 +\par +\par +\par 420 +\par 318 +\par +\par +\par 421 +\par 318 +\par +\par +\par 422 +\par 318 +\par +\par +\par 423 +\par 317 +\par +\par +\par 423 +\par 316 +\par +\par +\par 422 +\par 315 +\par +\par +\par 421 +\par 315 +\par +\par +\par 420 +\par 314 +\par +\par +\par 420 +\par 313 +\par +\par +\par 420 +\par 312 +\par +\par +\par 420 +\par 311 +\par +\par +\par 420 +\par 310 +\par +\par +\par 419 +\par 309 +\par +\par +\par 419 +\par 308 +\par +\par +\par 420 +\par 307 +\par +\par +\par 420 +\par 306 +\par +\par +\par 419 +\par 305 +\par +\par +\par 419 +\par 304 +\par +\par +\par 418 +\par 303 +\par +\par +\par 417 +\par 303 +\par +\par +\par 416 +\par 303 +\par +\par +\par 415 +\par 303 +\par +\par +\par 414 +\par 304 +\par +\par +\par 413 +\par 304 +\par +\par +\par 412 +\par 305 +\par +\par +\par 411 +\par 306 +\par +\par +\par 410 +\par 307 +\par +\par +\par 410 +\par 308 +\par +\par +\par 411 +\par 309 +\par +\par +\par 411 +\par 310 +\par +\par +\par 411 +\par 311 +\par +\par +\par 411 +\par 312 +\par +\par +\par 412 +\par 313 +\par +\par +\par 412 +\par 314 +\par +\par +\par 413 +\par 315 +\par +\par +\par 413 +\par 316 +\par +\par +\par 413 +\par 317 +\par +\par +\par 413 +\par 318 +\par +\par +\par 413 +\par 319 +\par +\par +\par 414 +\par 320 +\par +\par +\par 415 +\par 320 +\par +\par +\par 416 +\par 320 +\par +\par +\par 417 +\par 320 +\par +\par +\par +\par -424.600006 +\par 1.3.6.1.4.1.9328.50.3.1911 +\par TRUE +\par +\par 414 +\par 321 +\par +\par +\par 415 +\par 320 +\par +\par +\par 416 +\par 320 +\par +\par +\par 417 +\par 319 +\par +\par +\par 418 +\par 319 +\par +\par +\par 419 +\par 318 +\par +\par +\par 420 +\par 318 +\par +\par +\par 421 +\par 318 +\par +\par +\par 422 +\par 317 +\par +\par +\par 422 +\par 316 +\par +\par +\par 423 +\par 315 +\par +\par +\par 422 +\par 314 +\par +\par +\par 421 +\par 314 +\par +\par +\par 420 +\par 315 +\par +\par +\par 419 +\par 314 +\par +\par +\par 419 +\par 313 +\par +\par +\par 418 +\par 
312 +\par +\par +\par 418 +\par 311 +\par +\par +\par 418 +\par 310 +\par +\par +\par 418 +\par 309 +\par +\par +\par 418 +\par 308 +\par +\par +\par 418 +\par 307 +\par +\par +\par 418 +\par 306 +\par +\par +\par 418 +\par 305 +\par +\par +\par 417 +\par 305 +\par +\par +\par 416 +\par 304 +\par +\par +\par 415 +\par 304 +\par +\par +\par 414 +\par 304 +\par +\par +\par 413 +\par 305 +\par +\par +\par 412 +\par 306 +\par +\par +\par 412 +\par 307 +\par +\par +\par 411 +\par 308 +\par +\par +\par 411 +\par 309 +\par +\par +\par 410 +\par 310 +\par +\par +\par 410 +\par 311 +\par +\par +\par 411 +\par 310 +\par +\par +\par 412 +\par 310 +\par +\par +\par 413 +\par 311 +\par +\par +\par 413 +\par 312 +\par +\par +\par 413 +\par 313 +\par +\par +\par 413 +\par 314 +\par +\par +\par 412 +\par 315 +\par +\par +\par 412 +\par 316 +\par +\par +\par 412 +\par 317 +\par +\par +\par 411 +\par 318 +\par +\par +\par 411 +\par 319 +\par +\par +\par 412 +\par 320 +\par +\par +\par 413 +\par 321 +\par +\par +\par 414 +\par 321 +\par +\par +\par +\par -425.350006 +\par 1.3.6.1.4.1.9328.50.3.1912 +\par TRUE +\par +\par 413 +\par 321 +\par +\par +\par 414 +\par 320 +\par +\par +\par 415 +\par 319 +\par +\par +\par 416 +\par 319 +\par +\par +\par 417 +\par 318 +\par +\par +\par 418 +\par 318 +\par +\par +\par 419 +\par 318 +\par +\par +\par 420 +\par 317 +\par +\par +\par 421 +\par 317 +\par +\par +\par 422 +\par 317 +\par +\par +\par 423 +\par 317 +\par +\par +\par 424 +\par 316 +\par +\par +\par 423 +\par 315 +\par +\par +\par 422 +\par 314 +\par +\par +\par 421 +\par 314 +\par +\par +\par 420 +\par 314 +\par +\par +\par 419 +\par 314 +\par +\par +\par 418 +\par 313 +\par +\par +\par 418 +\par 312 +\par +\par +\par 417 +\par 311 +\par +\par +\par 418 +\par 310 +\par +\par +\par 419 +\par 309 +\par +\par +\par 419 +\par 308 +\par +\par +\par 419 +\par 307 +\par +\par +\par 419 +\par 306 +\par +\par +\par 419 +\par 305 +\par +\par +\par 418 +\par 304 +\par +\par +\par 417 +\par 304 +\par +\par +\par 416 +\par 304 +\par +\par +\par 415 +\par 305 +\par +\par +\par 414 +\par 306 +\par +\par +\par 413 +\par 307 +\par +\par +\par 412 +\par 307 +\par +\par +\par 411 +\par 307 +\par +\par +\par 410 +\par 308 +\par +\par +\par 409 +\par 309 +\par +\par +\par 409 +\par 310 +\par +\par +\par 410 +\par 311 +\par +\par +\par 410 +\par 312 +\par +\par +\par 411 +\par 313 +\par +\par +\par 412 +\par 314 +\par +\par +\par 412 +\par 315 +\par +\par +\par 411 +\par 316 +\par +\par +\par 411 +\par 317 +\par +\par +\par 410 +\par 318 +\par +\par +\par 410 +\par 319 +\par +\par +\par 411 +\par 320 +\par +\par +\par 412 +\par 321 +\par +\par +\par 413 +\par 321 +\par +\par +\par +\par -425.350006 +\par 1.3.6.1.4.1.9328.50.3.1912 +\par FALSE +\par +\par 413 +\par 309 +\par +\par +\par 414 +\par 308 +\par +\par +\par 415 +\par 309 +\par +\par +\par 414 +\par 310 +\par +\par +\par 413 +\par 309 +\par +\par +\par +\par -426.100006 +\par 1.3.6.1.4.1.9328.50.3.1913 +\par TRUE +\par +\par 413 +\par 319 +\par +\par +\par 414 +\par 318 +\par +\par +\par 415 +\par 317 +\par +\par +\par 416 +\par 316 +\par +\par +\par 417 +\par 316 +\par +\par +\par 418 +\par 316 +\par +\par +\par 419 +\par 316 +\par +\par +\par 419 +\par 315 +\par +\par +\par 418 +\par 314 +\par +\par +\par 417 +\par 313 +\par +\par +\par 417 +\par 312 +\par +\par +\par 417 +\par 311 +\par +\par +\par 416 +\par 311 +\par +\par +\par 415 +\par 310 +\par +\par +\par 414 +\par 310 +\par +\par +\par 413 +\par 310 +\par +\par +\par 412 +\par 309 +\par +\par +\par 412 +\par 308 
+\par +\par +\par 412 +\par 307 +\par +\par +\par 412 +\par 306 +\par +\par +\par 412 +\par 305 +\par +\par +\par 412 +\par 304 +\par +\par +\par 411 +\par 303 +\par +\par +\par 410 +\par 303 +\par +\par +\par 409 +\par 304 +\par +\par +\par 408 +\par 305 +\par +\par +\par 408 +\par 306 +\par +\par +\par 409 +\par 307 +\par +\par +\par 409 +\par 308 +\par +\par +\par 409 +\par 309 +\par +\par +\par 410 +\par 310 +\par +\par +\par 410 +\par 311 +\par +\par +\par 410 +\par 312 +\par +\par +\par 411 +\par 313 +\par +\par +\par 412 +\par 314 +\par +\par +\par 412 +\par 315 +\par +\par +\par 412 +\par 316 +\par +\par +\par 412 +\par 317 +\par +\par +\par 412 +\par 318 +\par +\par +\par 413 +\par 319 +\par +\par +\par +\par -426.850006 +\par 1.3.6.1.4.1.9328.50.3.1914 +\par TRUE +\par +\par 415 +\par 317 +\par +\par +\par 416 +\par 316 +\par +\par +\par 417 +\par 315 +\par +\par +\par 418 +\par 314 +\par +\par +\par 417 +\par 313 +\par +\par +\par 416 +\par 312 +\par +\par +\par 415 +\par 311 +\par +\par +\par 414 +\par 310 +\par +\par +\par 413 +\par 310 +\par +\par +\par 412 +\par 309 +\par +\par +\par 411 +\par 308 +\par +\par +\par 411 +\par 307 +\par +\par +\par 411 +\par 306 +\par +\par +\par 411 +\par 305 +\par +\par +\par 410 +\par 304 +\par +\par +\par 409 +\par 304 +\par +\par +\par 408 +\par 305 +\par +\par +\par 407 +\par 306 +\par +\par +\par 407 +\par 307 +\par +\par +\par 408 +\par 308 +\par +\par +\par 408 +\par 309 +\par +\par +\par 409 +\par 310 +\par +\par +\par 409 +\par 311 +\par +\par +\par 409 +\par 312 +\par +\par +\par 409 +\par 313 +\par +\par +\par 410 +\par 314 +\par +\par +\par 411 +\par 315 +\par +\par +\par 411 +\par 316 +\par +\par +\par 412 +\par 317 +\par +\par +\par 413 +\par 317 +\par +\par +\par 414 +\par 317 +\par +\par +\par 415 +\par 317 +\par +\par +\par +\par -427.600006 +\par 1.3.6.1.4.1.9328.50.3.1915 +\par TRUE +\par +\par 410 +\par 314 +\par +\par +\par 411 +\par 313 +\par +\par +\par 412 +\par 313 +\par +\par +\par 413 +\par 312 +\par +\par +\par 413 +\par 311 +\par +\par +\par 412 +\par 310 +\par +\par +\par 412 +\par 309 +\par +\par +\par 411 +\par 308 +\par +\par +\par 412 +\par 307 +\par +\par +\par 411 +\par 306 +\par +\par +\par 410 +\par 305 +\par +\par +\par 409 +\par 305 +\par +\par +\par 408 +\par 306 +\par +\par +\par 408 +\par 307 +\par +\par +\par 408 +\par 308 +\par +\par +\par 408 +\par 309 +\par +\par +\par 408 +\par 310 +\par +\par +\par 409 +\par 311 +\par +\par +\par 409 +\par 312 +\par +\par +\par 409 +\par 313 +\par +\par +\par 410 +\par 314 +\par +\par +\par +\par -428.350006 +\par 1.3.6.1.4.1.9328.50.3.1916 +\par TRUE +\par +\par 410 +\par 313 +\par +\par +\par 411 +\par 312 +\par +\par +\par 412 +\par 311 +\par +\par +\par 412 +\par 310 +\par +\par +\par 413 +\par 309 +\par +\par +\par 412 +\par 308 +\par +\par +\par 411 +\par 308 +\par +\par +\par 410 +\par 307 +\par +\par +\par 409 +\par 306 +\par +\par +\par 408 +\par 306 +\par +\par +\par 407 +\par 307 +\par +\par +\par 407 +\par 308 +\par +\par +\par 407 +\par 309 +\par +\par +\par 408 +\par 310 +\par +\par +\par 407 +\par 311 +\par +\par +\par 407 +\par 312 +\par +\par +\par 408 +\par 313 +\par +\par +\par 409 +\par 313 +\par +\par +\par 410 +\par 313 +\par +\par +\par +\par -429.100006 +\par 1.3.6.1.4.1.9328.50.3.1917 +\par TRUE +\par +\par 409 +\par 312 +\par +\par +\par 410 +\par 311 +\par +\par +\par 411 +\par 310 +\par +\par +\par 411 +\par 309 +\par +\par +\par 410 +\par 308 +\par +\par +\par 410 +\par 307 +\par +\par +\par 409 +\par 306 +\par +\par +\par 408 
+\par 306 +\par +\par +\par 407 +\par 307 +\par +\par +\par 406 +\par 308 +\par +\par +\par 406 +\par 309 +\par +\par +\par 406 +\par 310 +\par +\par +\par 407 +\par 311 +\par +\par +\par 408 +\par 312 +\par +\par +\par 409 +\par 312 +\par +\par +\par +\par -429.850006 +\par 1.3.6.1.4.1.9328.50.3.1918 +\par TRUE +\par +\par 408 +\par 313 +\par +\par +\par 409 +\par 312 +\par +\par +\par 409 +\par 311 +\par +\par +\par 410 +\par 310 +\par +\par +\par 410 +\par 309 +\par +\par +\par 409 +\par 308 +\par +\par +\par 408 +\par 307 +\par +\par +\par 407 +\par 306 +\par +\par +\par 406 +\par 306 +\par +\par +\par 405 +\par 306 +\par +\par +\par 404 +\par 307 +\par +\par +\par 404 +\par 308 +\par +\par +\par 405 +\par 309 +\par +\par +\par 406 +\par 310 +\par +\par +\par 406 +\par 311 +\par +\par +\par 407 +\par 312 +\par +\par +\par 408 +\par 313 +\par +\par +\par +\par -430.600006 +\par 1.3.6.1.4.1.9328.50.3.1919 +\par TRUE +\par +\par 408 +\par 311 +\par +\par +\par 409 +\par 310 +\par +\par +\par 410 +\par 309 +\par +\par +\par 409 +\par 308 +\par +\par +\par 408 +\par 307 +\par +\par +\par 407 +\par 306 +\par +\par +\par 406 +\par 306 +\par +\par +\par 405 +\par 306 +\par +\par +\par 404 +\par 307 +\par +\par +\par 404 +\par 308 +\par +\par +\par 405 +\par 309 +\par +\par +\par 406 +\par 310 +\par +\par +\par 407 +\par 311 +\par +\par +\par 408 +\par 311 +\par +\par +\par +\par -431.350006 +\par 1.3.6.1.4.1.9328.50.3.1920 +\par TRUE +\par +\par 405 +\par 311 +\par +\par +\par 406 +\par 310 +\par +\par +\par 407 +\par 309 +\par +\par +\par 407 +\par 308 +\par +\par +\par 406 +\par 307 +\par +\par +\par 405 +\par 307 +\par +\par +\par 404 +\par 308 +\par +\par +\par 404 +\par 309 +\par +\par +\par 404 +\par 310 +\par +\par +\par 405 +\par 311 +\par +\par +\par +\par +\par 1779 +\par +\par 3 +\par 1 +\par 6 +\par 3 +\par 5 +\par 1 +\par 1 +\par 5 +\par 3 +\par +\par +\par -439.600006 +\par 1.3.6.1.4.1.9328.50.3.1921 +\par TRUE +\par +\par 430 +\par 306 +\par +\par +\par 431 +\par 305 +\par +\par +\par 432 +\par 304 +\par +\par +\par 431 +\par 303 +\par +\par +\par 430 +\par 303 +\par +\par +\par 429 +\par 303 +\par +\par +\par 428 +\par 304 +\par +\par +\par 428 +\par 305 +\par +\par +\par 429 +\par 306 +\par +\par +\par 430 +\par 306 +\par +\par +\par +\par -440.350006 +\par 1.3.6.1.4.1.9328.50.3.1922 +\par TRUE +\par +\par 431 +\par 309 +\par +\par +\par 432 +\par 308 +\par +\par +\par 432 +\par 307 +\par +\par +\par 432 +\par 306 +\par +\par +\par 432 +\par 305 +\par +\par +\par 432 +\par 304 +\par +\par +\par 432 +\par 303 +\par +\par +\par 432 +\par 302 +\par +\par +\par 432 +\par 301 +\par +\par +\par 431 +\par 301 +\par +\par +\par 430 +\par 302 +\par +\par +\par 429 +\par 302 +\par +\par +\par 429 +\par 303 +\par +\par +\par 428 +\par 304 +\par +\par +\par 428 +\par 305 +\par +\par +\par 429 +\par 306 +\par +\par +\par 428 +\par 307 +\par +\par +\par 428 +\par 308 +\par +\par +\par 429 +\par 309 +\par +\par +\par 430 +\par 309 +\par +\par +\par 431 +\par 309 +\par +\par +\par +\par -441.100006 +\par 1.3.6.1.4.1.9328.50.3.1923 +\par TRUE +\par +\par 430 +\par 311 +\par +\par +\par 430 +\par 310 +\par +\par +\par 431 +\par 309 +\par +\par +\par 432 +\par 309 +\par +\par +\par 433 +\par 308 +\par +\par +\par 433 +\par 307 +\par +\par +\par 433 +\par 306 +\par +\par +\par 433 +\par 305 +\par +\par +\par 433 +\par 304 +\par +\par +\par 433 +\par 303 +\par +\par +\par 433 +\par 302 +\par +\par +\par 433 +\par 301 +\par +\par +\par 432 +\par 300 +\par +\par +\par 431 +\par 299 +\par +\par +\par 
430 +\par 299 +\par +\par +\par 429 +\par 299 +\par +\par +\par 429 +\par 300 +\par +\par +\par 428 +\par 301 +\par +\par +\par 427 +\par 302 +\par +\par +\par 427 +\par 303 +\par +\par +\par 426 +\par 304 +\par +\par +\par 426 +\par 305 +\par +\par +\par 427 +\par 306 +\par +\par +\par 427 +\par 307 +\par +\par +\par 428 +\par 308 +\par +\par +\par 429 +\par 309 +\par +\par +\par 430 +\par 310 +\par +\par +\par 430 +\par 311 +\par +\par +\par +\par -441.850006 +\par 1.3.6.1.4.1.9328.50.3.1924 +\par TRUE +\par +\par 431 +\par 312 +\par +\par +\par 431 +\par 311 +\par +\par +\par 432 +\par 310 +\par +\par +\par 433 +\par 309 +\par +\par +\par 433 +\par 308 +\par +\par +\par 434 +\par 307 +\par +\par +\par 433 +\par 306 +\par +\par +\par 433 +\par 305 +\par +\par +\par 434 +\par 304 +\par +\par +\par 434 +\par 303 +\par +\par +\par 433 +\par 302 +\par +\par +\par 433 +\par 301 +\par +\par +\par 433 +\par 300 +\par +\par +\par 432 +\par 299 +\par +\par +\par 431 +\par 299 +\par +\par +\par 430 +\par 299 +\par +\par +\par 429 +\par 299 +\par +\par +\par 428 +\par 300 +\par +\par +\par 427 +\par 301 +\par +\par +\par 427 +\par 302 +\par +\par +\par 426 +\par 303 +\par +\par +\par 426 +\par 304 +\par +\par +\par 425 +\par 305 +\par +\par +\par 426 +\par 306 +\par +\par +\par 427 +\par 307 +\par +\par +\par 428 +\par 307 +\par +\par +\par 429 +\par 308 +\par +\par +\par 429 +\par 309 +\par +\par +\par 430 +\par 310 +\par +\par +\par 430 +\par 311 +\par +\par +\par 431 +\par 312 +\par +\par +\par +\par -442.600006 +\par 1.3.6.1.4.1.9328.50.3.1925 +\par TRUE +\par +\par 431 +\par 312 +\par +\par +\par 432 +\par 311 +\par +\par +\par 432 +\par 310 +\par +\par +\par 433 +\par 309 +\par +\par +\par 433 +\par 308 +\par +\par +\par 433 +\par 307 +\par +\par +\par 433 +\par 306 +\par +\par +\par 433 +\par 305 +\par +\par +\par 433 +\par 304 +\par +\par +\par 433 +\par 303 +\par +\par +\par 433 +\par 302 +\par +\par +\par 433 +\par 301 +\par +\par +\par 433 +\par 300 +\par +\par +\par 433 +\par 299 +\par +\par +\par 432 +\par 298 +\par +\par +\par 431 +\par 297 +\par +\par +\par 430 +\par 298 +\par +\par +\par 429 +\par 299 +\par +\par +\par 428 +\par 299 +\par +\par +\par 427 +\par 300 +\par +\par +\par 427 +\par 301 +\par +\par +\par 426 +\par 302 +\par +\par +\par 426 +\par 303 +\par +\par +\par 427 +\par 304 +\par +\par +\par 427 +\par 305 +\par +\par +\par 427 +\par 306 +\par +\par +\par 428 +\par 307 +\par +\par +\par 429 +\par 308 +\par +\par +\par 429 +\par 309 +\par +\par +\par 429 +\par 310 +\par +\par +\par 429 +\par 311 +\par +\par +\par 430 +\par 312 +\par +\par +\par 431 +\par 312 +\par +\par +\par +\par -443.350006 +\par 1.3.6.1.4.1.9328.50.3.1926 +\par TRUE +\par +\par 431 +\par 313 +\par +\par +\par 432 +\par 312 +\par +\par +\par 432 +\par 311 +\par +\par +\par 433 +\par 310 +\par +\par +\par 433 +\par 309 +\par +\par +\par 433 +\par 308 +\par +\par +\par 433 +\par 307 +\par +\par +\par 433 +\par 306 +\par +\par +\par 434 +\par 305 +\par +\par +\par 434 +\par 304 +\par +\par +\par 434 +\par 303 +\par +\par +\par 434 +\par 302 +\par +\par +\par 434 +\par 301 +\par +\par +\par 434 +\par 300 +\par +\par +\par 433 +\par 299 +\par +\par +\par 434 +\par 298 +\par +\par +\par 433 +\par 297 +\par +\par +\par 432 +\par 297 +\par +\par +\par 431 +\par 297 +\par +\par +\par 430 +\par 297 +\par +\par +\par 429 +\par 297 +\par +\par +\par 428 +\par 297 +\par +\par +\par 427 +\par 298 +\par +\par +\par 426 +\par 299 +\par +\par +\par 426 +\par 300 +\par +\par +\par 426 +\par 301 +\par +\par +\par 426 
+\par 302 +\par +\par +\par 427 +\par 303 +\par +\par +\par 427 +\par 304 +\par +\par +\par 427 +\par 305 +\par +\par +\par 427 +\par 306 +\par +\par +\par 428 +\par 307 +\par +\par +\par 428 +\par 308 +\par +\par +\par 429 +\par 309 +\par +\par +\par 429 +\par 310 +\par +\par +\par 429 +\par 311 +\par +\par +\par 430 +\par 312 +\par +\par +\par 431 +\par 313 +\par +\par +\par +\par -444.100006 +\par 1.3.6.1.4.1.9328.50.3.1927 +\par TRUE +\par +\par 432 +\par 313 +\par +\par +\par 433 +\par 312 +\par +\par +\par 433 +\par 311 +\par +\par +\par 433 +\par 310 +\par +\par +\par 434 +\par 309 +\par +\par +\par 434 +\par 308 +\par +\par +\par 435 +\par 307 +\par +\par +\par 435 +\par 306 +\par +\par +\par 434 +\par 305 +\par +\par +\par 434 +\par 304 +\par +\par +\par 434 +\par 303 +\par +\par +\par 434 +\par 302 +\par +\par +\par 434 +\par 301 +\par +\par +\par 434 +\par 300 +\par +\par +\par 433 +\par 299 +\par +\par +\par 433 +\par 298 +\par +\par +\par 432 +\par 297 +\par +\par +\par 431 +\par 297 +\par +\par +\par 430 +\par 298 +\par +\par +\par 429 +\par 298 +\par +\par +\par 428 +\par 298 +\par +\par +\par 427 +\par 299 +\par +\par +\par 427 +\par 300 +\par +\par +\par 427 +\par 301 +\par +\par +\par 427 +\par 302 +\par +\par +\par 427 +\par 303 +\par +\par +\par 427 +\par 304 +\par +\par +\par 427 +\par 305 +\par +\par +\par 427 +\par 306 +\par +\par +\par 427 +\par 307 +\par +\par +\par 427 +\par 308 +\par +\par +\par 428 +\par 309 +\par +\par +\par 428 +\par 310 +\par +\par +\par 429 +\par 311 +\par +\par +\par 430 +\par 312 +\par +\par +\par 431 +\par 313 +\par +\par +\par 432 +\par 313 +\par +\par +\par +\par -444.850006 +\par 1.3.6.1.4.1.9328.50.3.1928 +\par TRUE +\par +\par 432 +\par 314 +\par +\par +\par 433 +\par 313 +\par +\par +\par 433 +\par 312 +\par +\par +\par 434 +\par 311 +\par +\par +\par 433 +\par 310 +\par +\par +\par 434 +\par 309 +\par +\par +\par 435 +\par 308 +\par +\par +\par 435 +\par 307 +\par +\par +\par 435 +\par 306 +\par +\par +\par 435 +\par 305 +\par +\par +\par 434 +\par 304 +\par +\par +\par 434 +\par 303 +\par +\par +\par 434 +\par 302 +\par +\par +\par 434 +\par 301 +\par +\par +\par 434 +\par 300 +\par +\par +\par 434 +\par 299 +\par +\par +\par 433 +\par 298 +\par +\par +\par 432 +\par 298 +\par +\par +\par 431 +\par 297 +\par +\par +\par 430 +\par 297 +\par +\par +\par 429 +\par 298 +\par +\par +\par 428 +\par 299 +\par +\par +\par 428 +\par 300 +\par +\par +\par 428 +\par 301 +\par +\par +\par 427 +\par 302 +\par +\par +\par 427 +\par 303 +\par +\par +\par 427 +\par 304 +\par +\par +\par 426 +\par 305 +\par +\par +\par 426 +\par 306 +\par +\par +\par 427 +\par 307 +\par +\par +\par 428 +\par 308 +\par +\par +\par 429 +\par 309 +\par +\par +\par 429 +\par 310 +\par +\par +\par 429 +\par 311 +\par +\par +\par 429 +\par 312 +\par +\par +\par 429 +\par 313 +\par +\par +\par 430 +\par 314 +\par +\par +\par 431 +\par 314 +\par +\par +\par 432 +\par 314 +\par +\par +\par +\par -445.600006 +\par 1.3.6.1.4.1.9328.50.3.1929 +\par TRUE +\par +\par 432 +\par 313 +\par +\par +\par 433 +\par 312 +\par +\par +\par 433 +\par 311 +\par +\par +\par 434 +\par 310 +\par +\par +\par 434 +\par 309 +\par +\par +\par 434 +\par 308 +\par +\par +\par 434 +\par 307 +\par +\par +\par 434 +\par 306 +\par +\par +\par 434 +\par 305 +\par +\par +\par 434 +\par 304 +\par +\par +\par 435 +\par 303 +\par +\par +\par 435 +\par 302 +\par +\par +\par 435 +\par 301 +\par +\par +\par 435 +\par 300 +\par +\par +\par 434 +\par 299 +\par +\par +\par 433 +\par 299 +\par +\par +\par 432 
[... several thousand lines of LIDC reading-session annotation data elided: per-ROI edge-map x/y pixel coordinates, image z-position values, SOP instance UIDs, inclusion flags, and non-nodule markers ...]
+\par }{\rtlch\fcs1 \ab\af2\afs20 \ltrch\fcs0 \b\f2\fs20\insrsid6382080 \tab End of reading session
+\par }{\rtlch\fcs1 \af2\afs20 \ltrch\fcs0 \f2\fs20\insrsid6382080
+\par }{\rtlch\fcs1 \ab\af2\afs20 \ltrch\fcs0 \b\f2\fs20\insrsid6382080 \tab End of LIDC Reading Message
+\par }}
\ No newline at end of file
diff --git a/datasets/lidc/configs.py b/datasets/lidc/configs.py
index 126300b..0a19633 100644
--- a/datasets/lidc/configs.py
+++ b/datasets/lidc/configs.py
@@ -1,445 +1,449 @@
#!/usr/bin/env python
# Copyright 2019 Division of Medical Image Computing, German Cancer Research Center (DKFZ).
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================

import sys
import os
from collections import namedtuple

sys.path.append(os.path.dirname(os.path.realpath(__file__)))
import numpy as np
sys.path.append(os.path.dirname(os.path.realpath(__file__))+"/../..")
from default_configs import DefaultConfigs

# legends, nested classes are not handled well in multiprocessing! hence, Label class def in outer scope
Label = namedtuple("Label", ['id', 'name', 'color', 'm_scores'])  # m_scores = malignancy scores
binLabel = namedtuple("binLabel", ['id', 'name', 'color', 'm_scores', 'bin_vals'])


class Configs(DefaultConfigs):

    def __init__(self, server_env=None):
        super(Configs, self).__init__(server_env)

        #########################
        #     Preprocessing     #
        #########################

        self.root_dir = '/home/gregor/networkdrives/E130-Personal/Goetz/Datenkollektive/Lungendaten/Nodules_LIDC_IDRI'
        self.raw_data_dir = '{}/new_nrrd'.format(self.root_dir)
        self.pp_dir = '/mnt/HDD2TB/Documents/data/lidc/pp_20190805'
        # 'merged' for one gt per image, 'single_annotator' for four gts per image.
self.gts_to_produce = ["single_annotator", "merged"] self.target_spacing = (0.7, 0.7, 1.25) ######################### # I/O # ######################### # path to preprocessed data. #self.pp_name = 'pp_20190318' self.pp_name = 'pp_20190805' self.input_df_name = 'info_df.pickle' self.data_sourcedir = '/mnt/HDD2TB/Documents/data/lidc/{}/'.format(self.pp_name) # settings for deployment on cluster. if server_env: # path to preprocessed data. self.data_sourcedir = '/datasets/data_ramien/lidc/{}_npz/'.format(self.pp_name) # one out of ['mrcnn', 'retina_net', 'retina_unet', 'detection_fpn']. - self.model = 'retina_net' + self.model = 'mrcnn' self.model_path = 'models/{}.py'.format(self.model if not 'retina' in self.model else 'retina_net') self.model_path = os.path.join(self.source_dir, self.model_path) ######################### # Architecture # ######################### # dimension the model operates in. one out of [2, 3]. self.dim = 3 # 'class': standard object classification per roi, pairwise combinable with each of below tasks. # if 'class' is omitted from tasks, object classes will be fg/bg (1/0) from RPN. # 'regression': regress some vector per each roi # 'regression_ken_gal': use kendall-gal uncertainty sigma # 'regression_bin': classify each roi into a bin related to a regression scale - self.prediction_tasks = ['class'] + self.prediction_tasks = ['regression'] self.start_filts = 48 if self.dim == 2 else 18 self.end_filts = self.start_filts * 4 if self.dim == 2 else self.start_filts * 2 self.res_architecture = 'resnet50' # 'resnet101' , 'resnet50' self.norm = None # one of None, 'instance_norm', 'batch_norm' # one of 'xavier_uniform', 'xavier_normal', or 'kaiming_normal', None (=default = 'kaiming_uniform') self.weight_init = None self.regression_n_features = 1 ######################### # Data Loader # ######################### # distorted gt experiments: train on single-annotator gts in a random fashion to investigate network's # handling of noisy gts. # choose 'merged' for single, merged gt per image, or 'single_annotator' for four gts per image. # validation is always performed on same gt kind as training, testing always on merged gt. self.training_gts = "merged" # select modalities from preprocessed data self.channels = [0] self.n_channels = len(self.channels) # patch_size to be used for training. pre_crop_size is the patch_size before data augmentation. self.pre_crop_size_2D = [320, 320] self.patch_size_2D = [320, 320] self.pre_crop_size_3D = [160, 160, 96] self.patch_size_3D = [160, 160, 96] self.patch_size = self.patch_size_2D if self.dim == 2 else self.patch_size_3D self.pre_crop_size = self.pre_crop_size_2D if self.dim == 2 else self.pre_crop_size_3D # ratio of free sampled batch elements before class balancing is triggered # (>0 to include "empty"/background patches.) self.batch_random_ratio = 0.3 self.balance_target = "class_targets" if 'class' in self.prediction_tasks else 'rg_bin_targets' # set 2D network to match 3D gt boxes. 
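The task switch above ('class' vs. 'regression') also decides which target the batch sampler balances on. A minimal sketch of that effect, not part of the diff, using only the field names and values shown in this config:

# illustrative only: mirrors the balance_target expression in this config
prediction_tasks = ['regression']            # new default introduced by this diff (was ['class'])
balance_target = "class_targets" if 'class' in prediction_tasks else 'rg_bin_targets'
assert balance_target == 'rg_bin_targets'    # batches are balanced on regression bins, not object classes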
self.merge_2D_to_3D_preds = self.dim==2 self.observables_rois = [] #self.rg_map = {1:1, 2:2, 3:3, 4:4, 5:5} ######################### # Colors and Legends # ######################### self.plot_frequency = 5 binary_cl_labels = [Label(1, 'benign', (*self.dark_green, 1.), (1, 2)), Label(2, 'malignant', (*self.red, 1.), (3, 4, 5))] quintuple_cl_labels = [Label(1, 'MS1', (*self.dark_green, 1.), (1,)), Label(2, 'MS2', (*self.dark_yellow, 1.), (2,)), Label(3, 'MS3', (*self.orange, 1.), (3,)), Label(4, 'MS4', (*self.bright_red, 1.), (4,)), Label(5, 'MS5', (*self.red, 1.), (5,))] # choose here if to do 2-way or 5-way regression-bin classification task_spec_cl_labels = quintuple_cl_labels self.class_labels = [ # #id #name #color #malignancy score Label( 0, 'bg', (*self.gray, 0.), (0,))] if "class" in self.prediction_tasks: self.class_labels += task_spec_cl_labels else: self.class_labels += [Label(1, 'lesion', (*self.orange, 1.), (1,2,3,4,5))] if any(['regression' in task for task in self.prediction_tasks]): self.bin_labels = [binLabel(0, 'MS0', (*self.gray, 1.), (0,), (0,))] self.bin_labels += [binLabel(cll.id, cll.name, cll.color, cll.m_scores, tuple([ms for ms in cll.m_scores])) for cll in task_spec_cl_labels] self.bin_id2label = {label.id: label for label in self.bin_labels} self.ms2bin_label = {ms: label for label in self.bin_labels for ms in label.m_scores} bins = [(min(label.bin_vals), max(label.bin_vals)) for label in self.bin_labels] self.bin_id2rg_val = {ix: [np.mean(bin)] for ix, bin in enumerate(bins)} self.bin_edges = [(bins[i][1] + bins[i + 1][0]) / 2 for i in range(len(bins) - 1)] if self.class_specific_seg: self.seg_labels = self.class_labels else: self.seg_labels = [ # id #name #color Label(0, 'bg', (*self.gray, 0.)), Label(1, 'fg', (*self.orange, 1.)) ] self.class_id2label = {label.id: label for label in self.class_labels} self.class_dict = {label.id: label.name for label in self.class_labels if label.id != 0} # class_dict is used in evaluator / ap, auc, etc. statistics, and class 0 (bg) only needs to be # evaluated in debugging self.class_cmap = {label.id: label.color for label in self.class_labels} self.seg_id2label = {label.id: label for label in self.seg_labels} self.cmap = {label.id: label.color for label in self.seg_labels} self.plot_prediction_histograms = True self.plot_stat_curves = False self.has_colorchannels = False self.plot_class_ids = True self.num_classes = len(self.class_dict) # for instance classification (excl background) self.num_seg_classes = len(self.seg_labels) # incl background ######################### # Data Augmentation # ######################### self.da_kwargs={ 'mirror': True, 'mirror_axes': tuple(np.arange(0, self.dim, 1)), 'do_elastic_deform': True, 'alpha':(0., 1500.), 'sigma':(30., 50.), 'do_rotation':True, 'angle_x': (0., 2 * np.pi), 'angle_y': (0., 0), 'angle_z': (0., 0), 'do_scale': True, 'scale':(0.8, 1.1), 'random_crop':False, 'rand_crop_dist': (self.patch_size[0] / 2. - 3, self.patch_size[1] / 2. - 3), 'border_mode_data': 'constant', 'border_cval_data': 0, 'order_data': 1} if self.dim == 3: self.da_kwargs['do_elastic_deform'] = False self.da_kwargs['angle_x'] = (0, 0.0) self.da_kwargs['angle_y'] = (0, 0.0) #must be 0!! 
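As a sanity check on the legend bookkeeping above, here is a small worked sketch (not from the repository) of what the bin variables evaluate to for the MS0 background bin plus the five single-score malignancy bins MS1 to MS5:

import numpy as np

# bin_vals as constructed above: MS0 -> (0,), MS1 -> (1,), ..., MS5 -> (5,)
bins = [(v, v) for v in range(6)]                                  # [(0, 0), (1, 1), ..., (5, 5)]
bin_id2rg_val = {ix: [np.mean(b)] for ix, b in enumerate(bins)}    # {0: [0.0], 1: [1.0], ..., 5: [5.0]}
bin_edges = [(bins[i][1] + bins[i + 1][0]) / 2 for i in range(len(bins) - 1)]
print(bin_edges)                                                   # [0.5, 1.5, 2.5, 3.5, 4.5]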
self.da_kwargs['angle_z'] = (0., 2 * np.pi) ################################# # Schedule / Selection / Optim # ################################# self.num_epochs = 130 if self.dim == 2 else 150 self.num_train_batches = 200 if self.dim == 2 else 200 self.batch_size = 20 if self.dim == 2 else 8 # decide whether to validate on entire patient volumes (like testing) or sampled patches (like training) # the former is morge accurate, while the latter is faster (depending on volume size) self.val_mode = 'val_sampling' # only 'val_sampling', 'val_patient' not implemented if self.val_mode == 'val_patient': raise NotImplementedError if self.val_mode == 'val_sampling': self.num_val_batches = 70 self.save_n_models = 4 # set a minimum epoch number for saving in case of instabilities in the first phase of training. self.min_save_thresh = 0 if self.dim == 2 else 0 # criteria to average over for saving epochs, 'criterion':weight. if "class" in self.prediction_tasks: # 'criterion': weight if len(self.class_labels)==3: self.model_selection_criteria = {"benign_ap": 0.5, "malignant_ap": 0.5} elif len(self.class_labels)==6: self.model_selection_criteria = {str(label.name)+"_ap": 1./5 for label in self.class_labels if label.id!=0} elif any("regression" in task for task in self.prediction_tasks): self.model_selection_criteria = {"lesion_ap": 0.2, "lesion_avp": 0.8} self.weight_decay = 0 self.clip_norm = 200 if 'regression_ken_gal' in self.prediction_tasks else None # number or None # int in [0, dataset_size]. select n patients from dataset for prototyping. If None, all data is used. self.select_prototype_subset = None #self.batch_size ######################### # Testing # ######################### # set the top-n-epochs to be saved for temporal averaging in testing. self.test_n_epochs = self.save_n_models self.test_aug_axes = (0,1,(0,1)) # None or list: choices are 0,1,(0,1) (0==spatial y, 1== spatial x). self.held_out_test_set = False self.max_test_patients = "all" # "all" or number self.report_score_level = ['rois', 'patient'] # choose list from 'patient', 'rois' self.patient_class_of_interest = 2 if 'class' in self.prediction_tasks else 1 self.metrics = ['ap', 'auc'] if any(['regression' in task for task in self.prediction_tasks]): self.metrics += ['avp', 'rg_MAE_weighted', 'rg_MAE_weighted_tp', 'rg_bin_accuracy_weighted', 'rg_bin_accuracy_weighted_tp'] if 'aleatoric' in self.model: self.metrics += ['rg_uncertainty', 'rg_uncertainty_tp', 'rg_uncertainty_tp_weighted'] self.evaluate_fold_means = True self.ap_match_ious = [0.1] # list of ious to be evaluated for ap-scoring. self.min_det_thresh = 0.1 # minimum confidence value to select predictions for evaluation. # aggregation method for test and val_patient predictions. # wbc = weighted box clustering as in https://arxiv.org/pdf/1811.08661.pdf, # nms = standard non-maximum suppression, or None = no clustering self.clustering = 'wbc' # iou thresh (exclusive!) 
for regarding two preds as concerning the same ROI self.clustering_iou = 0.1 # has to be larger than desired possible overlap iou of model predictions self.plot_prediction_histograms = True self.plot_stat_curves = False self.n_test_plots = 1 ######################### # Assertions # ######################### if not 'class' in self.prediction_tasks: assert self.num_classes == 1 ######################### # Add model specifics # ######################### {'detection_fpn': self.add_det_fpn_configs, 'mrcnn': self.add_mrcnn_configs, 'mrcnn_aleatoric': self.add_mrcnn_configs, + 'ufrcnn': self.add_mrcnn_configs, 'retina_net': self.add_mrcnn_configs, 'retina_unet': self.add_mrcnn_configs, }[self.model]() def rg_val_to_bin_id(self, rg_val): return float(np.digitize(np.mean(rg_val), self.bin_edges)) def add_det_fpn_configs(self): self.learning_rate = [1e-4] * self.num_epochs self.dynamic_lr_scheduling = False # RoI score assigned to aggregation from pixel prediction (connected component). One of ['max', 'median']. self.score_det = 'max' # max number of roi candidates to identify per batch element and class. self.n_roi_candidates = 10 if self.dim == 2 else 30 # loss mode: either weighted cross entropy ('wce'), batch-wise dice loss ('dice), or the sum of both ('dice_wce') self.seg_loss_mode = 'wce' # if <1, false positive predictions in foreground are penalized less. self.fp_dice_weight = 1 if self.dim == 2 else 1 if len(self.class_labels)==3: self.wce_weights = [1., 1., 1.] if self.seg_loss_mode=="dice_wce" else [0.1, 1., 1.] elif len(self.class_labels)==6: self.wce_weights = [1., 1., 1., 1., 1., 1.] if self.seg_loss_mode == "dice_wce" else [0.1, 1., 1., 1., 1., 1.] else: raise Exception("mismatch loss weights & nr of classes") self.detection_min_confidence = self.min_det_thresh self.head_classes = self.num_seg_classes def add_mrcnn_configs(self): # learning rate is a list with one entry per epoch. self.learning_rate = [1e-4] * self.num_epochs self.dynamic_lr_scheduling = False # disable the re-sampling of mask proposals to original size for speed-up. # since evaluation is detection-driven (box-matching) and not instance segmentation-driven (iou-matching), # mask-outputs are optional. self.return_masks_in_train = False self.return_masks_in_val = True self.return_masks_in_test = False # set number of proposal boxes to plot after each epoch. self.n_plot_rpn_props = 5 if self.dim == 2 else 30 # number of classes for network heads: n_foreground_classes + 1 (background) self.head_classes = self.num_classes + 1 self.frcnn_mode = False # feature map strides per pyramid level are inferred from architecture. self.backbone_strides = {'xy': [4, 8, 16, 32], 'z': [1, 2, 4, 8]} # anchor scales are chosen according to expected object sizes in data set. Default uses only one anchor scale # per pyramid level. (outer list are pyramid levels (corresponding to BACKBONE_STRIDES), inner list are scales per level.) self.rpn_anchor_scales = {'xy': [[8], [16], [32], [64]], 'z': [[2], [4], [8], [16]]} # choose which pyramid levels to extract features from: P2: 0, P3: 1, P4: 2, P5: 3. self.pyramid_levels = [0, 1, 2, 3] # number of feature maps in rpn. typically lowered in 3D to save gpu-memory. self.n_rpn_features = 512 if self.dim == 2 else 128 # anchor ratios and strides per position in feature maps. 
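To make the rg_val_to_bin_id mapping above concrete, a short worked example (a sketch only, assuming the half-integer bin edges derived for the five malignancy-score bins):

import numpy as np

bin_edges = [0.5, 1.5, 2.5, 3.5, 4.5]     # midpoints between adjacent malignancy-score bins

def rg_val_to_bin_id(rg_val):
    # same expression as in the config above
    return float(np.digitize(np.mean(rg_val), bin_edges))

print(rg_val_to_bin_id([3.2]))            # 3.0 -> regression value 3.2 lands in bin MS3
print(rg_val_to_bin_id([4.8, 5.0]))       # 5.0 -> mean 4.9 lands in the top bin MS5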
self.rpn_anchor_ratios = [0.5, 1, 2] self.rpn_anchor_stride = 1 # Threshold for first stage (RPN) non-maximum suppression (NMS): LOWER == HARDER SELECTION self.rpn_nms_threshold = 0.7 if self.dim == 2 else 0.7 # loss sampling settings. self.rpn_train_anchors_per_image = 6 #per batch element self.train_rois_per_image = 6 #per batch element self.roi_positive_ratio = 0.5 self.anchor_matching_iou = 0.7 # factor of top-k candidates to draw from per negative sample (stochastic-hard-example-mining). # poolsize to draw top-k candidates from will be shem_poolsize * n_negative_samples. self.shem_poolsize = 10 self.pool_size = (7, 7) if self.dim == 2 else (7, 7, 3) self.mask_pool_size = (14, 14) if self.dim == 2 else (14, 14, 5) self.mask_shape = (28, 28) if self.dim == 2 else (28, 28, 10) self.rpn_bbox_std_dev = np.array([0.1, 0.1, 0.1, 0.2, 0.2, 0.2]) self.bbox_std_dev = np.array([0.1, 0.1, 0.1, 0.2, 0.2, 0.2]) self.window = np.array([0, 0, self.patch_size[0], self.patch_size[1], 0, self.patch_size_3D[2]]) self.scale = np.array([self.patch_size[0], self.patch_size[1], self.patch_size[0], self.patch_size[1], self.patch_size_3D[2], self.patch_size_3D[2]]) if self.dim == 2: self.rpn_bbox_std_dev = self.rpn_bbox_std_dev[:4] self.bbox_std_dev = self.bbox_std_dev[:4] self.window = self.window[:4] self.scale = self.scale[:4] # pre-selection in proposal-layer (stage 1) for NMS-speedup. applied per batch element. self.pre_nms_limit = 3000 if self.dim == 2 else 6000 # n_proposals to be selected after NMS per batch element. too high numbers blow up memory if "detect_while_training" is True, # since proposals of the entire batch are forwarded through second stage in as one "batch". self.roi_chunk_size = 2500 if self.dim == 2 else 600 self.post_nms_rois_training = 500 if self.dim == 2 else 75 self.post_nms_rois_inference = 500 # Final selection of detections (refine_detections) self.model_max_instances_per_batch_element = 10 if self.dim == 2 else 30 # per batch element and class. self.detection_nms_threshold = 1e-5 # needs to be > 0, otherwise all predictions are one cluster. self.model_min_confidence = 0.1 if self.dim == 2: self.backbone_shapes = np.array( [[int(np.ceil(self.patch_size[0] / stride)), int(np.ceil(self.patch_size[1] / stride))] for stride in self.backbone_strides['xy']]) else: self.backbone_shapes = np.array( [[int(np.ceil(self.patch_size[0] / stride)), int(np.ceil(self.patch_size[1] / stride)), int(np.ceil(self.patch_size[2] / stride_z))] for stride, stride_z in zip(self.backbone_strides['xy'], self.backbone_strides['z'] )]) - if self.model == 'retina_net' or self.model == 'retina_unet': - - self.focal_loss = True + if self.model == 'ufrcnn': + self.operate_stride1 = True + self.class_specific_seg_flag = True + self.num_seg_classes = 3 if self.class_specific_seg_flag else 2 + self.frcnn_mode = True + if self.model == 'retina_net' or self.model == 'retina_unet': # implement extra anchor-scales according to retina-net publication. self.rpn_anchor_scales['xy'] = [[ii[0], ii[0] * (2 ** (1 / 3)), ii[0] * (2 ** (2 / 3))] for ii in self.rpn_anchor_scales['xy']] self.rpn_anchor_scales['z'] = [[ii[0], ii[0] * (2 ** (1 / 3)), ii[0] * (2 ** (2 / 3))] for ii in self.rpn_anchor_scales['z']] self.n_anchors_per_pos = len(self.rpn_anchor_ratios) * 3 self.n_rpn_features = 256 if self.dim == 2 else 128 # pre-selection of detections for NMS-speedup. per entire batch. 
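The retina-net branch above triples the single anchor scale per pyramid level by factors 2**(1/3) and 2**(2/3). A quick sketch of the resulting xy scales, assuming the default [[8], [16], [32], [64]] from this config:

xy_scales = [[8], [16], [32], [64]]       # one scale per pyramid level, as configured above
expanded = [[s[0], s[0] * 2 ** (1 / 3), s[0] * 2 ** (2 / 3)] for s in xy_scales]
print([[round(v, 2) for v in lvl] for lvl in expanded])
# [[8, 10.08, 12.7], [16, 20.16, 25.4], [32, 40.32, 50.8], [64, 80.63, 101.59]]
# combined with the 3 anchor ratios this yields n_anchors_per_pos = 3 * 3 = 9 anchors per feature-map position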
self.pre_nms_limit = (500 if self.dim == 2 else 6250) * self.batch_size # anchor matching iou is lower than in Mask R-CNN according to https://arxiv.org/abs/1708.02002 self.anchor_matching_iou = 0.5 if self.model == 'retina_unet': self.operate_stride1 = True diff --git a/datasets/prostate/check_GSBx_Re.py b/datasets/prostate/check_GSBx_Re.py deleted file mode 100755 index 8f64ca1..0000000 --- a/datasets/prostate/check_GSBx_Re.py +++ /dev/null @@ -1,120 +0,0 @@ -""" -Created at 20/11/18 16:18 -@author: gregor -""" -import os -import numpy as np -import pandas as pd - - -class CombinedPrinter(object): - """combined print function. - prints to logger and/or file if given, to normal print if non given. - - """ - def __init__(self, logger=None, file=None): - - if logger is None and file is None: - self.out = [print] - elif logger is None: - self.out = [print, file.write] - elif file is None: - self.out = [print, logger.info] - else: - self.out = [print, logger.info, file.write] - - def __call__(self, string): - for fct in self.out: - fct(string) - -def spec_to_id(spec): - """Get subject id from string""" - return int(spec[-5:]) - - -def pat_roi_GS_histo_check(root_dir): - """ Check, in histo files, whether patient-wide Gleason Score equals maximum GS found in single lesions of patient. - """ - - histo_les_path = os.path.join(root_dir, "MasterHistoAll.csv") - histo_pat_path = os.path.join(root_dir, "MasterPatientbasedAll_clean.csv") - - with open(histo_les_path,mode="r") as les_file: - les_df = pd.read_csv(les_file, delimiter=",") - with open(histo_pat_path, mode="r") as pat_file: - pat_df = pd.read_csv(pat_file, delimiter=",") - - merged_df = les_df.groupby('Master_ID').agg({'Gleason': 'max', 'segmentationsNameADC': 'last'}) - - for pid in merged_df.index: - merged_df.set_value(pid, "GSBx", pat_df[pat_df.Master_ID_Short==pid].GSBx.unique().astype('uint32')) - - #print(merged_df) - print("All patient-wise GS are maximum of lesion-wise GS?", np.all(merged_df.Gleason == merged_df.GSBx), end="\n\n") - assert np.all(merged_df.Gleason == merged_df.GSBx) - - -def lesion_redone_check(root_dir, out_path=None): - """check how many les annotations without post_fix _Re exist and if exists what their GS is - """ - - histo_les_path = os.path.join(root_dir, "Dokumente/MasterHistoAll.csv") - with open(histo_les_path,mode="r") as les_file: - les_df = pd.read_csv(les_file, delimiter=",") - if out_path is not None: - out_file = open(out_path, "w") - else: - out_file = None - print_f = CombinedPrinter(file=out_file) - - data_dir = os.path.join(root_dir, "Daten") - - matches = {} - for patient in [dir for dir in os.listdir(data_dir) if dir.startswith("Master_") \ - and os.path.isdir(os.path.join(data_dir, dir))]: - matches[patient] = {} - pat_dir = os.path.join(data_dir,patient) - lesions = [os.path.splitext(file)[0] for file in os.listdir(pat_dir) if os.path.isfile(os.path.join(pat_dir,file)) and file.startswith("seg") and "LES" in file] - lesions_wo = [os.path.splitext(file)[0] for file in lesions if not "_Re" in file] - lesions_with = [file for file in lesions if "_Re" in file and not "registered" in file] - - matches[patient] = {les_wo : [] for les_wo in lesions_wo} - - for les_wo in matches[patient].keys(): - matches[patient][les_wo] += [les_with for les_with in lesions_with if les_with.startswith(les_wo)] - - missing_les_count = 0 - for patient, lesions in sorted(list(matches.items())): - pat_df = les_df[les_df.Master_ID==spec_to_id(patient)] - for les, les_matches in sorted(list(lesions.items())): - if 
len(les_matches)==0: - if "t2" in les.lower(): - les_GS = pat_df[pat_df.segmentationsNameT2==les]["Gleason"] - elif "adc" in les.lower(): - les_GS = pat_df[pat_df.segmentationsNameADC==les]["Gleason"] - if len(les_GS)==0: - les_GS = r"[no histo finding!]" - print_f("Patient {}, lesion {} with GS {} has no matches!\n".format(patient, les, les_GS)) - missing_les_count +=1 - else: - del matches[patient][les] - #elif len(les_matches) > 1: - # print("Patient {}, Lesion {} has {} matches: {}".format(patient, les, len(les_matches), les_matches)) - if len(matches[patient])==0: - del matches[patient] - - print_f("Total missing lesion matches: {} within {} patients".format(missing_les_count, len(matches))) - - out_file.close() - - -if __name__=="__main__": - - #root_dir = "/mnt/HDD2TB/Documents/data/prostate/data_di_ana_081118_ps384_gs71/histos/" - root_dir = "/mnt/E132-Projekte/Move_to_E132-Rohdaten/Prisma_Master/Dokumente" - pat_roi_GS_histo_check(root_dir) - - root_dir = "/mnt/E132-Projekte/Move_to_E132-Rohdaten/Prisma_Master" - out_path = os.path.join(root_dir,"lesion_redone_check.txt") - lesion_redone_check(root_dir, out_path=out_path) - diff --git a/datasets/prostate/configs.py b/datasets/prostate/configs.py deleted file mode 100644 index 2de02f3..0000000 --- a/datasets/prostate/configs.py +++ /dev/null @@ -1,588 +0,0 @@ -__author__ = '' -#credit Paul F. Jaeger - -######################### -# Example Config # -######################### - -import os -import sys -import pickle - -import numpy as np -import torch - -from collections import namedtuple - -from default_configs import DefaultConfigs - -def load_obj(file_path): - with open(file_path, 'rb') as handle: - return pickle.load(handle) - -# legends, nested classes are not handled well in multiprocessing! hence, Label class def in outer scope -Label = namedtuple("Label", ['id', 'name', 'color', 'gleasons']) -binLabel = namedtuple("Label", ['id', 'name', 'color', 'gleasons', 'bin_vals']) - - -class Configs(DefaultConfigs): #todo change to Configs - - def __init__(self, server_env=None): - ######################### - # General # - ######################### - super(Configs, self).__init__(server_env) - - ######################### - # I/O # - ######################### - - self.data_sourcedir = "/mnt/HDD2TB/Documents/data/prostate/data_di_250519_ps384_gs6071/" - #self.data_sourcedir = "/mnt/HDD2TB/Documents/data/prostate/data_t2_250519_ps384_gs6071/" - #self.data_sourcedir = "/mnt/HDD2TB/Documents/data/prostate/data_analysis/" - - if server_env: - self.data_sourcedir = "/datasets/data_ramien/prostate/data_di_250519_ps384_gs6071_npz/" - #self.data_sourcedir = '/datasets/data_ramien/prostate/data_t2_250519_ps384_gs6071_npz/' - #self.data_sourcedir = "/mnt/HDD2TB/Documents/data/prostate/data_di_ana_151118_ps384_gs60/" - - self.histo_dir = os.path.join(self.data_sourcedir,"histos/") - self.info_dict_name = 'master_info.pkl' - self.info_dict_path = os.path.join(self.data_sourcedir, self.info_dict_name) - - self.config_path = os.path.realpath(__file__) - - # one out of ['mrcnn', 'retina_net', 'retina_unet', 'detection_fpn']. 
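A toy-data sketch of the consistency check that pat_roi_GS_histo_check (removed above) performs, i.e. the patient-wide GSBx must equal the maximum of the lesion-wise Gleason scores; the frames below are made up, and an index join replaces the deprecated DataFrame.set_value call of the removed script:

import pandas as pd

les_df = pd.DataFrame({"Master_ID": [1, 1, 2], "Gleason": [60, 72, 71]})   # lesion-wise
pat_df = pd.DataFrame({"Master_ID_Short": [1, 2], "GSBx": [72, 71]})       # patient-wise

merged = (les_df.groupby("Master_ID")["Gleason"].max().to_frame()
          .join(pat_df.set_index("Master_ID_Short")))
assert (merged["Gleason"] == merged["GSBx"]).all()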
- self.model = 'detection_fpn' - self.model_path = 'models/{}.py'.format(self.model if not 'retina' in self.model else 'retina_net') - self.model_path = os.path.join(self.source_dir,self.model_path) - - self.select_prototype_subset = None - - ######################### - # Preprocessing # - ######################### - self.missing_pz_subjects = [#189, 196, 198, 205, 211, 214, 215, 217, 218, 219, 220, - #223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, - #234, 235, 236, 237, 238, 239, 240, 241, 242, 244, 258, - #261, 262, 264, 267, 268, 269, 270, 271, 273, 275, 276, - #277, 278, 283 - ] - self.no_bval_radval_subjects = [57] #this guy has master id 222 - - self.prepro = { - 'data_dir': '/home/gregor/networkdrives/E132-Projekte/Move_to_E132-Rohdaten/Prisma_Master/Daten/', - 'dir_spec': 'Master', - #'images': {'t2': 'T2TRA', 'adc': 'ADC1500', 'b50': 'BVAL50', 'b500': 'BVAL500', - # 'b1000': 'BVAL1000', 'b1500': 'BVAL1500'}, - #'images': {'adc': 'ADC1500', 'b50': 'BVAL50', 'b500': 'BVAL500', 'b1000': 'BVAL1000', 'b1500': 'BVAL1500'}, - 'images': {'t2': 'T2TRA'}, - 'anatomical_masks': ['seg_T2_PRO'], # try: 'seg_T2_PRO','seg_T2_PZ', 'seg_ADC_PRO', 'seg_ADC_PZ', - 'merge_mode' : 'union', #if registered data w/ two gts: take 'union' or 'adc' or 't2' of gt - 'rename_tags': {'seg_ADC_PRO':"pro", 'seg_T2_PRO':"pro", 'seg_ADC_PZ':"pz", 'seg_T2_PZ':"pz"}, - 'lesion_postfix': '_Re', #lesion files are tagged seg_MOD_LESx - 'img_postfix': "_resampled2", #"_resampled2_registered", - 'overall_postfix': ".nrrd", #including filetype ending! - - 'histo_dir': '/home/gregor/networkdrives/E132-Projekte/Move_to_E132-Rohdaten/Prisma_Master/Dokumente/', - 'histo_dir_out': self.histo_dir, - 'histo_lesion_based': 'MasterHistoAll.csv', - 'histo_patient_based': 'MasterPatientbasedAll_clean.csv', - 'histo_id_column_name': 'Master_ID', - 'histo_pb_id_column_name': 'Master_ID_Short', #for patient histo - - 'excluded_prisma_subjects': [], - 'excluded_radval_subjects': self.no_bval_radval_subjects, - 'excluded_master_subjects': self.missing_pz_subjects, - - 'seg_labels': {'tz': 0, 'pz': 0, 'lesions':'roi'}, - #set as hard label or 'roi' to have seg labels represent obj instance count - #if not given 'lesions' are numbered highest seg label +lesion-nr-in-histofile - 'class_labels': {'lesions':'gleason'}, #0 is not bg, but first fg class! - #i.e., prepro labels are shifted by -1 towards later training labels in gt, legends, dicts, etc. - #evtly set lesions to 'gleason' and check gleason remap in prepro - #'gleason_thresh': 71, - 'gleason_mapping': {0: -1, 60:0, 71:1, 72:1, 80:1, 90:1, 91:1, 92:1}, - 'gleason_map': self.gleason_map, #see below - 'color_palette': [self.green, self.red], - - 'output_directory': self.data_sourcedir, - - 'modalities2concat' : "all", #['t2', 'adc','b50','b500','b1000','b1500'], #will be concatenated on colorchannel - 'center_of_mass_crop': True, - 'mod_scaling' : (1,1,1), #z,y,x - 'pre_crop_size': [20, 384, 384], #z,y,x, z-cropping and non-square not implemented atm!! 
- 'swap_yx_to_xy': False, #change final spatial shape from z,y,x to z,x,y - 'normalization': {'percentiles':[1., 99.]}, - 'interpolation': 'nearest', - - 'observables_patient': ['Original_ID', 'GSBx', 'PIRADS2', 'PSA'], - 'observables_rois': ['lesion_gleasons'], - - 'info_dict_path': self.info_dict_path, - - 'npz_dir' : self.data_sourcedir[:-1]+"_npz" #if not None: convert to npz, copy data here - } - if self.prepro["modalities2concat"] == "all": - self.prepro["modalities2concat"] = list(self.prepro["images"].keys()) - - ######################### - # Architecture # - ######################### - - # dimension the model operates in. one out of [2, 3]. - self.dim = 2 - - # 'class': standard object classification per roi, pairwise combinable with each of below tasks. - # if 'class' is omitted from tasks, object classes will be fg/bg (1/0) from RPN. - # 'regression': regress some vector per each roi - # 'regression_ken_gal': use kendall-gal uncertainty sigma - # 'regression_bin': classify each roi into a bin related to a regression scale - self.prediction_tasks = ['class',] - - self.start_filts = 48 if self.dim == 2 else 18 - self.end_filts = self.start_filts * 4 if self.dim == 2 else self.start_filts * 2 - self.res_architecture = 'resnet50' # 'resnet101' or 'resnet50' - self.weight_init = None #'kaiming_normal' #, 'xavier' or None-->pytorch standard, - self.norm = None #'instance_norm' # one of 'None', 'instance_norm', 'batch_norm' - self.relu = 'relu' # 'relu' or 'leaky_relu' - - self.regression_n_features = 1 #length of regressor target vector (always 1D) - - ######################### - # Data Loader # - ######################### - - self.seed = 17 - self.n_workers = 16 if server_env else os.cpu_count() - - self.batch_size = 10 if self.dim == 2 else 6 - - self.channels = [1, 2, 3, 4] # modalities2load, see prepo - self.n_channels = len(self.channels) # for compatibility, but actually redundant - # which channel (mod) to show as bg in plotting, will be extra added to batch if not in self.channels - self.plot_bg_chan = 0 - self.pre_crop_size = list(np.array(self.prepro['pre_crop_size'])[[1, 2, 0]]) # now y,x,z - self.crop_margin = [20, 20, 1] # has to be smaller than respective patch_size//2 - self.patch_size_2D = self.pre_crop_size[:2] #[288, 288] - self.patch_size_3D = self.pre_crop_size[:2] + [8] # only numbers divisible by 2 multiple times - # (at least 5 times for x,y, at least 3 for z)! 
- # otherwise likely to produce error in crop fct or net - self.patch_size = self.patch_size_2D if self.dim == 2 else self.patch_size_3D - - self.balance_target = "class_targets" if 'class' in self.prediction_tasks else 'rg_bin_targets' - # ratio of fully random patients drawn during batch generation - # resulting batch random count is rounded down to closest integer - self.batch_random_ratio = 0.2 if self.dim==2 else 0.4 - - self.observables_patient = ['Original_ID', 'GSBx', 'PIRADS2'] - self.observables_rois = ['lesion_gleasons'] - - self.regression_target = "lesion_gleasons" # name of the info_dict entry holding regression targets - # linear mapping - self.rg_map = {0: 0, 60: 1, 71: 2, 72: 3, 80: 4, 90: 5, 91: 6, 92: 7, None: 0} - # non-linear mapping - #self.rg_map = {0: 0, 60: 1, 71: 6, 72: 7.5, 80: 9, 90: 10, 91: 10, 92: 10, None: 0} - - ######################### - # Colors and Legends # - ######################### - self.plot_frequency = 5 - - # colors - self.gravity_col_palette = [self.green, self.yellow, self.orange, self.bright_red, self.red, self.dark_red] - - self.gs_labels = [ - Label(0, 'bg', self.gray, (0,)), - Label(60, 'GS60', self.dark_green, (60,)), - Label(71, 'GS71', self.dark_yellow, (71,)), - Label(72, 'GS72', self.orange, (72,)), - Label(80, 'GS80', self.brighter_red,(80,)), - Label(90, 'GS90', self.bright_red, (90,)), - Label(91, 'GS91', self.red, (91,)), - Label(92, 'GS92', self.dark_red, (92,)) - ] - self.gs2label = {label.id: label for label in self.gs_labels} - - - binary_cl_labels = [Label(1, 'benign', (*self.green, 1.), (60,)), - Label(2, 'malignant', (*self.red, 1.), (71,72,80,90,91,92)), - #Label(3, 'pz', (*self.blue, 1.), (None,)), - #Label(4, 'tz', (*self.aubergine, 1.), (None,)) - ] - - self.class_labels = [ - #id #name #color #gleason score - Label( 0, 'bg', (*self.gray, 0.), (0,))] - if "class" in self.prediction_tasks: - self.class_labels += binary_cl_labels - # self.class_labels += [Label(cl, cl_dic["name"], cl_dic["color"], tuple(cl_dic["gleasons"])) - # for cl, cl_dic in - # load_obj(os.path.join(self.data_sourcedir, "pp_class_labels.pkl")).items()] - else: - self.class_labels += [Label( 1, 'lesion', (*self.red, 1.), (60,71,72,80,90,91,92))] - - if any(['regression' in task for task in self.prediction_tasks]): - self.bin_labels = [binLabel(0, 'bg', (*self.gray, 0.), (0,), (0,))] - self.bin_labels += [binLabel(cl, cl_dic["name"], cl_dic["color"], tuple(cl_dic["gleasons"]), - tuple([self.rg_map[gs] for gs in cl_dic["gleasons"]])) for cl, cl_dic in - sorted(load_obj(os.path.join(self.data_sourcedir, "pp_class_labels.pkl")).items())] - self.bin_id2label = {label.id: label for label in self.bin_labels} - self.gs2bin_label = {gs: label for label in self.bin_labels for gs in label.gleasons} - bins = [(min(label.bin_vals), max(label.bin_vals)) for label in self.bin_labels] - self.bin_id2rg_val = {ix: [np.mean(bin)] for ix, bin in enumerate(bins)} - self.bin_edges = [(bins[i][1] + bins[i+1][0]) / 2 for i in range(len(bins)-1)] - self.bin_dict = {label.id: label.name for label in self.bin_labels if label.id != 0} - - - if self.class_specific_seg: - self.seg_labels = self.class_labels - else: - self.seg_labels = [ # id #name #color - Label(0, 'bg', (*self.white, 0.)), - Label(1, 'fg', (*self.orange, 1.)) - ] - - self.class_id2label = {label.id: label for label in self.class_labels} - self.class_dict = {label.id: label.name for label in self.class_labels if label.id != 0} - # class_dict is used in evaluator / ap, auc, etc. 
statistics, and class 0 (bg) only needs to be - # evaluated in debugging - self.class_cmap = {label.id: label.color for label in self.class_labels} - - self.seg_id2label = {label.id: label for label in self.seg_labels} - self.cmap = {label.id: label.color for label in self.seg_labels} - - self.plot_prediction_histograms = True - self.plot_stat_curves = False - self.plot_class_ids = True - - self.num_classes = len(self.class_dict) # for instance classification (excl background) - self.num_seg_classes = len(self.seg_labels) # incl background - - ######################### - # Data Augmentation # - ######################### - #the angle rotations are implemented incorrectly in batchgenerators! in 2D, - #the x-axis angle controls the z-axis angle. - if self.dim == 2: - angle_x = (-np.pi / 3., np.pi / 3.) - angle_z = (0.,0.) - rcd = (self.patch_size[0] / 2., self.patch_size[1] / 2.) - else: - angle_x = (0.,0.) - angle_z = (-np.pi / 2., np.pi / 2.) - rcd = (self.patch_size[0] / 2., self.patch_size[1] / 2., - self.patch_size[2] / 2.) - - self.do_aug = True - # DA settings for DWI - self.da_kwargs = { - 'mirror': True, - 'mirror_axes': tuple(np.arange(0, self.dim, 1)), - 'random_crop': True, - 'rand_crop_dist': rcd, - 'do_elastic_deform': self.dim==2, - 'alpha': (0., 1500.), - 'sigma': (25., 50.), - 'do_rotation': True, - 'angle_x': angle_x, - 'angle_y': (0., 0.), - 'angle_z': angle_z, - 'do_scale': True, - 'scale': (0.7, 1.3), - 'border_mode_data': 'constant', - 'gamma_transform': True, - 'gamma_range': (0.5, 2.) - } - # for T2 - # self.da_kwargs = { - # 'mirror': True, - # 'mirror_axes': tuple(np.arange(0, self.dim, 1)), - # 'random_crop': False, - # 'rand_crop_dist': rcd, - # 'do_elastic_deform': False, - # 'alpha': (0., 1500.), - # 'sigma': (25., 50.), - # 'do_rotation': True, - # 'angle_x': angle_x, - # 'angle_y': (0., 0.), - # 'angle_z': angle_z, - # 'do_scale': False, - # 'scale': (0.7, 1.3), - # 'border_mode_data': 'constant', - # 'gamma_transform': False, - # 'gamma_range': (0.5, 2.) - # } - - - ################################# - # Schedule / Selection / Optim # - ################################# - - # good guess: train for n_samples = 1.1m = epochs*n_train_bs*b_size - self.num_epochs = 270 - self.num_train_batches = 120 if self.dim == 2 else 140 - - self.val_mode = 'val_patient' # one of 'val_sampling', 'val_patient' - # decide whether to validate on entire patient volumes (like testing) or sampled patches (like training) - # the former is more accurate, while the latter is faster (depending on volume size) - self.num_val_batches = 200 if self.dim==2 else 40 # for val_sampling, number or "all" - self.max_val_patients = "all" #for val_patient, "all" takes whole split - - self.save_n_models = 6 - self.min_save_thresh = 3 if self.dim == 2 else 4 #=wait time in epochs - if "class" in self.prediction_tasks: - # 'criterion': weight - self.model_selection_criteria = {"benign_ap": 0.2, "malignant_ap": 0.8} - elif any("regression" in task for task in self.prediction_tasks): - self.model_selection_criteria = {"lesion_ap": 0.2, "lesion_avp": 0.8} - #self.model_selection_criteria = {"GS71-92_ap": 0.9, "GS60_ap": 0.1} # 'criterion':weight - #self.model_selection_criteria = {"lesion_ap": 0.2, "lesion_avp": 0.8} - #self.model_selection_criteria = {label.name+"_ap": 1. 
for label in self.class_labels if label.id!=0} - - self.scan_det_thresh = False - self.warm_up = 0 - - self.optimizer = "ADAM" - self.weight_decay = 1e-5 - self.clip_norm = None #number or None - - self.learning_rate = [1e-4] * self.num_epochs - self.dynamic_lr_scheduling = True - self.lr_decay_factor = 0.5 - self.scheduling_patience = int(self.num_epochs / 6) - - ######################### - # Testing # - ######################### - - self.test_aug_axes = (0,1,(0,1)) # None or list: choices are 0,1,(0,1) (0==spatial y, 1== spatial x). - self.held_out_test_set = False - self.max_test_patients = "all" # "all" or number - self.report_score_level = ['rois', 'patient'] # 'patient' or 'rois' (incl) - self.patient_class_of_interest = 2 if 'class' in self.prediction_tasks else 1 - - - self.eval_bins_separately = "additionally" if not 'class' in self.prediction_tasks else False - self.patient_bin_of_interest = 2 - self.metrics = ['ap', 'auc', 'dice'] - if any(['regression' in task for task in self.prediction_tasks]): - self.metrics += ['avp', 'rg_MAE_weighted', 'rg_MAE_weighted_tp', - 'rg_bin_accuracy_weighted', 'rg_bin_accuracy_weighted_tp'] - if 'aleatoric' in self.model: - self.metrics += ['rg_uncertainty', 'rg_uncertainty_tp', 'rg_uncertainty_tp_weighted'] - self.evaluate_fold_means = True - - self.min_det_thresh = 0.02 - - self.ap_match_ious = [0.1] # threshold(s) for considering a prediction as true positive - # aggregation method for test and val_patient predictions. - # wbc = weighted box clustering as in https://arxiv.org/pdf/1811.08661.pdf, - # nms = standard non-maximum suppression, or None = no clustering - self.clustering = 'wbc' - # iou thresh (exclusive!) for regarding two preds as concerning the same ROI - self.clustering_iou = 0.1 # has to be larger than desired possible overlap iou of model predictions - # 2D-3D merging is applied independently from clustering setting. 
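To make the role of clustering_iou concrete: predictions whose pairwise IoU exceeds the (exclusive) threshold are regarded as concerning the same ROI. The snippet below is a generic 2D box-IoU check for illustration only, not the toolkit's weighted-box-clustering implementation:

def box_iou_2d(a, b):
    """IoU of two boxes given as (y1, x1, y2, x2)."""
    inter_h = max(0., min(a[2], b[2]) - max(a[0], b[0]))
    inter_w = max(0., min(a[3], b[3]) - max(a[1], b[1]))
    inter = inter_h * inter_w
    union = (a[2] - a[0]) * (a[3] - a[1]) + (b[2] - b[0]) * (b[3] - b[1]) - inter
    return inter / union if union > 0 else 0.

clustering_iou = 0.1
same_roi = box_iou_2d((0, 0, 10, 10), (5, 5, 15, 15)) > clustering_iou   # True, IoU = 25/175 ~ 0.14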
- self.merge_2D_to_3D_preds = True if self.dim == 2 else False - self.merge_3D_iou = 0.1 - self.n_test_plots = 1 # per fold and rank - self.test_n_epochs = self.save_n_models # should be called n_test_ens, since is number of models to ensemble over during testing - # is multiplied by n_test_augs if test_aug - - ######################### - # shared model settings # - ######################### - - # max number of roi candidates to identify per image and class (slice in 2D, volume in 3D) - self.n_roi_candidates = 10 if self.dim == 2 else 15 - - ######################### - # assertions # - ######################### - if not 'class' in self.prediction_tasks: - assert self.num_classes == 1 - for mod in self.prepro['modalities2concat']: - assert mod in self.prepro['images'].keys(), "need to adapt mods2concat to chosen images" - - ######################### - # Add model specifics # - ######################### - - {'mrcnn': self.add_mrcnn_configs, 'mrcnn_aleatoric': self.add_mrcnn_configs, - 'mrcnn_gan': self.add_mrcnn_configs, - 'retina_net': self.add_mrcnn_configs, 'retina_unet': self.add_mrcnn_configs, - 'detection_unet': self.add_det_unet_configs, 'detection_fpn': self.add_det_fpn_configs - }[self.model]() - - def gleason_map(self, GS): - """gleason to class id - :param GS: gleason score as in histo file - """ - if "gleason_thresh" in self.prepro.keys(): - assert "gleason_mapping" not in self.prepro.keys(), "cant define both, thresh and map, for GS to classes" - # -1 == bg, 0 == benign, 1 == malignant - # before shifting, i.e., 0!=bg, but 0==first class - remapping = 0 if GS >= self.prepro["gleason_thresh"] else -1 - return remapping - elif "gleason_mapping" in self.prepro.keys(): - return self.prepro["gleason_mapping"][GS] - else: - raise Exception("Need to define some remapping, at least GS 0 -> background (class -1)") - - def rg_val_to_bin_id(self, rg_val): - return float(np.digitize(rg_val, self.bin_edges)) - - def add_det_fpn_configs(self): - self.scheduling_criterion = 'torch_loss' - self.scheduling_mode = 'min' if "loss" in self.scheduling_criterion else 'max' - - # loss mode: either weighted cross entropy ('wce'), batch-wise dice loss ('dice), or the sum of both ('dice_wce') - self.seg_loss_mode = 'wce' - self.wce_weights = [1]*self.num_seg_classes if 'dice' in self.seg_loss_mode else [0.1, 1, 1] - # if <1, false positive predictions in foreground are penalized less. - self.fp_dice_weight = 1 if self.dim == 2 else 1 - - - self.detection_min_confidence = 0.05 - #how to determine score of roi: 'max' or 'median' - self.score_det = 'max' - - self.cuda_benchmark = self.dim==3 - - def add_det_unet_configs(self): - self.scheduling_criterion = "torch_loss" - self.scheduling_mode = 'min' if "loss" in self.scheduling_criterion else 'max' - - # loss mode: either weighted cross entropy ('wce'), batch-wise dice loss ('dice), or the sum of both ('dice_wce') - self.seg_loss_mode = 'wce' - self.wce_weights = [1] * self.num_seg_classes if 'dice' in self.seg_loss_mode else [0.1, 1, 1] - # if <1, false positive predictions in foreground are penalized less. 
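A condensed, standalone version of the gleason_map logic above: in threshold mode, scores at or above gleason_thresh become foreground class 0 and everything else background -1 (before the later +1 label shift); in mapping mode the explicit dictionary is used. The prepro dicts below are hypothetical stand-ins for self.prepro:

def gleason_map(GS, prepro):
    """Map a Gleason score to a pre-shift class id: -1 == bg, 0 == first fg class."""
    if "gleason_thresh" in prepro:
        assert "gleason_mapping" not in prepro, "cannot define both thresh and map"
        return 0 if GS >= prepro["gleason_thresh"] else -1
    elif "gleason_mapping" in prepro:
        return prepro["gleason_mapping"][GS]
    raise Exception("need a remapping, at least GS 0 -> background (class -1)")

assert gleason_map(72, {"gleason_thresh": 71}) == 0
assert gleason_map(60, {"gleason_mapping": {0: -1, 60: 0, 71: 1}}) == 0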
- self.fp_dice_weight = 1 if self.dim == 2 else 1 - - self.detection_min_confidence = 0.05 - #how to determine score of roi: 'max' or 'median' - self.score_det = 'max' - - self.init_filts = 32 - self.kernel_size = 3 #ks for horizontal, normal convs - self.kernel_size_m = 2 #ks for max pool - self.pad = "same" # "same" or integer, padding of horizontal convs - - self.cuda_benchmark = True - - def add_mrcnn_configs(self): - - self.scheduling_criterion = max(self.model_selection_criteria, key=self.model_selection_criteria.get) - self.scheduling_mode = 'min' if "loss" in self.scheduling_criterion else 'max' - - # number of classes for network heads: n_foreground_classes + 1 (background) - self.head_classes = self.num_classes + 1 - # - # feed +/- n neighbouring slices into channel dimension. set to None for no context. - self.n_3D_context = None - if self.n_3D_context is not None and self.dim == 2: - self.n_channels *= (self.n_3D_context * 2 + 1) - - self.frcnn_mode = False - # disable the re-sampling of mask proposals to original size for speed-up. - # since evaluation is detection-driven (box-matching) and not instance segmentation-driven (iou-matching), - # mask outputs are optional. - self.return_masks_in_train = True - self.return_masks_in_val = True - self.return_masks_in_test = True - - # feature map strides per pyramid level are inferred from architecture. anchor scales are set accordingly. - self.backbone_strides = {'xy': [4, 8, 16, 32], 'z': [1, 2, 4, 8]} - # anchor scales are chosen according to expected object sizes in data set. Default uses only one anchor scale - # per pyramid level. (outer list are pyramid levels (corresponding to BACKBONE_STRIDES), inner list are scales per level.) - self.rpn_anchor_scales = {'xy': [[4], [8], [16], [32]], 'z': [[1], [2], [4], [8]]} - # choose which pyramid levels to extract features from: P2: 0, P3: 1, P4: 2, P5: 3. - self.pyramid_levels = [0, 1, 2, 3] - # number of feature maps in rpn. typically lowered in 3D to save gpu-memory. - self.n_rpn_features = 512 if self.dim == 2 else 128 - - # anchor ratios and strides per position in feature maps. - self.rpn_anchor_ratios = [0.5,1.,2.] - self.rpn_anchor_stride = 1 - # Threshold for first stage (RPN) non-maximum suppression (NMS): LOWER == HARDER SELECTION - self.rpn_nms_threshold = 0.7 if self.dim == 2 else 0.7 - - # loss sampling settings. - self.rpn_train_anchors_per_image = 6 - self.train_rois_per_image = 6 #per batch_instance - self.roi_positive_ratio = 0.5 - self.anchor_matching_iou = 0.7 - - # k negative example candidates are drawn from a pool of size k*shem_poolsize (stochastic hard-example mining), - # where k<=#positive examples. 
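A minimal sketch of the stochastic hard-example mining described in the comment above, illustration only and not the toolkit's shem utility: with k positives, the k * shem_poolsize highest-loss negatives form the candidate pool, and k of them are drawn at random.

import numpy as np

def shem_sample_negatives(neg_losses, n_positives, shem_poolsize=3):
    k = max(1, int(n_positives))
    pool = np.argsort(neg_losses)[::-1][:k * shem_poolsize]   # indices of the hardest negatives
    return np.random.choice(pool, size=min(k, len(pool)), replace=False)

picked = shem_sample_negatives(np.random.rand(100), n_positives=4)   # 4 indices out of the 12 hardest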
- self.shem_poolsize = 3 - - self.pool_size = (7, 7) if self.dim == 2 else (7, 7, 3) - self.mask_pool_size = (14, 14) if self.dim == 2 else (14, 14, 5) - self.mask_shape = (28, 28) if self.dim == 2 else (28, 28, 10) - - self.rpn_bbox_std_dev = np.array([0.1, 0.1, 0.1, 0.2, 0.2, 0.2]) - self.bbox_std_dev = np.array([0.1, 0.1, 0.1, 0.2, 0.2, 0.2]) - self.window = np.array([0, 0, self.patch_size[0], self.patch_size[1], 0, self.patch_size_3D[2]]) - self.scale = np.array([self.patch_size[0], self.patch_size[1], self.patch_size[0], self.patch_size[1], - self.patch_size_3D[2], self.patch_size_3D[2]]) #y1,x1,y2,x2,z1,z2 - - if self.dim == 2: - self.rpn_bbox_std_dev = self.rpn_bbox_std_dev[:4] - self.bbox_std_dev = self.bbox_std_dev[:4] - self.window = self.window[:4] - self.scale = self.scale[:4] - - self.plot_y_max = 1.5 - self.n_plot_rpn_props = 5 if self.dim == 2 else 30 #per batch_instance (slice in 2D / patient in 3D) - - # pre-selection in proposal-layer (stage 1) for NMS-speedup. applied per batch element. - self.pre_nms_limit = 3000 if self.dim == 2 else 6000 - - # n_proposals to be selected after NMS per batch element. too high numbers blow up memory if "detect_while_training" is True, - # since proposals of the entire batch are forwarded through second stage in as one "batch". - self.roi_chunk_size = 2000 if self.dim == 2 else 400 - self.post_nms_rois_training = 250 * (self.head_classes-1) if self.dim == 2 else 500 - self.post_nms_rois_inference = 250 * (self.head_classes-1) - - # Final selection of detections (refine_detections) - self.model_max_instances_per_batch_element = self.n_roi_candidates # per batch element and class. - # iou for nms in box refining (directly after heads), should be >0 since ths>=x in mrcnn.py, otherwise all predictions are one cluster. - self.detection_nms_threshold = 1e-5 - # detection score threshold in refine_detections() - self.model_min_confidence = 0.05 #self.min_det_thresh/2 - - if self.dim == 2: - self.backbone_shapes = np.array( - [[int(np.ceil(self.patch_size[0] / stride)), - int(np.ceil(self.patch_size[1] / stride))] - for stride in self.backbone_strides['xy']]) - else: - self.backbone_shapes = np.array( - [[int(np.ceil(self.patch_size[0] / stride)), - int(np.ceil(self.patch_size[1] / stride)), - int(np.ceil(self.patch_size[2] / stride_z))] - for stride, stride_z in zip(self.backbone_strides['xy'], self.backbone_strides['z'] - )]) - - self.operate_stride1 = False - - if self.model == 'retina_net' or self.model == 'retina_unet': - self.cuda_benchmark = self.dim == 3 - #implement extra anchor-scales according to https://arxiv.org/abs/1708.02002 - self.rpn_anchor_scales['xy'] = [[ii[0], ii[0] * (2 ** (1 / 3)), ii[0] * (2 ** (2 / 3))] for ii in - self.rpn_anchor_scales['xy']] - self.rpn_anchor_scales['z'] = [[ii[0], ii[0] * (2 ** (1 / 3)), ii[0] * (2 ** (2 / 3))] for ii in - self.rpn_anchor_scales['z']] - self.n_anchors_per_pos = len(self.rpn_anchor_ratios) * 3 - - self.n_rpn_features = 256 if self.dim == 2 else 64 - - # pre-selection of detections for NMS-speedup. per entire batch. 
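For reference, the backbone_shapes arithmetic above evaluated for the 2D case of this config (384 x 384 patches):

import numpy as np

patch_size = (384, 384)                  # y, x
backbone_strides_xy = [4, 8, 16, 32]     # P2..P5
backbone_shapes = np.array([[int(np.ceil(patch_size[0] / s)),
                             int(np.ceil(patch_size[1] / s))] for s in backbone_strides_xy])
# -> [[96, 96], [48, 48], [24, 24], [12, 12]]: the feature-map grids the anchors are laid out on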
- self.pre_nms_limit = (1000 if self.dim == 2 else 6250) * self.batch_size - - # anchor matching iou is lower than in Mask R-CNN according to https://arxiv.org/abs/1708.02002 - self.anchor_matching_iou = 0.5 - - if self.model == 'retina_unet': - self.operate_stride1 = True \ No newline at end of file diff --git a/datasets/prostate/data_loader.py b/datasets/prostate/data_loader.py deleted file mode 100644 index 69c53e6..0000000 --- a/datasets/prostate/data_loader.py +++ /dev/null @@ -1,716 +0,0 @@ -__author__ = '' -#credit derives from Paul Jaeger, Simon Kohl - -import os -import time -import warnings - -from collections import OrderedDict -import pickle - -import numpy as np -import pandas as pd - -# batch generator tools from https://github.com/MIC-DKFZ/batchgenerators -from batchgenerators.augmentations.utils import resize_image_by_padding, center_crop_2D_image, center_crop_3D_image -from batchgenerators.dataloading.data_loader import SlimDataLoaderBase -from batchgenerators.transforms.spatial_transforms import MirrorTransform as Mirror -from batchgenerators.transforms.abstract_transforms import Compose -from batchgenerators.dataloading.multi_threaded_augmenter import MultiThreadedAugmenter -from batchgenerators.dataloading import SingleThreadedAugmenter -from batchgenerators.transforms.spatial_transforms import SpatialTransform -from batchgenerators.transforms.crop_and_pad_transforms import CenterCropTransform -#from batchgenerators.transforms.utility_transforms import ConvertSegToBoundingBoxCoordinates -from batchgenerators.transforms import AbstractTransform -from batchgenerators.transforms.color_transforms import GammaTransform - -#sys.path.append(os.path.dirname(os.path.realpath(__file__))) - -#import utils.exp_utils as utils -import utils.dataloader_utils as dutils -from utils.dataloader_utils import ConvertSegToBoundingBoxCoordinates -import data_manager as dmanager - - -def load_obj(file_path): - with open(file_path, 'rb') as handle: - return pickle.load(handle) - -def id_to_spec(id, base_spec): - """Construct subject specifier from base string and an integer subject number.""" - num_zeros = 5 - len(str(id)) - assert num_zeros>=0, "id_to_spec: patient id too long to fit into 5 figures" - return base_spec + '_' + ('').join(['0'] * num_zeros) + str(id) - -def convert_3d_to_2d_generator(data_dict, shape="bcxyz"): - """Fold/Shape z-dimension into color-channel. - :param shape: bcxyz or bczyx - :return: shape b(c*z)xy or b(c*z)yx - """ - if shape=="bcxyz": - data_dict['data'] = np.transpose(data_dict['data'], axes=(0,1,4,3,2)) - data_dict['seg'] = np.transpose(data_dict['seg'], axes=(0,1,4,3,2)) - elif shape=="bczyx": - pass - else: - raise Exception("unknown datashape {} in 3d_to_2d transform converter".format(shape)) - - shp = data_dict['data'].shape - data_dict['orig_shape_data'] = shp - seg_shp = data_dict['seg'].shape - data_dict['orig_shape_seg'] = seg_shp - - data_dict['data'] = data_dict['data'].reshape((shp[0], shp[1] * shp[2], shp[3], shp[4])) - data_dict['seg'] = data_dict['seg'].reshape((seg_shp[0], seg_shp[1] * seg_shp[2], seg_shp[3], seg_shp[4])) - - return data_dict - -def convert_2d_to_3d_generator(data_dict, shape="bcxyz"): - """Unfold z-dimension from color-channel. - data needs to be in shape bcxy or bcyx, x,y dims won't be swapped relative to each other. 
- :param shape: target shape, bcxyz or bczyx - """ - shp = data_dict['orig_shape_data'] - cur_shape = data_dict['data'].shape - seg_shp = data_dict['orig_shape_seg'] - cur_shape_seg = data_dict['seg'].shape - - data_dict['data'] = data_dict['data'].reshape((shp[0], shp[1], shp[2], cur_shape[-2], cur_shape[-1])) - data_dict['seg'] = data_dict['seg'].reshape((seg_shp[0], seg_shp[1], seg_shp[2], cur_shape_seg[-2], cur_shape_seg[-1])) - - if shape=="bcxyz": - data_dict['data'] = np.transpose(data_dict['data'], axes=(0,1,4,3,2)) - data_dict['seg'] = np.transpose(data_dict['seg'], axes=(0,1,4,3,2)) - return data_dict - -class Convert3DTo2DTransform(AbstractTransform): - def __init__(self): - pass - - def __call__(self, **data_dict): - return convert_3d_to_2d_generator(data_dict) - -class Convert2DTo3DTransform(AbstractTransform): - def __init__(self): - pass - - def __call__(self, **data_dict): - return convert_2d_to_3d_generator(data_dict) - -def vector(item): - """ensure item is vector-like (list or array or tuple) - :param item: anything - """ - if not isinstance(item, (list, tuple, np.ndarray)): - item = [item] - return item - -class Dataset(dutils.Dataset): - r"""Load a dict holding memmapped arrays and clinical parameters for each patient, - evtly subset of those. - If server_env: copy and evtly unpack (npz->npy) data in cf.data_rootdir to - cf.data_dest. - :param cf: config file - :param data_dir: directory in which to find data, defaults to cf.data_dir if None. - :return: dict with imgs, segs, pids, class_labels, observables - """ - - def __init__(self, cf, logger=None, subset_ids=None, data_sourcedir=None): - super(Dataset,self).__init__(cf, data_sourcedir=data_sourcedir) - - info_dict = load_obj(cf.info_dict_path) - - if subset_ids is not None: - pids = subset_ids - if logger is None: - print('subset: selected {} instances from df'.format(len(pids))) - else: - logger.info('subset: selected {} instances from df'.format(len(pids))) - else: - pids = list(info_dict.keys()) - - #evtly copy data from data_rootdir to data_dir - if cf.server_env and not hasattr(cf, "data_dir"): - file_subset = [info_dict[pid]['img'][:-3]+"*" for pid in pids] - file_subset+= [info_dict[pid]['seg'][:-3]+"*" for pid in pids] - file_subset += [cf.info_dict_path] - self.copy_data(cf, file_subset=file_subset) - cf.data_dir = self.data_dir - - img_paths = [os.path.join(self.data_dir, info_dict[pid]['img']) for pid in pids] - seg_paths = [os.path.join(self.data_dir, info_dict[pid]['seg']) for pid in pids] - - # load all subject files - self.data = OrderedDict() - for i, pid in enumerate(pids): - subj_spec = id_to_spec(pid, cf.prepro['dir_spec']) - subj_data = {'pid':pid, "spec":subj_spec} - subj_data['img'] = img_paths[i] - subj_data['seg'] = seg_paths[i] - #read, add per-roi labels - for obs in cf.observables_patient+cf.observables_rois: - subj_data[obs] = np.array(info_dict[pid][obs]) - if 'class' in self.cf.prediction_tasks: - subj_data['class_targets'] = np.array(info_dict[pid]['roi_classes'], dtype='uint8') + 1 - else: - subj_data['class_targets'] = np.ones_like(np.array(info_dict[pid]['roi_classes']), dtype='uint8') - if any(['regression' in task for task in self.cf.prediction_tasks]): - if hasattr(cf, "rg_map"): - subj_data["regression_targets"] = np.array([vector(cf.rg_map[v]) for v in info_dict[pid][cf.regression_target]], dtype='float16') - else: - subj_data["regression_targets"] = np.array([vector(v) for v in info_dict[pid][cf.regression_target]], dtype='float16') - subj_data["rg_bin_targets"] = 
np.array([cf.rg_val_to_bin_id(v) for v in subj_data["regression_targets"]], dtype='uint8') - subj_data['fg_slices'] = info_dict[pid]['fg_slices'] - - self.data[pid] = subj_data - - cf.roi_items = cf.observables_rois[:] - cf.roi_items += ['class_targets'] - if any(['regression' in task for task in self.cf.prediction_tasks]): - cf.roi_items += ['regression_targets'] - cf.roi_items += ['rg_bin_targets'] - #cf.patient_items = cf.observables_patient[:] - #patient-wise items not used currently - self.set_ids = np.array(list(self.data.keys())) - - self.df = None - -class BatchGenerator(dutils.BatchGenerator): - """ - create the training/validation batch generator. Randomly sample batch_size patients - from the data set, (draw a random slice if 2D), pad-crop them to equal sizes and merge to an array. - :param data: data dictionary as provided by 'load_dataset' - :param img_modalities: list of strings ['adc', 'b1500'] from config - :param batch_size: number of patients to sample for the batch - :param pre_crop_size: equal size for merging the patients to a single array (before the final random-crop in data aug.) - :param sample_pids_w_replace: whether to randomly draw pids from dataset for batch generation. if False, step through whole dataset - before repition. - :return dictionary containing the batch data / seg / pids as lists; the augmenter will later concatenate them into an array. - """ - def __init__(self, cf, data, n_batches=None, sample_pids_w_replace=True): - super(BatchGenerator, self).__init__(cf, data, n_batches) - self.dataset_length = len(self._data) - self.cf = cf - - self.sample_pids_w_replace = sample_pids_w_replace - self.eligible_pids = list(self._data.keys()) - - self.chans = cf.channels if cf.channels is not None else np.index_exp[:] - assert hasattr(self.chans, "__iter__"), "self.chans has to be list-like to maintain dims when slicing" - - self.p_fg = 0.5 - self.empty_samples_max_ratio = 0.6 - self.random_count = int(cf.batch_random_ratio * cf.batch_size) - - self.balance_target_distribution(plot=sample_pids_w_replace) - self.stats = {"roi_counts" : np.zeros((len(self.unique_ts),), dtype='uint32'), "empty_samples_count" : 0} - - def generate_train_batch(self): - #everything done in here is per batch - #print statements in here get confusing due to multithreading - if self.sample_pids_w_replace: - # fully random patients - batch_patient_ids = list(np.random.choice(self.dataset_pids, size=self.random_count, replace=False)) - # target-balanced patients - batch_patient_ids += list(np.random.choice( - self.dataset_pids, size=self.batch_size - self.random_count, replace=False, p=self.p_probs)) - else: - batch_patient_ids = np.random.choice(self.eligible_pids, size=self.batch_size, - replace=False) - if self.sample_pids_w_replace == False: - self.eligible_pids = [pid for pid in self.eligible_pids if pid not in batch_patient_ids] - if len(self.eligible_pids) < self.batch_size: - self.eligible_pids = self.dataset_pids - - batch_data, batch_segs, batch_patient_specs = [], [], [] - batch_roi_items = {name: [] for name in self.cf.roi_items} - #record roi count of classes in batch - batch_roi_counts, empty_samples_count = np.zeros((len(self.unique_ts),), dtype='uint32'), 0 - #empty count for full bg samples (empty slices in 2D/patients in 3D) - - for sample in range(self.batch_size): - - patient = self._data[batch_patient_ids[sample]] - - #swap dimensions from (c,)z,y,x to (c,)y,x,z or h,w,d to ease 2D/3D-case handling - data = np.transpose(np.load(patient['img'], mmap_mode='r'), axes=(0, 
2, 3, 1))[self.chans] - seg = np.transpose(np.load(patient['seg'], mmap_mode='r'), axes=(1, 2, 0)) - (c,y,x,z) = data.shape - - #original data is 3D MRIs, so need to pick (e.g. randomly) single slice to make it 2D, - #consider batch roi-class balance - if self.cf.dim == 2: - elig_slices, choose_fg = [], False - if self.sample_pids_w_replace and len(patient['fg_slices']) > 0: - if empty_samples_count / self.batch_size >= self.empty_samples_max_ratio or np.random.rand( - 1) <= self.p_fg: - # fg is to be picked - for tix in np.argsort(batch_roi_counts): - # pick slices of patient that have roi of sought-for target - # np.unique(seg[...,sl_ix][seg[...,sl_ix]>0]) gives roi_ids (numbering) of rois in slice sl_ix - elig_slices = [sl_ix for sl_ix in np.arange(z) if np.count_nonzero( - patient[self.balance_target][np.unique(seg[..., sl_ix][seg[..., sl_ix] > 0]) - 1] == - self.unique_ts[tix]) > 0] - if len(elig_slices) > 0: - choose_fg = True - break - else: - # pick bg - elig_slices = np.setdiff1d(np.arange(z), patient['fg_slices']) - if len(elig_slices) == 0: - elig_slices = z - sl_pick_ix = np.random.choice(elig_slices, size=None) - data = data[..., sl_pick_ix] - seg = seg[..., sl_pick_ix] - - spatial_shp = data[0].shape - assert spatial_shp==seg.shape, "spatial shape incongruence betw. data and seg" - - if np.any([spatial_shp[ix] < self.cf.pre_crop_size[ix] for ix in range(len(spatial_shp))]): - new_shape = [np.max([spatial_shp[ix], self.cf.pre_crop_size[ix]]) for ix in range(len(spatial_shp))] - data = dutils.pad_nd_image(data, (len(data), *new_shape)) - seg = dutils.pad_nd_image(seg, new_shape) - - #eventual cropping to pre_crop_size: with prob self.p_fg sample pixel from random ROI and shift center, - #if possible, to that pixel, so that img still contains ROI after pre-cropping - dim_cropflags = [spatial_shp[i] > self.cf.pre_crop_size[i] for i in range(len(spatial_shp))] - if np.any(dim_cropflags): - print("dim crop applied") - # sample pixel from random ROI and shift center, if possible, to that pixel - if self.cf.dim==3: - choose_fg = (empty_samples_count/self.batch_size>=self.empty_samples_max_ratio) or np.random.rand(1) <= self.p_fg - if self.sample_pids_w_replace and choose_fg and np.any(seg): - available_roi_ids = np.unique(seg)[1:] - for tix in np.argsort(batch_roi_counts): - elig_roi_ids = available_roi_ids[ - patient[self.balance_target][available_roi_ids - 1] == self.unique_ts[tix]] - if len(elig_roi_ids) > 0: - seg_ics = np.argwhere(seg == np.random.choice(elig_roi_ids, size=None)) - break - roi_anchor_pixel = seg_ics[np.random.choice(seg_ics.shape[0], size=None)] - assert seg[tuple(roi_anchor_pixel)] > 0 - - # sample the patch center coords. constrained by edges of image - pre_crop_size /2 and - # distance to the selected ROI < patch_size /2 - def get_cropped_centercoords(dim): - low = np.max((self.cf.pre_crop_size[dim]//2, - roi_anchor_pixel[dim] - (self.cf.patch_size[dim]//2 - self.cf.crop_margin[dim]))) - high = np.min((spatial_shp[dim] - self.cf.pre_crop_size[dim]//2, - roi_anchor_pixel[dim] + (self.cf.patch_size[dim]//2 - self.cf.crop_margin[dim]))) - if low >= high: #happens if lesion on the edge of the image. - #print('correcting low/high:', low, high, spatial_shp, roi_anchor_pixel, dim) - low = self.cf.pre_crop_size[dim] // 2 - high = spatial_shp[dim] - self.cf.pre_crop_size[dim]//2 - - assert low0]) - 1] == self.unique_ts[tix]) - if not np.any(seg): - empty_samples_count += 1 - - #self.stats['roi_counts'] += batch_roi_counts #DOESNT WORK WITH MULTITHREADING! 
do outside - #self.stats['empty_samples_count'] += empty_samples_count - - batch = {'data': np.array(batch_data), 'seg': np.array(batch_segs).astype('uint8'), - 'pid': batch_patient_ids, 'spec': batch_patient_specs, - 'roi_counts':batch_roi_counts, 'empty_samples_count': empty_samples_count} - for key,val in batch_roi_items.items(): #extend batch dic by roi-wise items (obs, class ids, regression vectors...) - batch[key] = np.array(val) - - return batch - -class PatientBatchIterator(dutils.PatientBatchIterator): - """ - creates a val/test generator. Step through the dataset and return dictionaries per patient. - 2D is a special case of 3D patching with patch_size[2] == 1 (slices) - Creates whole Patient batch and targets, and - if necessary - patchwise batch and targets. - Appends patient targets anyway for evaluation. - For Patching, shifts all patches into batch dimension. batch_tiling_forward will take care of exceeding batch dimensions. - - This iterator/these batches are not intended to go through MTaugmenter afterwards - """ - - def __init__(self, cf, data): - super(PatientBatchIterator, self).__init__(cf, data) - - self.patient_ix = 0 #running index over all patients in set - - self.patch_size = cf.patch_size+[1] if cf.dim==2 else cf.patch_size - self.chans = cf.channels if cf.channels is not None else np.index_exp[:] - assert hasattr(self.chans, "__iter__"), "self.chans has to be list-like to maintain dims when slicing" - - def generate_train_batch(self, pid=None): - - if self.patient_ix == len(self.dataset_pids): - self.patient_ix = 0 - if pid is None: - pid = self.dataset_pids[self.patient_ix] # + self.thread_id - patient = self._data[pid] - - #swap dimensions from (c,)z,y,x to c,y,x,z or h,w,d to ease 2D/3D-case handling - data = np.transpose(np.load(patient['img'], mmap_mode='r'), axes=(0, 2, 3, 1)) - seg = np.transpose(np.load(patient['seg'], mmap_mode='r'), axes=(1, 2, 0))[np.newaxis] - data_shp_raw = data.shape - plot_bg = data[self.cf.plot_bg_chan] if self.cf.plot_bg_chan not in self.chans else None - data = data[self.chans] - discarded_chans = len( - [c for c in np.setdiff1d(np.arange(data_shp_raw[0]), self.chans) if c < self.cf.plot_bg_chan]) - spatial_shp = data[0].shape # spatial dims need to be in order x,y,z - assert spatial_shp==seg[0].shape, "spatial shape incongruence betw. data and seg" - - if np.any([spatial_shp[i] < ps for i, ps in enumerate(self.patch_size)]): - new_shape = [np.max([spatial_shp[i], self.patch_size[i]]) for i in range(len(self.patch_size))] - data = dutils.pad_nd_image(data, new_shape) # use 'return_slicer' to crop image back to original shape. 
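Where the patient iterator pads volumes up to the patch size (as in the pad_nd_image calls around here), the idea is symmetric zero-padding per spatial dimension; a simplified stand-in (not the actual dutils.pad_nd_image signature) could look like:

import numpy as np

def pad_to_min_shape(arr, min_shape):
    """Symmetrically zero-pad arr so every dimension reaches at least min_shape."""
    target = [max(s, m) for s, m in zip(arr.shape, min_shape)]
    pads = [((t - s) // 2, (t - s) - (t - s) // 2) for s, t in zip(arr.shape, target)]
    return np.pad(arr, pads, mode="constant")

padded = pad_to_min_shape(np.zeros((300, 384, 6)), (384, 384, 8))   # -> shape (384, 384, 8)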
- seg = dutils.pad_nd_image(seg, new_shape) - if plot_bg is not None: - plot_bg = dutils.pad_nd_image(plot_bg, new_shape) - - if self.cf.dim == 3 or self.cf.merge_2D_to_3D_preds: - #adds the batch dim here bc won't go through MTaugmenter - out_data = data[np.newaxis] - out_seg = seg[np.newaxis] - if plot_bg is not None: - out_plot_bg = plot_bg[np.newaxis] - #data and seg shape: (1,c,x,y,z), where c=1 for seg - batch_3D = {'data': out_data, 'seg': out_seg} - for o in self.cf.roi_items: - batch_3D[o] = np.array([patient[o]]) - converter = ConvertSegToBoundingBoxCoordinates(3, self.cf.roi_items, False, self.cf.class_specific_seg) - batch_3D = converter(**batch_3D) - batch_3D.update({'patient_bb_target': batch_3D['bb_target'], 'original_img_shape': out_data.shape}) - for o in self.cf.roi_items: - batch_3D["patient_" + o] = batch_3D[o] - - if self.cf.dim == 2: - out_data = np.transpose(data, axes=(3,0,1,2)) #(c,y,x,z) to (b=z,c,x,y), use z=b as batchdim - out_seg = np.transpose(seg, axes=(3,0,1,2)).astype('uint8') #(c,y,x,z) to (b=z,c,x,y) - - batch_2D = {'data': out_data, 'seg': out_seg} - for o in self.cf.roi_items: - batch_2D[o] = np.repeat(np.array([patient[o]]), len(out_data), axis=0) - - converter = ConvertSegToBoundingBoxCoordinates(2, self.cf.roi_items, False, self.cf.class_specific_seg) - batch_2D = converter(**batch_2D) - - if plot_bg is not None: - out_plot_bg = np.transpose(plot_bg, axes=(2,0,1)).astype('float32') - - if self.cf.merge_2D_to_3D_preds: - batch_2D.update({'patient_bb_target': batch_3D['patient_bb_target'], - 'original_img_shape': out_data.shape}) - for o in self.cf.roi_items: - batch_2D["patient_" + o] = batch_3D['patient_'+o] - else: - batch_2D.update({'patient_bb_target': batch_2D['bb_target'], - 'original_img_shape': out_data.shape}) - for o in self.cf.roi_items: - batch_2D["patient_" + o] = batch_2D[o] - - out_batch = batch_3D if self.cf.dim == 3 else batch_2D - out_batch.update({'pid': np.array([patient['pid']] * len(out_data)), - 'spec':np.array([patient['spec']] * len(out_data))}) - - if self.cf.plot_bg_chan in self.chans and discarded_chans>0: - assert plot_bg is None - plot_bg = int(self.cf.plot_bg_chan - discarded_chans) - out_plot_bg = plot_bg - if plot_bg is not None: - out_batch['plot_bg'] = out_plot_bg - - #eventual tiling into patches - spatial_shp = out_batch["data"].shape[2:] - if np.any([spatial_shp[ix] > self.patch_size[ix] for ix in range(len(spatial_shp))]): - patient_batch = out_batch - #print("patientiterator produced patched batch!") - patch_crop_coords_list = dutils.get_patch_crop_coords(data[0], self.patch_size) - new_img_batch, new_seg_batch = [], [] - - for c in patch_crop_coords_list: - new_img_batch.append(data[:, c[0]:c[1], c[2]:c[3], c[4]:c[5]]) - seg_patch = seg[:, c[0]:c[1], c[2]: c[3], c[4]:c[5]] - new_seg_batch.append(seg_patch) - shps = [] - for arr in new_img_batch: - shps.append(arr.shape) - - data = np.array(new_img_batch) # (patches, c, x, y, z) - seg = np.array(new_seg_batch) - if self.cf.dim == 2: - # all patches have z dimension 1 (slices). 
discard dimension - data = data[..., 0] - seg = seg[..., 0] - patch_batch = {'data': data, 'seg': seg.astype('uint8'), - 'pid': np.array([patient['pid']] * data.shape[0]), - 'spec':np.array([patient['spec']] * data.shape[0])} - for o in self.cf.roi_items: - patch_batch[o] = np.repeat(np.array([patient[o]]), len(patch_crop_coords_list), axis=0) - # patient-wise (orig) batch info for putting the patches back together after prediction - for o in self.cf.roi_items: - patch_batch["patient_"+o] = patient_batch['patient_'+o] - patch_batch['patch_crop_coords'] = np.array(patch_crop_coords_list) - patch_batch['patient_bb_target'] = patient_batch['patient_bb_target'] - #patch_batch['patient_roi_labels'] = patient_batch['patient_roi_labels'] - patch_batch['patient_data'] = patient_batch['data'] - patch_batch['patient_seg'] = patient_batch['seg'] - patch_batch['original_img_shape'] = patient_batch['original_img_shape'] - if plot_bg is not None: - patch_batch['patient_plot_bg'] = patient_batch['plot_bg'] - - converter = ConvertSegToBoundingBoxCoordinates(self.cf.dim, self.cf.roi_items, False, self.cf.class_specific_seg) - - patch_batch = converter(**patch_batch) - out_batch = patch_batch - - self.patient_ix += 1 - # todo raise stopiteration when in test mode - if self.patient_ix == len(self.dataset_pids): - self.patient_ix = 0 - - return out_batch - - -def create_data_gen_pipeline(cf, patient_data, do_aug=True, sample_pids_w_replace=True): - """ - create mutli-threaded train/val/test batch generation and augmentation pipeline. - :param patient_data: dictionary containing one dictionary per patient in the train/test subset - :param test_pids: (optional) list of test patient ids, calls the test generator. - :param do_aug: (optional) whether to perform data augmentation (training) or not (validation/testing) - :return: multithreaded_generator - """ - data_gen = BatchGenerator(cf, patient_data, sample_pids_w_replace=sample_pids_w_replace) - - my_transforms = [] - if do_aug: - if cf.da_kwargs["mirror"]: - mirror_transform = Mirror(axes=cf.da_kwargs['mirror_axes']) - my_transforms.append(mirror_transform) - if cf.da_kwargs["gamma_transform"]: - gamma_transform = GammaTransform(gamma_range=cf.da_kwargs["gamma_range"], invert_image=False, - per_channel=False, retain_stats=True) - my_transforms.append(gamma_transform) - if cf.dim == 3: - # augmentations with desired effect on z-dimension - spatial_transform = SpatialTransform(patch_size=cf.patch_size, - patch_center_dist_from_border=cf.da_kwargs['rand_crop_dist'], - do_elastic_deform=False, - do_rotation=cf.da_kwargs['do_rotation'], angle_x=cf.da_kwargs['angle_x'], - angle_y=cf.da_kwargs['angle_y'], angle_z=cf.da_kwargs['angle_z'], - do_scale=cf.da_kwargs['do_scale'], scale=cf.da_kwargs['scale'], - random_crop=cf.da_kwargs['random_crop'], - border_mode_data=cf.da_kwargs['border_mode_data']) - my_transforms.append(spatial_transform) - # augmentations that are only meant to affect x-y - my_transforms.append(Convert3DTo2DTransform()) - spatial_transform = SpatialTransform(patch_size=cf.patch_size[:2], - patch_center_dist_from_border=cf.da_kwargs['rand_crop_dist'][:2], - do_elastic_deform=cf.da_kwargs['do_elastic_deform'], - alpha=cf.da_kwargs['alpha'], sigma=cf.da_kwargs['sigma'], - do_rotation=False, - do_scale=False, - random_crop=False, - border_mode_data=cf.da_kwargs['border_mode_data']) - my_transforms.append(spatial_transform) - my_transforms.append(Convert2DTo3DTransform()) - - else: - spatial_transform = SpatialTransform(patch_size=cf.patch_size[:cf.dim], 
- patch_center_dist_from_border=cf.da_kwargs['rand_crop_dist'][:2], - do_elastic_deform=cf.da_kwargs['do_elastic_deform'], - alpha=cf.da_kwargs['alpha'], sigma=cf.da_kwargs['sigma'], - do_rotation=cf.da_kwargs['do_rotation'], angle_x=cf.da_kwargs['angle_x'], - angle_y=cf.da_kwargs['angle_y'], angle_z=cf.da_kwargs['angle_z'], - do_scale=cf.da_kwargs['do_scale'], scale=cf.da_kwargs['scale'], - random_crop=cf.da_kwargs['random_crop'], - border_mode_data=cf.da_kwargs['border_mode_data']) - my_transforms.append(spatial_transform) - else: - my_transforms.append(CenterCropTransform(crop_size=cf.patch_size[:cf.dim])) - - if cf.create_bounding_box_targets: - my_transforms.append(ConvertSegToBoundingBoxCoordinates(cf.dim, cf.roi_items, False, cf.class_specific_seg)) - #batch receives entry 'bb_target' w bbox coordinates as [y1,x1,y2,x2,z1,z2]. - #my_transforms.append(ConvertSegToOnehotTransform(classes=range(cf.num_seg_classes))) - all_transforms = Compose(my_transforms) - #MTAugmenter creates iterator from data iterator data_gen after applying the composed transform all_transforms - multithreaded_generator = MultiThreadedAugmenter(data_gen, all_transforms, num_processes=cf.n_workers, - seeds=list(np.random.randint(0,cf.n_workers*2,size=cf.n_workers))) - return multithreaded_generator - -def get_train_generators(cf, logger, data_statistics=True): - """ - wrapper function for creating the training batch generator pipeline. returns the train/val generators - need to select cv folds on patient level, but be able to include both breasts of each patient. - """ - dataset = Dataset(cf, logger) - - dataset.init_FoldGenerator(cf.seed, cf.n_cv_splits) - dataset.generate_splits(check_file=os.path.join(cf.exp_dir, 'fold_ids.pickle')) - set_splits = dataset.fg.splits - - test_ids, val_ids = set_splits.pop(cf.fold), set_splits.pop(cf.fold-1) - train_ids = np.concatenate(set_splits, axis=0) - - if cf.held_out_test_set: - train_ids = np.concatenate((train_ids, test_ids), axis=0) - test_ids = [] - - train_data = {k: v for (k, v) in dataset.data.items() if k in train_ids} - val_data = {k: v for (k, v) in dataset.data.items() if k in val_ids} - - logger.info("data set loaded with: {} train / {} val / {} test patients".format(len(train_ids), len(val_ids), len(test_ids))) - if data_statistics: - dataset.calc_statistics(subsets={"train":train_ids, "val":val_ids, "test":test_ids}, - plot_dir=os.path.join(cf.plot_dir,"dataset")) - - batch_gen = {} - batch_gen['train'] = create_data_gen_pipeline(cf, train_data, do_aug=cf.do_aug) - batch_gen['val_sampling'] = create_data_gen_pipeline(cf, val_data, do_aug=False, sample_pids_w_replace=False) - - if cf.val_mode == 'val_patient': - batch_gen['val_patient'] = PatientBatchIterator(cf, val_data) - batch_gen['n_val'] = len(val_ids) if cf.max_val_patients=="all" else cf.max_val_patients - elif cf.val_mode == 'val_sampling': - batch_gen['n_val'] = cf.num_val_batches if cf.num_val_batches!="all" else len(val_ids) - - return batch_gen - -def get_test_generator(cf, logger): - """ - if get_test_generators is called multiple times in server env, every time of - Dataset initiation rsync will check for copying the data; this should be okay - since rsync will not copy if files already exist in destination. 
- """ - - if cf.held_out_test_set: - sourcedir = cf.test_data_sourcedir - test_ids = None - else: - sourcedir = None - with open(os.path.join(cf.exp_dir, 'fold_ids.pickle'), 'rb') as handle: - set_splits = pickle.load(handle) - test_ids = set_splits[cf.fold] - - test_set = Dataset(cf, logger, test_ids, data_sourcedir=sourcedir) - logger.info("data set loaded with: {} test patients".format(len(test_set.set_ids))) - batch_gen = {} - batch_gen['test'] = PatientBatchIterator(cf, test_set.data) - batch_gen['n_test'] = len(test_set.set_ids) if cf.max_test_patients=="all" else min(cf.max_test_patients, len(test_set.set_ids)) - - return batch_gen - - -if __name__=="__main__": - import sys - sys.path.append('../') # works on cluster indep from where sbatch job is started - import plotting as plg - import utils.exp_utils as utils - from configs import Configs - cf = configs() - - total_stime = time.time() - times = {} - - #cf.server_env = True - #cf.data_dir = "experiments/dev_data" - - #dataset = Dataset(cf) - #patient = dataset['Master_00018'] - cf.exp_dir = "experiments/dev/" - cf.plot_dir = cf.exp_dir+"plots" - os.makedirs(cf.exp_dir, exist_ok=True) - cf.fold = 0 - logger = utils.get_logger(cf.exp_dir) - gens = get_train_generators(cf, logger) - train_loader = gens['train'] - - #for i in range(train_loader.dataset_length): - # print("batch", i) - stime = time.time() - ex_batch = next(train_loader) - #ex_batch = next(train_loader) - times["train_batch"] = time.time()-stime - plg.view_batch(cf, ex_batch, out_file="experiments/dev/dev_exbatch.png", show_gt_labels=True) - - #with open(os.path.join(cf.exp_dir, "fold_"+str(cf.fold), "BatchGenerator_stats.txt"), mode="w") as file: - # train_loader.generator.print_stats(logger, file) - - - val_loader = gens['val_sampling'] - stime = time.time() - ex_batch = next(val_loader) - times["val_batch"] = time.time()-stime - stime = time.time() - plg.view_batch(cf, ex_batch, out_file="experiments/dev/dev_exvalbatch.png", show_gt_labels=True, plot_mods=False, show_info=False) - times["val_plot"] = time.time()-stime - - test_loader = get_test_generator(cf, logger)["test"] - stime = time.time() - ex_batch = test_loader.generate_train_batch() - print(ex_batch["data"].shape) - times["test_batch"] = time.time()-stime - stime = time.time() - plg.view_batch(cf, ex_batch, show_gt_labels=True, out_file="experiments/dev/ex_patchbatch.png", show_gt_boxes=False, show_info=False, dpi=400, sample_picks=[2,5], plot_mods=False) - times["test_patchbatch_plot"] = time.time()-stime - - #stime = time.time() - #ex_batch['data'] = ex_batch['patient_data'] - #ex_batch['seg'] = ex_batch['patient_seg'] - #if 'patient_plot_bg' in ex_batch.keys(): - # ex_batch['plot_bg'] = ex_batch['patient_plot_bg'] - #plg.view_batch(cf, ex_batch, show_gt_labels=True, out_file="experiments/dev/dev_expatchbatch.png") - #times["test_patientbatch_plot"] = time.time() - stime - - - #print("patch batch keys", ex_batch.keys()) - #print("patch batch les gle", ex_batch["lesion_gleasons"].shape) - #print("patch batch gsbx", ex_batch["GSBx"].shape) - #print("patch batch class_targ", ex_batch["class_targets"].shape) - #print("patient b roi labels", ex_batch["patient_roi_labels"].shape) - #print("patient les gleas", ex_batch["patient_lesion_gleasons"].shape) - #print("patch&patient batch pid", ex_batch["pid"], len(ex_batch["pid"])) - #print("unique patient_seg", np.unique(ex_batch["patient_seg"])) - #print("pb patient roi labels", len(ex_batch["patient_roi_labels"]), ex_batch["patient_roi_labels"]) - #print("pid", 
ex_batch["pid"]) - - #patient_batch = {k[len("patient_"):]:v for (k,v) in ex_batch.items() if k.lower().startswith("patient")} - #patient_batch["pid"] = ex_batch["pid"] - #stime = time.time() - #plg.view_batch(cf, patient_batch, out_file="experiments/dev_expatientbatch") - #times["test_plot"] = time.time()-stime - - - print("Times recorded throughout:") - for (k,v) in times.items(): - print(k, "{:.2f}".format(v)) - - mins, secs = divmod((time.time() - total_stime), 60) - h, mins = divmod(mins, 60) - t = "{:d}h:{:02d}m:{:02d}s".format(int(h), int(mins), int(secs)) - print("{} total runtime: {}".format(os.path.split(__file__)[1], t)) \ No newline at end of file diff --git a/datasets/prostate/data_preprocessing.py b/datasets/prostate/data_preprocessing.py deleted file mode 100644 index ca97532..0000000 --- a/datasets/prostate/data_preprocessing.py +++ /dev/null @@ -1,809 +0,0 @@ -__author__ = "Simon Kohl, Gregor Ramien" - - -# subject-wise extractor that does not depend on Prisma/Radval and that checks for geometry miss-alignments -# (corrects them if applicable), images and masks should be stored separately, each in its own memmap -# at run-time, the data-loaders will assemble dicts using the histo csvs -import os -import sys -from multiprocessing import Pool -import warnings -import time -import shutil - -import pandas as pd -import numpy as np -import pickle - -import SimpleITK as sitk -from scipy.ndimage.measurements import center_of_mass - -sys.path.append("../") -import plotting as plg -import data_manager as dmanager - -def save_obj(obj, name): - """Pickle a python object.""" - with open(name + '.pkl', 'wb') as f: - pickle.dump(obj, f, pickle.HIGHEST_PROTOCOL) - -def load_array(path): - """Load an image as a numpy array.""" - img = sitk.ReadImage(path) - return sitk.GetArrayFromImage(img) - -def id_to_spec(id, base_spec): - """Construct subject specifier from base string and an integer subject number.""" - num_zeros = 5 - len(str(id)) - assert num_zeros>=0, "id_to_spec: patient id too long to fit into 5 figures" - return base_spec + '_' + ('').join(['0'] * num_zeros) + str(id) - -def spec_to_id(spec): - """Get subject id from string""" - return int(spec[-5:]) - -def has_equal_geometry(img1, img2, precision=0.001): - """Check whether geometries of 2 images match within a given precision.""" - equal = True - - # assert equal image extentions - delta = [abs((img1.GetSize()[i] - img2.GetSize()[i])) < precision for i in range(3)] - if not np.all(delta): - equal = False - - # assert equal origins - delta = [abs((img1.GetOrigin()[i] - img2.GetOrigin()[i])) < precision for i in range(3)] - if not np.all(delta): - equal = False - - # assert equal spacings - delta = [abs((img1.GetSpacing()[i] - img2.GetSpacing()[i])) < precision for i in range(3)] - if not np.all(delta): - equal = False - - return equal - -def resample_to_reference(ref_img, img, interpolation): - """ - Resample an sitk image to a reference image, the size, spacing, - origin and direction of the reference image will be used - :param ref_img: - :param img: - :param interpolation: - :return: interpolated SITK image - """ - if interpolation == 'nearest': - interpolator = sitk.sitkNearestNeighbor #these are just integers - elif interpolation == 'linear': - interpolator = sitk.sitkLinear - elif interpolation == 'bspline': - # basis spline of order 3 - interpolator = sitk.sitkBSpline - else: - raise NotImplementedError('Interpolation of type {} not implemented!'.format(interpolation)) - - img = sitk.Cast(img, sitk.sitkFloat64) - - rif 
= sitk.ResampleImageFilter() - # set the output size, origin, spacing and direction to that of the provided image - rif.SetReferenceImage(ref_img) - rif.SetInterpolator(interpolator) - - return rif.Execute(img) - -def rescale(img, scaling, interpolation=sitk.sitkBSpline, out_fpath=None): - """ - :param scaling: tuple (z_scale, y_scale, x_scale) of scaling factors - :param out_fpath: filepath (incl filename), if set will write .nrrd (uncompressed) - to that location - - sitk/nrrd images spacing: imgs are treated as physical objects. When resampling, - a given image is re-evaluated (resampled) at given gridpoints, the physical - properties of the image don't change. Hence, if the resampling-grid has a smaller - spacing than the original image(grid), the image is sampled more often than before. - Since every sampling produces one pixel, the resampled image will have more pixels - (when sampled at undefined points of the image grid, the sample values will be - interpolated). I.e., for an upsampling of an image, we need to set a smaller - spacing for the resampling grid and a larger (pixel)size for the resampled image. - """ - (z,y,x) = scaling - - old_size = np.array(img.GetSize()) - old_spacing = np.array(img.GetSpacing()) - - - new_size = (int(old_size[0]*x), int(old_size[1]*y), int(old_size[2]*z)) - new_spacing = old_spacing * (old_size/ new_size) - - rif = sitk.ResampleImageFilter() - - rif.SetReferenceImage(img) - rif.SetInterpolator(interpolation) - rif.SetOutputSpacing(new_spacing) - rif.SetSize(new_size) - - new_img = rif.Execute(img) - - if not out_fpath is None: - writer = sitk.ImageFileWriter() - writer.SetFileName(out_fpath) - writer.SetUseCompression(True) - writer.Execute(new_img) - - return new_img - -def get_valid_z_range(arr): - """ - check which z-slices of an image array aren't constant - :param arr: - :return: min and max valid slice found; under the assumption that invalid - slices occur never inbetween valid slices - """ - - valid_z_slices = [] - for z in range(arr.shape[0]): - if np.var(arr[z]) != 0: - valid_z_slices.append(z) - return valid_z_slices[0], valid_z_slices[-1] - -def convert_to_arrays(data): - """convert to numpy arrays. - sitk.Images have shape (x,y,z), but GetArrayFromImage returns shape (z,y,x) - """ - for mod in data['img'].keys(): - data['img'][mod] = sitk.GetArrayFromImage(data['img'][mod]).astype(np.float32) - - for mask in data['anatomical_masks'].keys(): - data['anatomical_masks'][mask] = sitk.GetArrayFromImage(data['anatomical_masks'][mask]).astype(np.uint8) - - for mask in data['lesions'].keys(): - data['lesions'][mask] = sitk.GetArrayFromImage(data['lesions'][mask]).astype(np.uint8) - return data - -def merge_crossmod_masks(data, rename_tags, mode="union"): - """if data has multiple ground truths (e.g. after registration), merge - masks by mode. class labels (leason gleason) are assumed to be naturally registered (no ambiguity) - :param rename_tags: usually from prepro_cf['rename_tags'] - :param mode: 'union' or name of mod ('adc', 't2') to consider only one gt - """ - - if 'adc' in data['img'].keys() and 't2' in data['img'].keys(): - if mode=='union': - #print("Merging gts of T2, ADC mods. 
Assuming data is registered!") - tags = list(data["anatomical_masks"].keys()) - for tag in tags: - tags.remove(tag) - merge_with = [mtag for mtag in tags\ - if mtag.lower().split("_")[2]==tag.lower().split("_")[2]] - assert len(merge_with)==1, "attempted to merge {} ground truths".format(len(merge_with)) - merge_with = merge_with[0] - tags.remove(merge_with) - #masks are binary - #will throw error if masks dont have same shape - data["anatomical_masks"][tag] = np.logical_or(data["anatomical_masks"][tag].astype(np.uint8), - data["anatomical_masks"].pop(merge_with).astype(np.uint8)).astype(np.uint8) - - tags = list(data["lesions"].keys()) - for tag in tags: - tags.remove(tag) - merge_with = [mtag for mtag in tags\ - if mtag.lower().split("_")[2]==tag.lower().split("_")[2]] - assert len(merge_with)==1, "attempted to merge {} ground truths".format(len(merge_with)) - merge_with = merge_with[0] - tags.remove(merge_with) - data["lesions"][tag] = np.logical_or(data["lesions"][tag], - data["lesions"].pop(merge_with)).astype(np.uint8) - - elif mode=='adc' or mode=='t2': - data["anatomical_masks"] = {tag:v for tag,v in data["anatomical_masks"].items() if - tag.lower().split("_")[1]==mode} - data["lesions"] = {tag: v for tag, v in data["lesions"].items() if tag.lower().split("_")[1] == mode} - - else: - raise Exception("cross-mod gt merge mode {} not implemented".format(mode)) - - for tag in list(data["anatomical_masks"]): - data["anatomical_masks"][rename_tags[tag]] = data["anatomical_masks"].pop(tag) - #del data["anatomical_masks"][tag] - for tag in list(data["lesions"]): - new_tag = "seg_REG_"+"".join(tag.split("_")[2:]) - data["lesions"][new_tag] = data["lesions"].pop(tag) - data["lesion_gleasons"][new_tag] = data["lesion_gleasons"].pop(tag) - - return data - -def crop_3D(data, pre_crop_size, center_of_mass_crop=True): - pre_crop_size = np.array(pre_crop_size) - # restrain z-ranges to where ADC has valid entries - if 'adc' in data['img'].keys(): - ref_mod = 'adc' - comp_mod = 't2' - else: - ref_mod = 't2' - comp_mod = 'adc' - min_z, max_z = get_valid_z_range(data['img'][ref_mod]) - if comp_mod in data['img'].keys(): - assert (min_z, max_z) == get_valid_z_range(data['img'][comp_mod]), "adc, t2 different valid z range" - - if center_of_mass_crop: - # cut the arrays to the desired x_y_crop_size around the center-of-mass of the PRO segmentation - pro_com = center_of_mass(data['anatomical_masks']['pro']) - center = [int(np.round(i, 0)) for i in pro_com] - else: - center = [data['img'][ref_mod].shape[i] // 2 for i in range(3)] - - - l = pre_crop_size // 2 - #z_low, z_up = max(min_z, center[0] - l[0]), min(max_z + 1, center[0] + l[0]) - z_low, z_up = center[0] - l[0], center[0] + l[0] - while z_low<min_z or z_up>max_z+1: - if z_low<min_z and z_up>max_z+1: - warnings.warn("could not crop patient {}'s z-dim to demanded size.".format(data['Original_ID'])) - if z_up>max_z+1: - z_low -= 1 - z_up -= 1 - if z_low=0),\ - "Precropsize too large for image dimensions by {} pixels in patient {}".format(d, data['Original_ID']) - - for mod in data['img'].keys(): - data['img'][mod] = data['img'][mod][z_low:z_up, center[1]-l[1]: center[1] + l[1], center[2]-l[2]: center[2]+l[2]] - vals_lst = list(data['img'].values()) - assert np.all([mod.shape==vals_lst[0].shape for mod in vals_lst]),\ - "produced modalities for same subject with different shapes" - - for mask in data['anatomical_masks'].keys(): - data['anatomical_masks'][mask] = data['anatomical_masks'][mask] \ - [z_low:z_up, center[1]-l[1]: center[1]+l[1], center[2]-l[2]: center[2]+l[2]] - - for
mask in data['lesions'].keys(): - data['lesions'][mask] = data['lesions'][mask] \ - [z_low:z_up, center[1]-l[1]: center[1]+l[1], center[2]-l[2]: center[2]+l[2]] - return data - -def add_transitional_zone_mask(data): - if 'pro' in data['anatomical_masks'] and 'pz' in data['anatomical_masks']: - intersection = data['anatomical_masks']['pro'] & data['anatomical_masks']['pz'] - data['anatomical_masks']['tz'] = data['anatomical_masks']['pro'] - intersection - return data - -def generate_labels(data, seg_labels, class_labels, gleason_map, observables_rois): - """merge individual binary labels to an integer label mask and create class labels from Gleason score. - if seg_labels has seg_label 'roi': seg label will be roi count. - """ - anatomical_masks2label = [l for l in data['anatomical_masks'].keys() if l in seg_labels.keys()] - - data['seg'] = np.zeros(shape=data['anatomical_masks']['pro'].shape, dtype=np.uint8) - data['roi_classes'] = [] - #data['roi_observables']: dict, each entry is one list of length final roi_count in this patient - data['roi_observables'] = {obs:[] for obs in observables_rois} - roi_count = 0 - - for mask in anatomical_masks2label: - ixs = np.where(data['anatomical_masks'][mask]) - roi_class = class_labels[mask] - if len(ixs)>0 and roi_class!=-1: - roi_count+=1 - label = seg_labels[mask] - if label=='roi': - label = roi_count - data['seg'][ixs] = label - data['roi_classes'].append(roi_class) - for obs in observables_rois: - obs_val = data[obs][mask] if mask in data[obs].keys() else None - data['roi_observables'][obs].append(obs_val) - #print("appended mask lab", class_labels[mask]) - - if "lesions" in seg_labels.keys(): - for lesion_key, lesion_mask in data['lesions'].items(): - ixs = np.where(lesion_mask) - roi_class = class_labels['lesions'] - if roi_class == "gleason": - roi_class = gleason_map(data['lesion_gleasons'][lesion_key]) - # roi_class = data['lesion_gleasons'][lesion_key] - if len(ixs)>0 and roi_class!=-1: - roi_count+=1 - label = seg_labels['lesions'] - if label=='roi': - label = roi_count - data['seg'][ixs] = label - #segs have form: slices x h x w, i.e., one channel per z-slice, each lesion has its own label - data['roi_classes'].append(roi_class) - for obs in observables_rois: - obs_val = data[obs][lesion_key] if lesion_key in data[obs].keys() else None - data['roi_observables'][obs].append(obs_val) - - # data['lesion_gleasons'][label] = data['lesion_gleasons'].pop(lesion_key) - for obs in data['roi_observables'].keys(): - del data[obs] - return data - -def normalize_image(data, normalization_dict): - """normalize the full image.""" - percentiles = normalization_dict['percentiles'] - for mod in data['img'].keys(): - p = np.percentile(data['img'][mod], percentiles[0]) - q = np.percentile(data['img'][mod], percentiles[1]) - masked_img = data['img'][mod][(data['img'][mod] > p) & (data['img'][mod] < q)] - data['img'][mod] = (data['img'][mod] - np.median(masked_img)) / np.std(masked_img) - return data - -def concat_mods(data, mods2concat): - """concat modalities on new first channel - """ - concat_on_channel = [] #holds tmp data to be concatenated on the same channel - for mod in mods2concat: - mod_img = data['img'][mod][np.newaxis] - concat_on_channel.append(mod_img) - data['img'] = np.concatenate(concat_on_channel, axis=0) - - return data - -def swap_yx(data, apply_flag): - """swap x and y axes in img and seg - """ - if apply_flag: - data["img"] = np.swapaxes(data["img"], -1,-2) - data["seg"] = np.swapaxes(data["seg"], -1,-2) - - return data - -def 
get_fg_z_indices(seg): - """return z-indices of array at which the x-y-arrays have labels!=0, 0 is background - """ - fg_slices = np.argwhere(seg.astype(int))[:,0] - fg_slices = np.unique(fg_slices) - return fg_slices - - -class Preprocessor(): - - def __init__(self, config): - - self._config_path = config.config_path - self.full_cf = config - self._cf = config.prepro - - def get_excluded_master_ids(self): - """Get the Master IDs that are excluded from their corresponding Prisma/Radval/Master IDs.""" - - excluded_prisma = self._cf['excluded_prisma_subjects'] - excluded_radval = self._cf['excluded_radval_subjects'] - excluded_master = self._cf['excluded_master_subjects'] - histo = self._histo_patient_based - - excluded_master_ids = [] - - if len(excluded_prisma) > 0: - for prisma_id in excluded_prisma: - master_spec = histo['Master_ID'][histo['Original_ID'] == id_to_spec(prisma_id, 'Prisma')].values[0] - excluded_master_ids.append(spec_to_id(master_spec)) - - if len(excluded_radval) > 0: - for radval_id in excluded_radval: - master_spec = histo['Master_ID'][histo['Original_ID'] == id_to_spec(radval_id, 'Radiology')].values[0] - excluded_master_ids.append(spec_to_id(master_spec)) - - excluded_master_ids += excluded_master - - return excluded_master_ids - - - def prepare_filenames(self): - """check whether histology-backed subjects and lesions are available in the data and - yield dict of subject file-paths.""" - - # assemble list of histology-backed subject ids and check that corresponding images are available - self._histo_lesion_based = pd.read_csv(os.path.join(self._cf['histo_dir'], self._cf['histo_lesion_based'])) - self._histo_patient_based = pd.read_csv(os.path.join(self._cf['histo_dir'], self._cf['histo_patient_based'])) - - excluded_master_ids = self.get_excluded_master_ids() - self._subj_ids = np.unique(self._histo_lesion_based[self._cf['histo_id_column_name']].values) - self._subj_ids = [s for s in self._subj_ids.tolist() if - s not in excluded_master_ids] - - # get subject directory paths from - img_paths = os.listdir(self._cf['data_dir']) - self._img_paths = [p for p in img_paths if 'Master' in p and len(p) == len('Master') + 6] - - # check that all images of subjects with histology are available - available_subj_ids = np.array([spec_to_id(s) for s in self._img_paths]) - self._missing_image_ids = np.setdiff1d(self._subj_ids, available_subj_ids) - - assert len(self._missing_image_ids)== 0,\ - 'Images of subjs {} are not available.'.format(self._missing_image_ids) - - # make dict holding relevant paths to data of each subject - self._paths_by_subject = {} - for s in self._subj_ids: - self._paths_by_subject[s] = self.load_subject_paths(s) - - - def load_subject_paths(self, subject_id): - """Make dict holding relevant paths to data of a given subject.""" - dir_spec = self._cf['dir_spec'] - s_dict = {} - - # iterate images - images_paths = {} - for kind, filename in self._cf['images'].items(): - filename += self._cf['img_postfix']+self._cf['overall_postfix'] - images_paths[kind] = os.path.join(self._cf['data_dir'], id_to_spec(subject_id, dir_spec), filename) - s_dict['images'] = images_paths - - # iterate anatomical structures - anatomical_masks_paths = {} - for tag in self._cf['anatomical_masks']: - filename = tag + self._cf['overall_postfix'] - anatomical_masks_paths[tag] = os.path.join(self._cf['data_dir'], id_to_spec(subject_id, dir_spec), filename) - s_dict['anatomical_masks'] = anatomical_masks_paths - - # iterate lesions - lesion_names = [] - if 'adc' in self._cf['images']: - 
lesion_names.extend(self._histo_lesion_based[self._histo_lesion_based[self._cf['histo_id_column_name']]\ - == subject_id]['segmentationsNameADC'].dropna()) - if 't2' in self._cf['images']: - lesion_names.extend(self._histo_lesion_based[self._histo_lesion_based[self._cf['histo_id_column_name']]\ - == subject_id]['segmentationsNameT2'].dropna()) - lesion_paths = {} - for l in lesion_names: - lesion_path = os.path.join(self._cf['data_dir'], id_to_spec(subject_id, dir_spec), - l+self._cf['lesion_postfix']+self._cf['overall_postfix']) - assert os.path.isfile(lesion_path), 'Lesion mask not found under {}!'.format(lesion_path) - - lesion_paths[l] = lesion_path - - s_dict['lesions'] = lesion_paths - return s_dict - - - def load_subject_data(self, subject_id): - """load img data, masks, histo data for a single subject.""" - subj_paths = self._paths_by_subject[subject_id] - data = {} - - # iterate images - data['img'] = {} - for mod in subj_paths['images']: - data['img'][mod] = sitk.ReadImage(subj_paths['images'][mod]) - - # iterate anatomical masks - data['anatomical_masks'] = {} - for tag in subj_paths['anatomical_masks']: - data['anatomical_masks'][tag] = sitk.ReadImage(subj_paths['anatomical_masks'][tag]) - - # iterate lesions, include gleason score - data['lesions'] = {} - data['lesion_gleasons'] = {} - idcol = self._cf['histo_id_column_name'] - subj_histo = self._histo_lesion_based[self._histo_lesion_based[idcol]==subject_id] - for l in subj_paths['lesions']: - #print("subjpaths lesions l ", l) - data['lesions'][l] = sitk.ReadImage(subj_paths['lesions'][l]) - - try: - gleason = subj_histo[subj_histo["segmentationsNameADC"]==l]["Gleason"].tolist()[0] - except IndexError: - gleason = subj_histo[subj_histo["segmentationsNameT2"]==l]["Gleason"].tolist()[0] - - data['lesion_gleasons'][l] = gleason - - # add other subj-specific histo and id data - idcol = self._cf['histo_pb_id_column_name'] - subj_histo = self._histo_patient_based[self._histo_patient_based[idcol]==subject_id] - for d in self._cf['observables_patient']: - data[d] = subj_histo[d].values - - return data - - def analyze_subject_data(self, data): - """record post-alignment geometries.""" - - ref_mods = data['img'].keys() - geos = {} - for ref_mod in ref_mods: - geos[ref_mod] = {'size': data['img'][ref_mod].GetSize(), 'origin': data['img'][ref_mod].GetOrigin(), - 'spacing': data['img'][ref_mod].GetSpacing()} - - return geos - - def process_subject_data(self, data): - """evtly rescale images, check for geometry miss-alignments and perform crop.""" - - if not self._cf['mod_scaling'] == (1,1,1): - for img_name in data['img']: - res_img = rescale(data["img"][img_name], self._cf['mod_scaling']) - data['img'][img_name] = res_img - - #----check geometry alignment between masks and image--- - for tag in self._cf['anatomical_masks']: - if tag.lower().startswith("seg_adc"): - ref_mod = 'adc' - elif tag.lower().startswith("seg_t2"): - ref_mod = 't2' - if not has_equal_geometry(data['img'][ref_mod], data['anatomical_masks'][tag]): - #print("bef", np.unique(sitk.GetArrayFromImage(data['anatomical_masks'][tag]))) - #print('Geometry mismatch: {}, {} is resampled to its image geometry!'.format(data["Original_ID"], tag)) - data['anatomical_masks'][tag] =\ - resample_to_reference(data['img'][ref_mod], data['anatomical_masks'][tag], - interpolation=self._cf['interpolation']) - #print("aft", np.unique(sitk.GetArrayFromImage(data['anatomical_masks'][tag]))) - - for tag in data['lesions'].keys(): - if tag.lower().startswith("seg_adc"): - ref_mod = 'adc' - elif 
tag.lower().startswith("seg_t2"): - ref_mod = 't2' - if not has_equal_geometry(data['img'][ref_mod], data['lesions'][tag]): - #print('Geometry mismatch: {}, {} is resampled to its image geometry!'.format(data["Original_ID"], tag)) - #print("pre-sampling data type: {}".format(data['lesions'][tag])) - data['lesions'][tag] = resample_to_reference(data['img'][ref_mod], data['lesions'][tag], - interpolation=self._cf['interpolation']) - - - data = convert_to_arrays(data) - data = merge_crossmod_masks(data, self._cf['rename_tags'], mode=self._cf['merge_mode']) - data = crop_3D(data, self._cf['pre_crop_size'], self._cf['center_of_mass_crop']) - data = add_transitional_zone_mask(data) - data = generate_labels(data, self._cf['seg_labels'], self._cf['class_labels'], self._cf['gleason_map'], - self._cf['observables_rois']) - data = normalize_image(data, self._cf['normalization']) - data = concat_mods(data, self._cf['modalities2concat']) - data = swap_yx(data, self._cf["swap_yx_to_xy"]) - - data['fg_slices'] = get_fg_z_indices(data['seg']) - - return data - - def write_subject_arrays(self, data, subject_spec): - """Write arrays to disk and save file names in dict.""" - - out_dir = self._cf['output_directory'] - os.makedirs(out_dir, exist_ok=True) #might throw error if restrictive permissions - - out_dict = {} - - # image(s) - name = subject_spec + '_imgs.npy' - np.save(os.path.join(out_dir, name), data['img']) - out_dict['img'] = name - - # merged labels - name = subject_spec + '_merged_seg.npy' - np.save(os.path.join(out_dir, name), data['seg']) - out_dict['seg'] = name - - # anatomical masks separately - #for mask in list(data['anatomical_masks'].keys()) + (['tz'] if 'tz' in data.keys() else []): - # name = subject_spec + '_{}.npy'.format(mask) - # np.save(os.path.join(out_dir, name), data['anatomical_masks'][mask]) - # out_dict[mask] = name - - # lesion masks and lesion classes separately - #out_dict['lesion_gleasons'] = {} - #for mask in data['lesions'].keys(): - # name = subject_spec + '_{}.npy'.format(mask) - # np.save(os.path.join(out_dir, name), data['lesions'][mask]) - # out_dict[mask] = name - # out_dict['lesion_gleasons'][int(mask[-1])] = data['lesion_gleasons'][int(mask[-1])] - - # roi classes - out_dict['roi_classes'] = data['roi_classes'] - - - # fg_slices info - out_dict['fg_slices'] = data['fg_slices'] - - # other observables - for obs in self._cf['observables_patient']: - out_dict[obs] = data[obs] - for obs in data['roi_observables'].keys(): - out_dict[obs] = data['roi_observables'][obs] - #print("subj outdict ", out_dict.keys()) - return out_dict - - def subject_iteration(self, subj_id): #single iteration, wrapped for pooling - data = self.load_subject_data(subj_id) - data = self.process_subject_data(data) - subj_out_dict = self.write_subject_arrays(data, id_to_spec(subj_id, self._cf['dir_spec'])) - - print('Processed subject {}.'.format(id_to_spec(subj_id, self._cf['dir_spec']))) - - return (subj_id, subj_out_dict) - - def iterate_subjects(self, ids_subset=None, processes=6): - """process all subjects.""" - - if ids_subset is None: - ids_subset = self._subj_ids - else: - ids_subset = np.array(ids_subset) - id_check = np.array([id in self._subj_ids for id in ids_subset]) - assert np.all(id_check), "pids {} not in eligible pids".format(ids_subset[np.invert(id_check)]) - - p = Pool(processes) - subj_out_dicts = p.map(self.subject_iteration, ids_subset) - """note on Pool.map: only takes one arg, pickles the function for execution --> - cannot write to variables defined outside local 
scope --> cannot write to - self.variables, therefore need to return single subj_out_dicts and join after; - however p.map can access object methods via self.method(). - Is a bit complicated, but speedup is huge. - """ - p.close() - p.join() - assert len(subj_out_dicts)==len(ids_subset), "produced less subject dicts than demanded" - self._info_dict = {id:dic for (id, dic) in subj_out_dicts} - - return - - def subject_analysis(self, subj_id): # single iteration, wrapped for pooling - data = self.load_subject_data(subj_id) - analysis = self.analyze_subject_data(data) - - print('Analyzed subject {}.'.format(id_to_spec(subj_id, self._cf['dir_spec']))) - - return (subj_id, analysis) - - def analyze_subjects(self, ids_subset=None, processes=os.cpu_count()): - """process all subjects.""" - - if ids_subset is None: - ids_subset = self._subj_ids - else: - ids_subset = np.array(ids_subset) - id_check = np.array([id in self._subj_ids for id in ids_subset]) - assert np.all(id_check), "pids {} not in eligible pids".format(ids_subset[np.invert(id_check)]) - - p = Pool(processes) - subj_analyses = p.map(self.subject_analysis, ids_subset) - """note on Pool.map: only takes one arg, pickles the function for execution --> - cannot write to variables defined outside local scope --> cannot write to - self.variables, therefore need to return single subj_out_dicts and join after; - however p.map can access object methods via self.method(). - Is a bit complicated, but speedup is huge. - """ - p.close() - p.join() - - df = pd.DataFrame(columns=['id', 'mod', 'size', 'origin', 'spacing']) - for subj_id, analysis in subj_analyses: - for mod, geo in analysis.items(): - df.loc[len(df)] = [subj_id, mod, np.array(geo['size']), np.array(geo['origin']), np.array(geo['spacing'])] - - os.makedirs(self._cf['output_directory'], exist_ok=True) - df.to_csv(os.path.join(self._cf['output_directory'], "analysis_df")) - - print("\nOver all mods") - print("Size mean {}\u00B1{}".format(df['size'].mean(), np.std(df['size'].values))) - print("Origin mean {}\u00B1{}".format(df['origin'].mean(), np.std(df['origin'].values))) - print("Spacing mean {}\u00B1{}".format(df['spacing'].mean(), np.std(df['spacing'].values))) - print("-----------------------------------------\n") - - for mod in df['mod'].unique(): - print("\nModality: {}".format(mod)) - mod_df = df[df['mod']==mod] - print("Size mean {}\u00B1{}".format(mod_df['size'].mean(), np.std(mod_df['size'].values))) - print("Origin mean {}\u00B1{}".format(mod_df['origin'].mean(), np.std(mod_df['origin'].values))) - print("Spacing mean {}\u00B1{}".format(mod_df['spacing'].mean(), np.std(mod_df['spacing'].values))) - print("-----------------------------------------\n") - return - - - def dump_class_labels(self, out_dir): - """save used GS mapping and class labels to file. 
- will likely not work if non-lesion classes (anatomy) are contained - """ - #if "gleason_thresh" in self._cf.keys(): - possible_gs = {gs for p_dict in self._info_dict.values() for gs in p_dict['lesion_gleasons']} - gs_mapping_inv = [(self._cf["gleason_map"](gs)+1, gs) for gs in possible_gs] - #elif "gleason_mapping" in self._cf.keys(): - #gs_mapping_inv = [(val + 1, key) for (key, val) in self._cf["gleason_mapping"].items() if val != -1] - classes = {pair[0] for pair in gs_mapping_inv} - groups = [[pair[1] for pair in gs_mapping_inv if pair[0]==cl] for cl in classes] - gr_names = [ "GS{}-{}".format(min(gr), max(gr)) if len(gr)>1 else "GS"+str(*gr) for gr in groups ] - if "color_palette" in self._cf.keys(): - class_labels = {cl: {"gleasons": groups[ix], "name": gr_names[ix], "color": self._cf["color_palette"][ix]} - for ix, cl in enumerate(classes) } - else: - class_labels = {cl: {"gleasons": groups[ix], "name": gr_names[ix], "color": self.full_cf.color_palette[ix]} - for ix, cl in enumerate(classes)} - - save_obj(class_labels, os.path.join(out_dir,"pp_class_labels")) - - - - def save_and_finish(self): - """copy config and used code to out_dir.""" - - out_dir = self._cf['output_directory'] - - # save script - current_script = os.path.realpath(__file__) - shutil.copyfile(current_script, os.path.join(out_dir, 'applied_preprocessing.py')) - - # save config - if self._config_path[-1] == 'c': - self._config_path = self._config_path[:-1] - shutil.copyfile(self._config_path, os.path.join(out_dir, 'applied_config.py')) - - #copy histo data to local dir - lbased = self._cf['histo_lesion_based'] - pbased = self._cf['histo_patient_based'] - os.makedirs(self._cf['histo_dir_out'], exist_ok=True) - shutil.copyfile(self._cf['histo_dir']+lbased, self._cf['histo_dir_out']+lbased) - shutil.copyfile(self._cf['histo_dir']+pbased, self._cf['histo_dir_out']+pbased) - - # save info dict - #print("info dict ", self._info_dict) - save_obj(self._info_dict, self._cf['info_dict_path'][:-4]) - self.dump_class_labels(out_dir) - - return - - def convert_copy_npz(self): - if not self._cf["npz_dir"]: - return - print("npz dir", self._cf['npz_dir']) - os.makedirs(self._cf['npz_dir'], exist_ok=True) - save_obj(self._info_dict, os.path.join(self._cf['npz_dir'], - self._cf['info_dict_path'].split("/")[-1][:-4])) - lbased = self._cf['histo_lesion_based'] - pbased = self._cf['histo_patient_based'] - histo_out = os.path.join(self._cf['npz_dir'], "histos/") - print("histo dir", histo_out) - os.makedirs(histo_out, exist_ok=True) - shutil.copyfile(self._cf['histo_dir']+lbased, histo_out+lbased) - shutil.copyfile(self._cf['histo_dir']+pbased, histo_out+pbased) - shutil.copyfile(os.path.join(self._cf['output_directory'], 'applied_config.py'), - os.path.join(self._cf['npz_dir'], 'applied_config.py')) - shutil.copyfile(os.path.join(self._cf['output_directory'], 'applied_preprocessing.py'), - os.path.join(self._cf['npz_dir'], 'applied_preprocessing.py')) - shutil.copyfile(os.path.join(self._cf['output_directory'], 'pp_class_labels.pkl'), - os.path.join(self._cf['npz_dir'], 'pp_class_labels.pkl')) - - dmanager.pack_dataset(self._cf["output_directory"], self._cf["npz_dir"], recursive=True) - - - - - -if __name__ == "__main__": - - stime = time.time() - - from configs import Configs - cf = configs() - - - pp = Preprocessor(config=cf) - pp.prepare_filenames() - #pp.analyze_subjects(ids_subset=None)#[1,2,3]) - pp.iterate_subjects(ids_subset=None, processes=os.cpu_count()) - pp.save_and_finish() - pp.convert_copy_npz() - - - #patient_id = 17 
- #data = pp.load_subject_data(patient_id) - #data = pp.process_subject_data(data) - - #img = data['img'] - #print("img shape ", img.shape) - #print("seg shape ", data['seg'].shape) - #label_remap = {0:0} - #label_remap.update({roi_id : 1 for roi_id in range(1,5)}) - #plg.view_slices(cf, img[0], data['seg'], instance_labels=True, - # out_dir="experiments/dev/ex_slices.png") - - mins, secs = divmod((time.time() - stime), 60) - h, mins = divmod(mins, 60) - t = "{:d}h:{:02d}m:{:02d}s".format(int(h), int(mins), int(secs)) - print("Prepro program runtime: {}".format(t)) diff --git a/datasets/toy/configs.py b/datasets/toy/configs.py index 9c39db3..b4759c7 100644 --- a/datasets/toy/configs.py +++ b/datasets/toy/configs.py @@ -1,495 +1,495 @@ #!/usr/bin/env python # Copyright 2019 Division of Medical Image Computing, German Cancer Research Center (DKFZ). # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== import sys import os sys.path.append(os.path.dirname(os.path.realpath(__file__))) import numpy as np from default_configs import DefaultConfigs from collections import namedtuple boxLabel = namedtuple('boxLabel', ["name", "color"]) Label = namedtuple("Label", ['id', 'name', 'shape', 'radius', 'color', 'regression', 'ambiguities', 'gt_distortion']) binLabel = namedtuple("binLabel", ['id', 'name', 'color', 'bin_vals']) class Configs(DefaultConfigs): def __init__(self, server_env=None): super(Configs, self).__init__(server_env) ######################### # Prepro # ######################### self.pp_rootdir = os.path.join('/mnt/HDD2TB/Documents/data/toy', "cyl1ps_dev") self.pp_npz_dir = self.pp_rootdir+"_npz" self.pre_crop_size = [320,320,8] #y,x,z; determines pp data shape (2D easily implementable, but only 3D for now) self.min_2d_radius = 6 #in pixels self.n_train_samples, self.n_test_samples = 80, 80 # not actually real one-hot encoding (ohe) but contains more info: roi-overlap only within classes. self.pp_create_ohe_seg = False self.pp_empty_samples_ratio = 0.1 self.pp_place_radii_mid_bin = True self.pp_only_distort_2d = True # outer-most intensity of blurred radii, relative to inner-object intensity. <1 for decreasing, > 1 for increasing. # e.g.: setting 0.1 means blurred edge has min intensity 10% as large as inner-object intensity. self.pp_blur_min_intensity = 0.2 self.max_instances_per_sample = 1 #how many max instances over all classes per sample (img if 2d, vol if 3d) self.max_instances_per_class = self.max_instances_per_sample # how many max instances per image per class self.noise_scale = 0. # std-dev of gaussian noise self.ambigs_sampling = "gaussian" #"gaussian" or "uniform" """ radius_calib: gt distort for calibrating uncertainty. Range of gt distortion is inferable from image by distinguishing it from the rest of the object. blurring width around edge will be shifted so that symmetric rel to orig radius. 
blurring scale: if self.ambigs_sampling is uniform, distribution's non-zero range (b-a) will be sqrt(12)*scale since uniform dist has variance (b-a)²/12. b,a will be placed symmetrically around unperturbed radius. if sampling is gaussian, then scale parameter sets one std dev, i.e., blurring width will be orig_radius * std_dev * 2. """ self.ambiguities = { #set which classes to apply which ambs to below in class labels #choose out of: 'outer_radius', 'inner_radius', 'radii_relations'. #kind #probability #scale (gaussian std, relative to unperturbed value) #"outer_radius": (1., 0.5), #"outer_radius_xy": (1., 0.5), #"inner_radius": (0.5, 0.1), #"radii_relations": (0.5, 0.1), "radius_calib": (1., 1./6) } # shape choices: 'cylinder', 'block' self.pp_classes = [Label(1, 'cylinder', 'cylinder', ((6,6,1),(40,40,8)), (*self.blue, 1.), "radius_2d", (), ('radius_calib',)), #Label(2, 'block', 'block', ((6,6,1),(40,40,8)), (*self.aubergine,1.), "radii_2d", (), ('radius_calib',)) ] ######################### # I/O # ######################### - #self.data_sourcedir = '/mnt/HDD2TB/Documents/data/toy/cyl1ps_dev' - self.data_sourcedir = '/mnt/HDD2TB/Documents/data/toy/cyl1ps_exact' - #self.data_sourcedir = '/mnt/HDD2TB/Documents/data/toy/cyl1ps_ambig_beyond_bin' + self.data_sourcedir = '/mnt/HDD2TB/Documents/data/toy/cyl1ps_dev' + #self.data_sourcedir = '/mnt/HDD2TB/Documents/data/toy/cyl1ps_exact' + self.data_sourcedir = '/mnt/HDD2TB/Documents/data/toy/cyl1ps_ambig_beyond_bin' if server_env: #self.data_sourcedir = '/datasets/data_ramien/toy/cyl1ps_exact_npz' self.data_sourcedir = '/datasets/data_ramien/toy/cyl1ps_ambig_beyond_bin_npz' self.test_data_sourcedir = os.path.join(self.data_sourcedir, 'test') self.data_sourcedir = os.path.join(self.data_sourcedir, "train") self.info_df_name = 'info_df.pickle' # one out of ['mrcnn', 'retina_net', 'retina_unet', 'detection_unet', 'ufrcnn', 'detection_fpn']. - self.model = 'retina_net' + self.model = 'retina_unet' self.model_path = 'models/{}.py'.format(self.model if not 'retina' in self.model else 'retina_net') self.model_path = os.path.join(self.source_dir, self.model_path) ######################### # Architecture # ######################### # one out of [2, 3]. dimension the model operates in. 
- self.dim = 2 + self.dim = 3 # 'class', 'regression', 'regression_bin', 'regression_ken_gal' # currently only tested mode is a single-task at a time (i.e., only one task in below list) # but, in principle, tasks could be combined (e.g., object classes and regression per class) - self.prediction_tasks = ['class',] + self.prediction_tasks = ['class'] self.start_filts = 48 if self.dim == 2 else 18 self.end_filts = self.start_filts * 4 if self.dim == 2 else self.start_filts * 2 self.res_architecture = 'resnet50' # 'resnet101' , 'resnet50' self.norm = 'instance_norm' # one of None, 'instance_norm', 'batch_norm' self.relu = 'relu' # one of 'xavier_uniform', 'xavier_normal', or 'kaiming_normal', None (=default = 'kaiming_uniform') self.weight_init = None self.regression_n_features = 1 # length of regressor target vector ######################### # Data Loader # ######################### - self.num_epochs = 32 - self.num_train_batches = 120 if self.dim == 2 else 80 + self.num_epochs = 1 + self.num_train_batches = 10 if self.dim == 2 else 16 self.batch_size = 16 if self.dim == 2 else 8 self.n_cv_splits = 4 # select modalities from preprocessed data self.channels = [0] self.n_channels = len(self.channels) # which channel (mod) to show as bg in plotting, will be extra added to batch if not in self.channels self.plot_bg_chan = 0 self.crop_margin = [20, 20, 1] # has to be smaller than respective patch_size//2 self.patch_size_2D = self.pre_crop_size[:2] self.patch_size_3D = self.pre_crop_size[:2]+[8] # patch_size to be used for training. pre_crop_size is the patch_size before data augmentation. self.patch_size = self.patch_size_2D if self.dim == 2 else self.patch_size_3D # ratio of free sampled batch elements before class balancing is triggered # (>0 to include "empty"/background patches.) self.batch_random_ratio = 0.2 self.balance_target = "class_targets" if 'class' in self.prediction_tasks else "rg_bin_targets" self.observables_patient = [] self.observables_rois = [] self.seed = 3 #for generating folds ############################# # Colors, Classes, Legends # ############################# - self.plot_frequency = 1 + self.plot_frequency = 5 binary_bin_labels = [binLabel(1, 'r<=25', (*self.green, 1.), (1,25)), binLabel(2, 'r>25', (*self.red, 1.), (25,))] quintuple_bin_labels = [binLabel(1, 'r2-10', (*self.green, 1.), (2,10)), binLabel(2, 'r10-20', (*self.yellow, 1.), (10,20)), binLabel(3, 'r20-30', (*self.orange, 1.), (20,30)), binLabel(4, 'r30-40', (*self.bright_red, 1.), (30,40)), binLabel(5, 'r>40', (*self.red, 1.), (40,))] # choose here if to do 2-way or 5-way regression-bin classification task_spec_bin_labels = quintuple_bin_labels self.class_labels = [ # regression: regression-task label, either value or "(x,y,z)_radius" or "radii". # ambiguities: name of above defined ambig to apply to image data (not gt); need to be iterables! # gt_distortion: name of ambig to apply to gt only; needs to be iterable! 
# #id #name #shape #radius #color #regression #ambiguities #gt_distortion Label( 0, 'bg', None, (0, 0, 0), (*self.white, 0.), (0, 0, 0), (), ())] if "class" in self.prediction_tasks: self.class_labels += self.pp_classes else: self.class_labels += [Label(1, 'object', 'object', ('various',), (*self.orange, 1.), ('radius_2d',), ("various",), ('various',))] if any(['regression' in task for task in self.prediction_tasks]): self.bin_labels = [binLabel(0, 'bg', (*self.white, 1.), (0,))] self.bin_labels += task_spec_bin_labels self.bin_id2label = {label.id: label for label in self.bin_labels} bins = [(min(label.bin_vals), max(label.bin_vals)) for label in self.bin_labels] self.bin_id2rg_val = {ix: [np.mean(bin)] for ix, bin in enumerate(bins)} self.bin_edges = [(bins[i][1] + bins[i + 1][0]) / 2 for i in range(len(bins) - 1)] self.bin_dict = {label.id: label.name for label in self.bin_labels if label.id != 0} if self.class_specific_seg: self.seg_labels = self.class_labels self.box_type2label = {label.name: label for label in self.box_labels} self.class_id2label = {label.id: label for label in self.class_labels} self.class_dict = {label.id: label.name for label in self.class_labels if label.id != 0} self.seg_id2label = {label.id: label for label in self.seg_labels} self.cmap = {label.id: label.color for label in self.seg_labels} self.plot_prediction_histograms = True self.plot_stat_curves = False self.has_colorchannels = False self.plot_class_ids = True self.num_classes = len(self.class_dict) self.num_seg_classes = len(self.seg_labels) ######################### # Data Augmentation # ######################### self.do_aug = True self.da_kwargs = { 'mirror': True, 'mirror_axes': tuple(np.arange(0, self.dim, 1)), 'do_elastic_deform': False, 'alpha': (500., 1500.), 'sigma': (40., 45.), 'do_rotation': False, 'angle_x': (0., 2 * np.pi), 'angle_y': (0., 0), 'angle_z': (0., 0), 'do_scale': False, 'scale': (0.8, 1.1), 'random_crop': False, 'rand_crop_dist': (self.patch_size[0] / 2. - 3, self.patch_size[1] / 2. - 3), 'border_mode_data': 'constant', 'border_cval_data': 0, 'order_data': 1 } if self.dim == 3: self.da_kwargs['do_elastic_deform'] = False self.da_kwargs['angle_x'] = (0, 0.0) self.da_kwargs['angle_y'] = (0, 0.0) # must be 0!! self.da_kwargs['angle_z'] = (0., 2 * np.pi) ######################### # Schedule / Selection # ######################### # decide whether to validate on entire patient volumes (like testing) or sampled patches (like training) # the former is morge accurate, while the latter is faster (depending on volume size) - self.val_mode = 'val_sampling' # one of 'val_sampling' , 'val_patient' + self.val_mode = 'val_patient' # one of 'val_sampling' , 'val_patient' if self.val_mode == 'val_patient': self.max_val_patients = 220 # if 'all' iterates over entire val_set once. if self.val_mode == 'val_sampling': - self.num_val_batches = 25 if self.dim==2 else 15 + self.num_val_batches = 200 if self.dim==2 else 100 self.save_n_models = 2 self.min_save_thresh = 1 if self.dim == 2 else 1 # =wait time in epochs if "class" in self.prediction_tasks: self.model_selection_criteria = {name + "_ap": 1. 
for name in self.class_dict.values()} elif any("regression" in task for task in self.prediction_tasks): self.model_selection_criteria = {name + "_ap": 0.2 for name in self.class_dict.values()} self.model_selection_criteria.update({name + "_avp": 0.8 for name in self.class_dict.values()}) self.lr_decay_factor = 0.5 self.scheduling_patience = int(self.num_epochs / 5) self.weight_decay = 1e-5 self.clip_norm = None # number or None ######################### # Testing / Plotting # ######################### self.test_aug_axes = (0,1,(0,1)) # None or list: choices are 0,1,(0,1) self.held_out_test_set = True self.max_test_patients = "all" # number or "all" for all self.test_against_exact_gt = not 'exact' in self.data_sourcedir self.val_against_exact_gt = False # True is an unrealistic --> irrelevant scenario. self.report_score_level = ['rois'] # 'patient' or 'rois' (incl) self.patient_class_of_interest = 1 self.patient_bin_of_interest = 2 self.eval_bins_separately = False#"additionally" if not 'class' in self.prediction_tasks else False self.metrics = ['ap', 'auc', 'dice'] if any(['regression' in task for task in self.prediction_tasks]): self.metrics += ['avp', 'rg_MAE_weighted', 'rg_MAE_weighted_tp', 'rg_bin_accuracy_weighted', 'rg_bin_accuracy_weighted_tp'] if 'aleatoric' in self.model: self.metrics += ['rg_uncertainty', 'rg_uncertainty_tp', 'rg_uncertainty_tp_weighted'] self.evaluate_fold_means = True self.ap_match_ious = [0.5] # threshold(s) for considering a prediction as true positive self.min_det_thresh = 0.3 - self.model_max_iou_resolution = 0.2 + self.model_max_iou_resolution = 0.9 # aggregation method for test and val_patient predictions. # wbc = weighted box clustering as in https://arxiv.org/pdf/1811.08661.pdf, # nms = standard non-maximum suppression, or None = no clustering self.clustering = 'wbc' # iou thresh (exclusive!) for regarding two preds as concerning the same ROI self.clustering_iou = self.model_max_iou_resolution # has to be larger than desired possible overlap iou of model predictions self.merge_2D_to_3D_preds = False self.merge_3D_iou = self.model_max_iou_resolution self.n_test_plots = 1 # per fold and rank self.test_n_epochs = self.save_n_models # should be called n_test_ens, since is number of models to ensemble over during testing # is multiplied by (1 + nr of test augs) #self.losses_to_monitor += ['class_loss', 'rg_loss'] ######################### # Assertions # ######################### if not 'class' in self.prediction_tasks: assert self.num_classes == 1 ######################### # Add model specifics # ######################### {'mrcnn': self.add_mrcnn_configs, 'mrcnn_aleatoric': self.add_mrcnn_configs, 'retina_net': self.add_mrcnn_configs, 'retina_unet': self.add_mrcnn_configs, 'detection_unet': self.add_det_unet_configs, 'detection_fpn': self.add_det_fpn_configs }[self.model]() def rg_val_to_bin_id(self, rg_val): #only meant for isotropic radii!! 
# only 2D radii (x and y dims) or 1D (x or y) are expected return np.round(np.digitize(rg_val, self.bin_edges).mean()) - def add_det_fpn_configs(self): self.learning_rate = [5 * 1e-4] * self.num_epochs self.dynamic_lr_scheduling = True self.scheduling_criterion = 'torch_loss' self.scheduling_mode = 'min' if "loss" in self.scheduling_criterion else 'max' self.n_roi_candidates = 4 if self.dim == 2 else 6 # max number of roi candidates to identify per image (slice in 2D, volume in 3D) # loss mode: either weighted cross entropy ('wce'), batch-wise dice loss ('dice), or the sum of both ('dice_wce') self.seg_loss_mode = 'wce' self.wce_weights = [1] * self.num_seg_classes if 'dice' in self.seg_loss_mode else [0.1, 1, 1] self.fp_dice_weight = 1 if self.dim == 2 else 1 # if <1, false positive predictions in foreground are penalized less. self.detection_min_confidence = 0.05 # how to determine score of roi: 'max' or 'median' self.score_det = 'max' def add_det_unet_configs(self): self.learning_rate = [5 * 1e-4] * self.num_epochs self.dynamic_lr_scheduling = True self.scheduling_criterion = "torch_loss" self.scheduling_mode = 'min' if "loss" in self.scheduling_criterion else 'max' # max number of roi candidates to identify per image (slice in 2D, volume in 3D) self.n_roi_candidates = 4 if self.dim == 2 else 6 # loss mode: either weighted cross entropy ('wce'), batch-wise dice loss ('dice), or the sum of both ('dice_wce') self.seg_loss_mode = 'wce' self.wce_weights = [1] * self.num_seg_classes if 'dice' in self.seg_loss_mode else [0.1, 1, 1] # if <1, false positive predictions in foreground are penalized less. self.fp_dice_weight = 1 if self.dim == 2 else 1 self.detection_min_confidence = 0.05 # how to determine score of roi: 'max' or 'median' self.score_det = 'max' self.init_filts = 32 self.kernel_size = 3 # ks for horizontal, normal convs self.kernel_size_m = 2 # ks for max pool self.pad = "same" # "same" or integer, padding of horizontal convs def add_mrcnn_configs(self): self.learning_rate = [1e-4] * self.num_epochs self.dynamic_lr_scheduling = True # with scheduler set in exec self.scheduling_criterion = max(self.model_selection_criteria, key=self.model_selection_criteria.get) self.scheduling_mode = 'min' if "loss" in self.scheduling_criterion else 'max' # number of classes for network heads: n_foreground_classes + 1 (background) self.head_classes = self.num_classes + 1 if 'class' in self.prediction_tasks else 2 # feed +/- n neighbouring slices into channel dimension. set to None for no context. self.n_3D_context = None if self.n_3D_context is not None and self.dim == 2: self.n_channels *= (self.n_3D_context * 2 + 1) self.detect_while_training = True # disable the re-sampling of mask proposals to original size for speed-up. # since evaluation is detection-driven (box-matching) and not instance segmentation-driven (iou-matching), # mask outputs are optional. self.return_masks_in_train = True self.return_masks_in_val = True self.return_masks_in_test = True # feature map strides per pyramid level are inferred from architecture. anchor scales are set accordingly. self.backbone_strides = {'xy': [4, 8, 16, 32], 'z': [1, 2, 4, 8]} # anchor scales are chosen according to expected object sizes in data set. Default uses only one anchor scale # per pyramid level. (outer list are pyramid levels (corresponding to BACKBONE_STRIDES), inner list are scales per level.) 
self.rpn_anchor_scales = {'xy': [[4], [8], [16], [32]], 'z': [[1], [2], [4], [8]]} # choose which pyramid levels to extract features from: P2: 0, P3: 1, P4: 2, P5: 3. self.pyramid_levels = [0, 1, 2, 3] # number of feature maps in rpn. typically lowered in 3D to save gpu-memory. self.n_rpn_features = 512 if self.dim == 2 else 64 # anchor ratios and strides per position in feature maps. self.rpn_anchor_ratios = [0.5, 1., 2.] self.rpn_anchor_stride = 1 # Threshold for first stage (RPN) non-maximum suppression (NMS): LOWER == HARDER SELECTION self.rpn_nms_threshold = max(0.8, self.model_max_iou_resolution) # loss sampling settings. self.rpn_train_anchors_per_image = 4 self.train_rois_per_image = 6 # per batch_instance self.roi_positive_ratio = 0.5 self.anchor_matching_iou = 0.8 # k negative example candidates are drawn from a pool of size k*shem_poolsize (stochastic hard-example mining), # where k<=#positive examples. self.shem_poolsize = 2 self.pool_size = (7, 7) if self.dim == 2 else (7, 7, 3) self.mask_pool_size = (14, 14) if self.dim == 2 else (14, 14, 5) self.mask_shape = (28, 28) if self.dim == 2 else (28, 28, 10) self.rpn_bbox_std_dev = np.array([0.1, 0.1, 0.1, 0.2, 0.2, 0.2]) self.bbox_std_dev = np.array([0.1, 0.1, 0.1, 0.2, 0.2, 0.2]) self.window = np.array([0, 0, self.patch_size[0], self.patch_size[1], 0, self.patch_size_3D[2]]) self.scale = np.array([self.patch_size[0], self.patch_size[1], self.patch_size[0], self.patch_size[1], self.patch_size_3D[2], self.patch_size_3D[2]]) # y1,x1,y2,x2,z1,z2 if self.dim == 2: self.rpn_bbox_std_dev = self.rpn_bbox_std_dev[:4] self.bbox_std_dev = self.bbox_std_dev[:4] self.window = self.window[:4] self.scale = self.scale[:4] self.plot_y_max = 1.5 self.n_plot_rpn_props = 5 if self.dim == 2 else 30 # per batch_instance (slice in 2D / patient in 3D) # pre-selection in proposal-layer (stage 1) for NMS-speedup. applied per batch element. self.pre_nms_limit = 2000 if self.dim == 2 else 4000 # n_proposals to be selected after NMS per batch element. too high numbers blow up memory if "detect_while_training" is True, # since proposals of the entire batch are forwarded through second stage as one "batch". self.roi_chunk_size = 1300 if self.dim == 2 else 500 self.post_nms_rois_training = 200 * (self.head_classes-1) if self.dim == 2 else 400 self.post_nms_rois_inference = 200 * (self.head_classes-1) # Final selection of detections (refine_detections) self.model_max_instances_per_batch_element = 9 if self.dim == 2 else 18 # per batch element and class. self.detection_nms_threshold = self.model_max_iou_resolution # needs to be > 0, otherwise all predictions are one cluster. 
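# Illustrative aside, not part of configs.py or of this diff: a minimal, self-contained sketch of
# plain greedy NMS, added only to illustrate the comment above on detection_nms_threshold -- with an
# IoU threshold of 0, any overlap at all suppresses a box, so every group of overlapping predictions
# collapses into a single surviving detection. The helper name `nms_2d` and the [y1, x1, y2, x2]
# box format are assumptions for the sketch, not toolkit API.
import numpy as np

def nms_2d(boxes, scores, iou_thresh):
    """Greedy non-maximum suppression; returns indices of kept boxes."""
    boxes, scores = np.asarray(boxes, dtype=float), np.asarray(scores, dtype=float)
    order = np.argsort(scores)[::-1]  # visit boxes from highest to lowest score
    keep = []
    while order.size > 0:
        i = order[0]
        keep.append(int(i))
        rest = order[1:]
        # intersection of the kept box with all remaining boxes
        yy1 = np.maximum(boxes[i, 0], boxes[rest, 0])
        xx1 = np.maximum(boxes[i, 1], boxes[rest, 1])
        yy2 = np.minimum(boxes[i, 2], boxes[rest, 2])
        xx2 = np.minimum(boxes[i, 3], boxes[rest, 3])
        inter = np.clip(yy2 - yy1, 0, None) * np.clip(xx2 - xx1, 0, None)
        area_i = (boxes[i, 2] - boxes[i, 0]) * (boxes[i, 3] - boxes[i, 1])
        area_r = (boxes[rest, 2] - boxes[rest, 0]) * (boxes[rest, 3] - boxes[rest, 1])
        iou = inter / (area_i + area_r - inter)
        # keep only boxes whose overlap with the survivor is at or below the threshold;
        # with iou_thresh == 0, any overlap at all suppresses a box
        order = rest[iou <= iou_thresh]
    return keep

# e.g. nms_2d([[0, 0, 10, 10], [1, 1, 11, 11], [50, 50, 60, 60]], [0.9, 0.8, 0.7], 0.0)
# returns [0, 2]: the two overlapping boxes collapse to one, the disjoint box survives.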
self.model_min_confidence = 0.2 # iou for nms in box refining (directly after heads), should be >0 since ths>=x in mrcnn.py if self.dim == 2: self.backbone_shapes = np.array( [[int(np.ceil(self.patch_size[0] / stride)), int(np.ceil(self.patch_size[1] / stride))] for stride in self.backbone_strides['xy']]) else: self.backbone_shapes = np.array( [[int(np.ceil(self.patch_size[0] / stride)), int(np.ceil(self.patch_size[1] / stride)), int(np.ceil(self.patch_size[2] / stride_z))] for stride, stride_z in zip(self.backbone_strides['xy'], self.backbone_strides['z'] )]) if self.model == 'retina_net' or self.model == 'retina_unet': - # whether to use focal loss or SHEM for loss-sample selection + # whether to use focal loss (True) or hard-example mining (set focal_loss to False) self.focal_loss = True + # implement extra anchor-scales according to https://arxiv.org/abs/1708.02002 self.rpn_anchor_scales['xy'] = [[ii[0], ii[0] * (2 ** (1 / 3)), ii[0] * (2 ** (2 / 3))] for ii in self.rpn_anchor_scales['xy']] self.rpn_anchor_scales['z'] = [[ii[0], ii[0] * (2 ** (1 / 3)), ii[0] * (2 ** (2 / 3))] for ii in self.rpn_anchor_scales['z']] self.n_anchors_per_pos = len(self.rpn_anchor_ratios) * 3 #self.n_rpn_features = 256 if self.dim == 2 else 64 # pre-selection of detections for NMS-speedup. per entire batch. self.pre_nms_limit = (500 if self.dim == 2 else 6250) * self.batch_size # anchor matching iou is lower than in Mask R-CNN according to https://arxiv.org/abs/1708.02002 self.anchor_matching_iou = 0.7 if self.model == 'retina_unet': self.operate_stride1 = True diff --git a/default_configs.py b/default_configs.py index c2d16e2..3d9ccdd 100644 --- a/default_configs.py +++ b/default_configs.py @@ -1,202 +1,202 @@ #!/usr/bin/env python # Copyright 2019 Division of Medical Image Computing, German Cancer Research Center (DKFZ). # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Default Configurations script. Avoids changing configs of all experiments if general settings are to be changed.""" import os from collections import namedtuple boxLabel = namedtuple('boxLabel', ["name", "color"]) class DefaultConfigs: def __init__(self, server_env=None, dim=2): self.server_env = server_env self.cuda_benchmark = True ######################### # I/O # ######################### self.dim = dim # int [0 < dataset_size]. select n patients from dataset for prototyping. self.select_prototype_subset = None # some default paths. self.source_dir = os.path.dirname(os.path.realpath(__file__)) # current dir. self.backbone_path = os.path.join(self.source_dir, 'models/backbone.py') self.input_df_name = 'info_df.pickle' if server_env: self.select_prototype_subset = None ######################### # Colors/legends # ######################### # in part from solarized theme. self.black = (0.1, 0.05, 0.) 
self.gray = (0.514, 0.580, 0.588) self.beige = (1., 1., 0.85) self.white = (0.992, 0.965, 0.890) self.green = (0.659, 0.792, 0.251) # [168, 202, 64] self.dark_green = (0.522, 0.600, 0.000) # [133.11, 153. , 0. ] self.cyan = (0.165, 0.631, 0.596) # [ 42.075, 160.905, 151.98 ] self.bright_blue = (0.85, 0.95, 1.) self.blue = (0.149, 0.545, 0.824) # [ 37.995, 138.975, 210.12 ] - self.dkfz_blue = (0, 75. / 255, 142. / 255) self.dark_blue = (0.027, 0.212, 0.259) # [ 6.885, 54.06 , 66.045] self.purple = (0.424, 0.443, 0.769) # [108.12 , 112.965, 196.095] self.aubergine = (0.62, 0.21, 0.44) # [ 157, 53 , 111] self.magenta = (0.827, 0.212, 0.510) # [210.885, 54.06 , 130.05 ] self.coral = (1., 0.251, 0.4) # [255,64,102] self.bright_red = (1., 0.15, 0.1) # [255, 38.25, 25.5] self.brighter_red = (0.863, 0.196, 0.184) # [220.065, 49.98 , 46.92 ] self.red = (0.87, 0.05, 0.01) # [ 223, 13, 2] self.dark_red = (0.6, 0.04, 0.005) self.orange = (0.91, 0.33, 0.125) # [ 232.05 , 84.15 , 31.875] self.dark_orange = (0.796, 0.294, 0.086) #[202.98, 74.97, 21.93] self.yellow = (0.95, 0.9, 0.02) # [ 242.25, 229.5 , 5.1 ] self.dark_yellow = (0.710, 0.537, 0.000) # [181.05 , 136.935, 0. ] self.color_palette = [self.blue, self.dark_blue, self.aubergine, self.green, self.yellow, self.orange, self.red, self.cyan, self.black] self.box_labels = [ # name color boxLabel("det", self.blue), boxLabel("prop", self.gray), boxLabel("pos_anchor", self.cyan), boxLabel("neg_anchor", self.cyan), boxLabel("neg_class", self.green), boxLabel("pos_class", self.aubergine), boxLabel("gt", self.red) ] # neg and pos in a medical sense, i.e., pos=positive diagnostic finding self.box_type2label = {label.name: label for label in self.box_labels} self.box_color_palette = {label.name: label.color for label in self.box_labels} # whether the input data is mono-channel or RGB/rgb self.has_colorchannels = False ######################### # Data Loader # ######################### #random seed for fold_generator and batch_generator. self.seed = 0 #number of threads for multithreaded tasks like batch generation, wcs, merge2dto3d self.n_workers = 16 if server_env else os.cpu_count() self.create_bounding_box_targets = True self.class_specific_seg = True # False if self.model=="mrcnn" else True ######################### # Architecture # ######################### self.prediction_tasks = ["class"] # 'class', 'regression_class', 'regression_kendall', 'regression_feindt' self.weight_decay = 0.0 # nonlinearity to be applied after convs with nonlinearity. one of 'relu' or 'leaky_relu' self.relu = 'relu' # if True initializes weights as specified in model script. else use default Pytorch init. self.weight_init = None # if True adds high-res decoder levels to feature pyramid: P1 + P0. (e.g. set to true in retina_unet configs) self.operate_stride1 = False ######################### # Optimization # ######################### self.optimizer = "ADAM" # "ADAM" or "SGD" or implemented additionals ######################### # Schedule # ######################### # number of folds in cross validation. self.n_cv_splits = 5 ######################### # Testing / Plotting # ######################### # perform mirroring at test time. (only XY. Z not done to not blow up predictions times). self.test_aug = True # if True, test data lies in a separate folder and is not part of the cross validation. 
self.held_out_test_set = False # if hold-out test set: eval each fold's parameters separately on the test set self.eval_test_fold_wise = True # if held_out_test_set provided, ensemble predictions over models of all trained cv-folds. self.ensemble_folds = False # what metrics to evaluate self.metrics = ['ap'] # whether to evaluate fold means when evaluating over more than one fold self.evaluate_fold_means = False # how often (in nr of epochs) to plot example batches during train/val self.plot_frequency = 1 # color specifications for all box_types in prediction_plot. self.box_color_palette = {'det': 'b', 'gt': 'r', 'neg_class': 'purple', 'prop': 'w', 'pos_class': 'g', 'pos_anchor': 'c', 'neg_anchor': 'c'} # scan over confidence score in evaluation to optimize it on the validation set. self.scan_det_thresh = False # plots roc-curves / prc-curves in evaluation. self.plot_stat_curves = False # if True: evaluate average precision per patient id and average over per-pid results, # instead of computing one ap over whole data set. self.per_patient_ap = False # threshold for clustering 2D box predictions to 3D Cubes. Overlap is computed in XY. self.merge_3D_iou = 0.1 ######################### # MRCNN # ######################### # if True, mask loss is not applied. used for data sets, where no pixel-wise annotations are provided. self.frcnn_mode = False self.return_masks_in_train = False # if True, unmolds masks in Mask R-CNN to full-res for plotting/monitoring. self.return_masks_in_val = False self.return_masks_in_test = False # needed if doing instance segmentation. evaluation not yet implemented. # add P6 to Feature Pyramid Network. self.sixth_pooling = False - ######################### # RetinaNet # ######################### + # whether to use focal loss (True) or hard-example mining (set focal_loss to False) self.focal_loss = False self.focal_loss_gamma = 2. + diff --git a/exec.py b/exec.py index 413db13..6dc00b5 100644 --- a/exec.py +++ b/exec.py @@ -1,348 +1,348 @@ #!/usr/bin/env python # Copyright 2019 Division of Medical Image Computing, German Cancer Research Center (DKFZ). # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """ execution script. this where all routines come together and the only script you need to call. refer to parse args below to see options for execution. """ import plotting as plg import os import warnings import argparse import time import torch import utils.exp_utils as utils from evaluator import Evaluator from predictor import Predictor for msg in ["Attempting to set identical bottom==top results", "This figure includes Axes that are not compatible with tight_layout", "Data has no positive values, and therefore cannot be log-scaled.", ".*invalid value encountered in true_divide.*"]: warnings.filterwarnings("ignore", msg) def train(cf, logger): """ performs the training routine for a given fold. saves plots and selected parameters to the experiment dir specified in the configs. 
""" logger.info('performing training in {}D over fold {} on experiment {} with model {}'.format( cf.dim, cf.fold, cf.exp_dir, cf.model)) logger.time("train_val") # -------------- inits and settings ----------------- net = model.net(cf, logger).cuda() if cf.optimizer == "ADAM": optimizer = torch.optim.Adam(net.parameters(), lr=cf.learning_rate[0], weight_decay=cf.weight_decay) elif cf.optimizer == "SGD": optimizer = torch.optim.SGD(net.parameters(), lr=cf.learning_rate[0], weight_decay=cf.weight_decay, momentum=0.3) if cf.dynamic_lr_scheduling: scheduler = torch.optim.lr_scheduler.ReduceLROnPlateau(optimizer, mode=cf.scheduling_mode, factor=cf.lr_decay_factor, patience=cf.scheduling_patience) model_selector = utils.ModelSelector(cf, logger) starting_epoch = 1 if cf.resume_from_checkpoint: starting_epoch = utils.load_checkpoint(cf.resume_from_checkpoint, net, optimizer) logger.info('resumed from checkpoint {} at epoch {}'.format(cf.resume_from_checkpoint, starting_epoch)) # prepare monitoring monitor_metrics = utils.prepare_monitoring(cf) logger.info('loading dataset and initializing batch generators...') batch_gen = data_loader.get_train_generators(cf, logger) # -------------- training ----------------- for epoch in range(starting_epoch, cf.num_epochs + 1): logger.info('starting training epoch {}/{}'.format(epoch, cf.num_epochs)) logger.time("train_epoch") net.train() train_results_list = [] train_evaluator = Evaluator(cf, logger, mode='train') for i in range(cf.num_train_batches): logger.time("train_batch_loadfw") batch = next(batch_gen['train']) batch_gen['train'].generator.stats['roi_counts'] += batch['roi_counts'] batch_gen['train'].generator.stats['empty_samples_count'] += batch['empty_samples_count'] logger.time("train_batch_loadfw") logger.time("train_batch_netfw") results_dict = net.train_forward(batch) logger.time("train_batch_netfw") logger.time("train_batch_bw") optimizer.zero_grad() results_dict['torch_loss'].backward() if cf.clip_norm: torch.nn.utils.clip_grad_norm_(net.parameters(), cf.clip_norm, norm_type=2) #gradient clipping optimizer.step() train_results_list.append(({k:v for k,v in results_dict.items() if k != "seg_preds"}, batch["pid"])) #slim res dict if not cf.server_env: print("\rFinished training batch " + "{}/{} in {:.1f}s ({:.2f}/{:.2f} forw load/net, {:.2f} backw).".format(i+1, cf.num_train_batches, logger.get_time("train_batch_loadfw")+ logger.get_time("train_batch_netfw") +logger.time("train_batch_bw"), logger.get_time("train_batch_loadfw",reset=True), logger.get_time("train_batch_netfw", reset=True), logger.get_time("train_batch_bw", reset=True)), end="", flush=True) print() #--------------- train eval ---------------- if (epoch-1)%cf.plot_frequency==0: # view an example batch plg.view_batch(cf, batch, results_dict, has_colorchannels=cf.has_colorchannels, show_gt_labels=True, out_file=os.path.join(cf.plot_dir, 'batch_example_train_{}.png'.format(cf.fold))) logger.time("evals") _, monitor_metrics['train'] = train_evaluator.evaluate_predictions(train_results_list, monitor_metrics['train']) #np_loss, torch_loss = train_loss_running_mean / cf.num_train_batches, monitor_metrics['train']["loss"][-1] #assert np_loss/torch_loss-1<0.005, "{} vs {}".format(np_loss, torch_loss) logger.time("evals") logger.time("train_epoch", toggle=False) del train_results_list #----------- validation ------------ logger.info('starting validation in mode {}.'.format(cf.val_mode)) logger.time("val_epoch") with torch.no_grad(): net.eval() val_results_list = [] val_evaluator = Evaluator(cf, 
logger, mode=cf.val_mode) val_predictor = Predictor(cf, net, logger, mode='val') for i in range(batch_gen['n_val']): logger.time("val_batch") batch = next(batch_gen[cf.val_mode]) if cf.val_mode == 'val_patient': results_dict = val_predictor.predict_patient(batch) elif cf.val_mode == 'val_sampling': results_dict = net.train_forward(batch, is_validation=True) val_results_list.append([results_dict, batch["pid"]]) if not cf.server_env: print("\rFinished validation {} {}/{} in {:.1f}s.".format('patient' if cf.val_mode=='val_patient' else 'batch', i + 1, batch_gen['n_val'], logger.time("val_batch")), end="", flush=True) print() #------------ val eval ------------- logger.time("val_plot") if (epoch - 1) % cf.plot_frequency == 0: plg.view_batch(cf, batch, results_dict, has_colorchannels=cf.has_colorchannels, show_gt_labels=True, out_file=os.path.join(cf.plot_dir, 'batch_example_val_{}.png'.format(cf.fold))) logger.time("val_plot") logger.time("evals") _, monitor_metrics['val'] = val_evaluator.evaluate_predictions(val_results_list, monitor_metrics['val']) model_selector.run_model_selection(net, optimizer, monitor_metrics, epoch) del val_results_list #----------- monitoring ------------- monitor_metrics.update({"lr": {str(g) : group['lr'] for (g, group) in enumerate(optimizer.param_groups)}}) logger.metrics2tboard(monitor_metrics, global_step=epoch) logger.time("evals") logger.info('finished epoch {}/{}, took {:.2f}s. train total: {:.2f}s, average: {:.2f}s. val total: {:.2f}s, average: {:.2f}s.'.format( epoch, cf.num_epochs, logger.get_time("train_epoch")+logger.time("val_epoch"), logger.get_time("train_epoch"), logger.get_time("train_epoch", reset=True)/cf.num_train_batches, logger.get_time("val_epoch"), logger.get_time("val_epoch", reset=True)/batch_gen["n_val"])) logger.info("time for evals: {:.2f}s, val plot {:.2f}s".format(logger.get_time("evals", reset=True), logger.get_time("val_plot", reset=True))) #-------------- scheduling ----------------- if not cf.dynamic_lr_scheduling: for param_group in optimizer.param_groups: param_group['lr'] = cf.learning_rate[epoch-1] else: scheduler.step(monitor_metrics["val"][cf.scheduling_criterion][-1]) logger.time("train_val") logger.info("Training and validating over {} epochs took {}".format(cf.num_epochs, logger.get_time("train_val", format="hms", reset=True))) batch_gen['train'].generator.print_stats(logger, plot=True) def test(cf, logger, max_fold=None): """performs testing for a given fold (or held out set). saves stats in evaluator. 
""" logger.time("test_fold") logger.info('starting testing model of fold {} in exp {}'.format(cf.fold, cf.exp_dir)) net = model.net(cf, logger).cuda() batch_gen = data_loader.get_test_generator(cf, logger) test_predictor = Predictor(cf, net, logger, mode='test') test_results_list = test_predictor.predict_test_set(batch_gen, return_results = not hasattr( cf, "eval_test_separately") or not cf.eval_test_separately) if test_results_list is not None: test_evaluator = Evaluator(cf, logger, mode='test') test_evaluator.evaluate_predictions(test_results_list) test_evaluator.score_test_df(max_fold=max_fold) mins, secs = divmod(logger.get_time("test_fold"), 60) h, mins = divmod(mins, 60) t = "{:d}h:{:02d}m:{:02d}s".format(int(h), int(mins), int(secs)) logger.info('Testing of fold {} took {}.'.format(cf.fold, t)) if __name__ == '__main__': stime = time.time() parser = argparse.ArgumentParser() parser.add_argument('-m', '--mode', type=str, default='train_test', help='one out of: create_exp, analysis, train, train_test, or test') parser.add_argument('-f', '--folds', nargs='+', type=int, default=None, help='None runs over all folds in CV. otherwise specify list of folds.') - parser.add_argument('--exp_dir', type=str, default='/home/gregor/Documents/medicaldetectiontoolkit/datasets/prostate/experiments/dev', + parser.add_argument('--exp_dir', type=str, default='/home/gregor/Documents/RegRCNN/datasets/toy/experiments/dev', help='path to experiment dir. will be created if non existent.') parser.add_argument('--server_env', default=False, action='store_true', help='change IO settings to deploy models on a cluster.') parser.add_argument('--data_dest', type=str, default=None, help="path to final data folder if different from config") parser.add_argument('--use_stored_settings', default=False, action='store_true', help='load configs from existing exp_dir instead of source dir. always done for testing, ' 'but can be set to true to do the same for training. useful in job scheduler environment, ' 'where source code might change before the job actually runs.') parser.add_argument('--resume_from_checkpoint', type=str, default=None, help='path to checkpoint. 
if resuming from checkpoint, the desired fold still needs to be parsed via --folds.') - parser.add_argument('--dataset_name', type=str, default='prostate', help="path to the dataset-specific code in source_dir/datasets") + parser.add_argument('--dataset_name', type=str, default='toy', help="path to the dataset-specific code in source_dir/datasets") parser.add_argument('-d', '--dev', default=False, action='store_true', help="development mode: shorten everything") args = parser.parse_args() args.dataset_name = os.path.join("datasets", args.dataset_name) if not "datasets" in args.dataset_name else args.dataset_name folds = args.folds resume_from_checkpoint = None if args.resume_from_checkpoint in ['None', 'none'] else args.resume_from_checkpoint if args.mode == 'create_exp': cf = utils.prep_exp(args.dataset_name, args.exp_dir, args.server_env, use_stored_settings=False) logger = utils.get_logger(cf.exp_dir, cf.server_env) logger.info('created experiment directory at {}'.format(args.exp_dir)) elif args.mode == 'train' or args.mode == 'train_test': cf = utils.prep_exp(args.dataset_name, args.exp_dir, args.server_env, args.use_stored_settings) if args.dev: folds = [0,1] cf.batch_size, cf.num_epochs, cf.min_save_thresh, cf.save_n_models = 3 if cf.dim==2 else 1, 1, 0, 1 cf.num_train_batches, cf.num_val_batches, cf.max_val_patients = 7, 1, 1 cf.test_n_epochs = cf.save_n_models cf.max_test_patients = 1 torch.backends.cudnn.benchmark = cf.dim==3 else: torch.backends.cudnn.benchmark = cf.cuda_benchmark if args.data_dest is not None: cf.data_dest = args.data_dest logger = utils.get_logger(cf.exp_dir, cf.server_env) data_loader = utils.import_module('data_loader', os.path.join(args.dataset_name, 'data_loader.py')) model = utils.import_module('model', cf.model_path) logger.info("loaded model from {}".format(cf.model_path)) if folds is None: folds = range(cf.n_cv_splits) for fold in folds: """k-fold cross-validation: the dataset is split into k equally-sized folds, one used for validation, one for testing, the rest for training. This loop iterates k-times over the dataset, cyclically moving the splits. k==folds, fold in [0,folds) says which split is used for testing. 
""" cf.fold_dir = os.path.join(cf.exp_dir, 'fold_{}'.format(fold)) cf.fold, logger.fold = fold, fold cf.resume_from_checkpoint = resume_from_checkpoint if not os.path.exists(cf.fold_dir): os.mkdir(cf.fold_dir) train(cf, logger) cf.resume_from_checkpoint = None if args.mode == 'train_test': test(cf, logger) elif args.mode == 'test': cf = utils.prep_exp(args.dataset_name, args.exp_dir, args.server_env, use_stored_settings=True, is_training=False) if args.data_dest is not None: cf.data_dest = args.data_dest logger = utils.get_logger(cf.exp_dir, cf.server_env) data_loader = utils.import_module('data_loader', os.path.join(args.dataset_name, 'data_loader.py')) model = utils.import_module('model', cf.model_path) logger.info("loaded model from {}".format(cf.model_path)) fold_dirs = sorted([os.path.join(cf.exp_dir, f) for f in os.listdir(cf.exp_dir) if os.path.isdir(os.path.join(cf.exp_dir, f)) and f.startswith("fold")]) if folds is None: folds = range(cf.n_cv_splits) if args.dev: folds = folds[:2] cf.batch_size, cf.num_test_patients, cf.test_n_epochs = 1 if cf.dim==2 else 1, 2, 2 else: torch.backends.cudnn.benchmark = cf.cuda_benchmark for fold in folds: cf.fold = fold cf.fold_dir = os.path.join(cf.exp_dir, 'fold_{}'.format(cf.fold)) if cf.fold_dir in fold_dirs: test(cf, logger, max_fold=max([int(f[-1]) for f in fold_dirs])) else: logger.info("Skipping fold {} since no model parameters found.".format(fold)) # load raw predictions saved by predictor during testing, run aggregation algorithms and evaluation. elif args.mode == 'analysis': """ analyse already saved predictions. """ cf = utils.prep_exp(args.dataset_name, args.exp_dir, args.server_env, use_stored_settings=True, is_training=False) logger = utils.get_logger(cf.exp_dir, cf.server_env) if cf.held_out_test_set and not cf.eval_test_fold_wise: predictor = Predictor(cf, net=None, logger=logger, mode='analysis') results_list = predictor.load_saved_predictions() logger.info('starting evaluation...') cf.fold = 0 evaluator = Evaluator(cf, logger, mode='test') evaluator.evaluate_predictions(results_list) evaluator.score_test_df(max_fold=0) else: fold_dirs = sorted([os.path.join(cf.exp_dir, f) for f in os.listdir(cf.exp_dir) if os.path.isdir(os.path.join(cf.exp_dir, f)) and f.startswith("fold")]) if args.dev: fold_dirs = fold_dirs[:1] if folds is None: folds = range(cf.n_cv_splits) for fold in folds: cf.fold = fold cf.fold_dir = os.path.join(cf.exp_dir, 'fold_{}'.format(cf.fold)) if cf.fold_dir in fold_dirs: predictor = Predictor(cf, net=None, logger=logger, mode='analysis') results_list = predictor.load_saved_predictions() # results_list[x][1] is pid, results_list[x][0] is list of len samples-per-patient, each entry hlds # list of boxes per that sample, i.e., len(results_list[x][y][0]) would be nr of boxes in sample y of patient x logger.info('starting evaluation...') evaluator = Evaluator(cf, logger, mode='test') evaluator.evaluate_predictions(results_list) max_fold = max([int(f[-1]) for f in fold_dirs]) evaluator.score_test_df(max_fold=max_fold) else: logger.info("Skipping fold {} since no model parameters found.".format(fold)) else: raise ValueError('mode "{}" specified in args is not implemented.'.format(args.mode)) mins, secs = divmod((time.time() - stime), 60) h, mins = divmod(mins, 60) t = "{:d}h:{:02d}m:{:02d}s".format(int(h), int(mins), int(secs)) logger.info("{} total runtime: {}".format(os.path.split(__file__)[1], t)) del logger torch.cuda.empty_cache() diff --git a/graphics_generation.py b/graphics_generation.py deleted file mode 100644 
index 6c59a0c..0000000 --- a/graphics_generation.py +++ /dev/null @@ -1,1932 +0,0 @@ -""" -Created at 07/03/19 11:42 -@author: gregor -""" -import plotting as plg -import matplotlib.lines as mlines - -import os -import sys -import multiprocessing -from copy import deepcopy -import logging -import time - -import numpy as np -import pandas as pd -from scipy.stats import norm -from sklearn.metrics import confusion_matrix - -import utils.exp_utils as utils -import utils.model_utils as mutils -import utils.dataloader_utils as dutils -from utils.dataloader_utils import ConvertSegToBoundingBoxCoordinates - -import predictor as predictor_file -import evaluator as evaluator_file - - - -class NoDaemonProcess(multiprocessing.Process): - # make 'daemon' attribute always return False - def _get_daemon(self): - return False - def _set_daemon(self, value): - pass - daemon = property(_get_daemon, _set_daemon) - -# We sub-class multiprocessing.pool.Pool instead of multiprocessing.Pool -# because the latter is only a wrapper function, not a proper class. -class NoDaemonProcessPool(multiprocessing.pool.Pool): - Process = NoDaemonProcess - -class AttributeDict(dict): - __getattr__ = dict.__getitem__ - __setattr__ = dict.__setitem__ - -def get_cf(dataset_name, exp_dir=""): - - cf_path = os.path.join('datasets', dataset_name, exp_dir, "configs.py") - cf_file = utils.import_module('configs', cf_path) - - return cf_file.Configs() - - -def prostate_results_static(plot_dir=None): - cf = get_cf('prostate', '') - if plot_dir is None: - plot_dir = os.path.join('datasets', 'prostate', 'misc') - - text_fs = 18 - fig = plg.plt.figure(figsize=(6, 3)) #w,h - grid = plg.plt.GridSpec(1, 1, wspace=0.0, hspace=0.0, figure=fig) #r,c - - groups = ["b values", "ADC + b values", "T2"] - splits = ["Det. 
U-Net", "Mask R-CNN", "Faster R-CNN+"] - values = {"detu": [(0.296, 0.031), (0.312, 0.045), (0.090, 0.040)], - "mask": [(0.393, 0.051), (0.382, 0.047), (0.136, 0.016)], - "fast": [(0.424, 0.083), (0.390, 0.086), (0.036, 0.013)]} - bar_values = [[v[0] for v in split] for split in values.values()] - errors = [[v[1] for v in split] for split in values.values()] - ax = fig.add_subplot(grid[0,0]) - colors = [cf.aubergine, cf.blue, cf.dark_blue] - plg.plot_grouped_bar_chart(cf, bar_values, groups, splits, errors=errors, colors=colors, ax=ax, legend=True, - title="Prostate Main Results (3D)", ylabel=r"Performance as $\mathrm{AP}_{10}$", xlabel="Input Modalities") - plg.plt.tight_layout() - plg.plt.savefig(os.path.join(plot_dir, 'prostate_main_results.png'), dpi=600) - -def prostate_GT_examples(exp_dir='', plot_dir=None, pid=8., z_ix=None): - - import datasets.prostate.data_loader as dl - cf = get_cf('prostate', exp_dir) - cf.exp_dir = exp_dir - cf.fold = 0 - cf.data_sourcedir = "/mnt/HDD2TB/Documents/data/prostate/data_di_250519_ps384_gs6071/" - dataset = dl.Dataset(cf) - dataset.init_FoldGenerator(cf.seed, cf.n_cv_splits) - dataset.generate_splits(check_file=os.path.join(cf.exp_dir, 'fold_ids.pickle')) - set_splits = dataset.fg.splits - - test_ids, val_ids = set_splits.pop(cf.fold), set_splits.pop(cf.fold - 1) - train_ids = np.concatenate(set_splits, axis=0) - - if cf.held_out_test_set: - train_ids = np.concatenate((train_ids, test_ids), axis=0) - test_ids = [] - print("data set loaded with: {} train / {} val / {} test patients".format(len(train_ids), len(val_ids), - len(test_ids))) - - - if plot_dir is None: - plot_dir = cf.plot_dir if hasattr(cf, 'plot_dir') else os.path.join('datasets', 'prostate', 'misc') - - text_fs = 18 - fig = plg.plt.figure(figsize=(10, 7.7)) #w,h - grid = plg.plt.GridSpec(3, 4, wspace=0.0, hspace=0.0, figure=fig) #r,c - text_x, text_y = 0.1, 0.8 - - # ------- DWI ------- - if z_ix is None: - z_ix_dwi = np.random.choice(dataset[pid]["fg_slices"]) - img = np.load(dataset[pid]["img"])[:,z_ix_dwi] # mods, z,y,x - seg = np.load(dataset[pid]["seg"])[z_ix_dwi] # z,y,x - ax = fig.add_subplot(grid[0,0]) - ax.imshow(img[0], cmap='gray') - ax.text(text_x, text_y, "ADC", size=text_fs, color=cf.white, transform=ax.transAxes, - bbox=dict(facecolor=cf.black, alpha=0.7, edgecolor=cf.white, clip_on=False, pad=7)) - ax.axis('off') - ax = fig.add_subplot(grid[0,1]) - ax.imshow(img[0], cmap='gray') - cmap = cf.class_cmap - for r_ix in np.unique(seg[seg>0]): - seg[seg==r_ix] = dataset[pid]["class_targets"][r_ix-1] - ax.imshow(plg.to_rgba(seg, cmap), alpha=1) - ax.text(text_x, text_y, "DWI GT", size=text_fs, color=cf.white, transform=ax.transAxes, - bbox=dict(facecolor=cf.black, alpha=0.7, edgecolor=cf.white, clip_on=False, pad=7)) - ax.axis('off') - for b_ix, b in enumerate([50,500,1000,1500]): - ax = fig.add_subplot(grid[1, b_ix]) - ax.imshow(img[b_ix+1], cmap='gray') - ax.text(text_x, text_y, r"{}{}".format("$b=$" if b_ix == 0 else "", b), size=text_fs, color=cf.white, - transform=ax.transAxes, - bbox=dict(facecolor=cf.black, alpha=0.7, edgecolor=cf.white, clip_on=False, pad=7)) - ax.axis('off') - - # ----- T2 ----- - cf.data_sourcedir = "/mnt/HDD2TB/Documents/data/prostate/data_t2_250519_ps384_gs6071/" - dataset = dl.Dataset(cf) - if z_ix is None: - if z_ix_dwi in dataset[pid]["fg_slices"]: - z_ix_t2 = z_ix_dwi - else: - z_ix_t2 = np.random.choice(dataset[pid]["fg_slices"]) - img = np.load(dataset[pid]["img"])[:,z_ix_t2] # mods, z,y,x - seg = np.load(dataset[pid]["seg"])[z_ix_t2] # z,y,x 
- ax = fig.add_subplot(grid[2,0]) - ax.imshow(img[0], cmap='gray') - ax.text(text_x, text_y, "T2w", size=text_fs, color=cf.white, transform=ax.transAxes, - bbox=dict(facecolor=cf.black, alpha=0.7, edgecolor=cf.white, clip_on=False, pad=7)) - ax.axis('off') - ax = fig.add_subplot(grid[2,1]) - ax.imshow(img[0], cmap='gray') - cmap = cf.class_cmap - for r_ix in np.unique(seg[seg>0]): - seg[seg==r_ix] = dataset[pid]["class_targets"][r_ix-1] - ax.imshow(plg.to_rgba(seg, cmap), alpha=1) - ax.text(text_x, text_y, "T2 GT", size=text_fs, color=cf.white, transform=ax.transAxes, - bbox=dict(facecolor=cf.black, alpha=0.7, edgecolor=cf.white, clip_on=False, pad=7)) - ax.axis('off') - - #grid.tight_layout(fig) - plg.plt.tight_layout() - plg.plt.savefig(os.path.join(plot_dir, 'prostate_gt_examples.png'), dpi=600) - - -def prostate_dataset_stats(exp_dir='', plot_dir=None, show_splits=True,): - - import datasets.prostate.data_loader as dl - cf = get_cf('prostate', exp_dir) - cf.exp_dir = exp_dir - cf.fold = 0 - dataset = dl.Dataset(cf) - dataset.init_FoldGenerator(cf.seed, cf.n_cv_splits) - dataset.generate_splits(check_file=os.path.join(cf.exp_dir, 'fold_ids.pickle')) - set_splits = dataset.fg.splits - - test_ids, val_ids = set_splits.pop(cf.fold), set_splits.pop(cf.fold - 1) - train_ids = np.concatenate(set_splits, axis=0) - - if cf.held_out_test_set: - train_ids = np.concatenate((train_ids, test_ids), axis=0) - test_ids = [] - - print("data set loaded with: {} train / {} val / {} test patients".format(len(train_ids), len(val_ids), - len(test_ids))) - - df, labels = dataset.calc_statistics(subsets={"train": train_ids, "val": val_ids, "test": test_ids}, plot_dir=None) - - if plot_dir is None: - plot_dir = cf.plot_dir if hasattr(cf, 'plot_dir') else os.path.join('datasets', 'prostate', 'misc') - - if show_splits: - fig = plg.plt.figure(figsize=(6, 6)) # w, h - grid = plg.plt.GridSpec(2, 2, wspace=0.05, hspace=0.15, figure=fig) # rows, cols - else: - fig = plg.plt.figure(figsize=(6, 3.)) - grid = plg.plt.GridSpec(1, 1, wspace=0.0, hspace=0.15, figure=fig) - - ax = fig.add_subplot(grid[0,0]) - ax = plg.plot_data_stats(cf, df, labels, ax=ax) - ax.set_xlabel("") - ax.set_xticklabels(df.columns, rotation='horizontal', fontsize=11) - ax.set_title("") - if show_splits: - ax.text(0.05,0.95, 'a)', horizontalalignment='center', verticalalignment='center', transform = ax.transAxes, weight='bold') - ax.text(0, 25, "GS$=6$", horizontalalignment='center', verticalalignment='center', bbox=dict(facecolor=(*cf.white, 0.8), edgecolor=cf.dark_green, pad=3)) - ax.text(1, 25, "GS$\geq 7a$", horizontalalignment='center', verticalalignment='center', bbox=dict(facecolor=(*cf.white, 0.8), edgecolor=cf.red, pad=3)) - ax.margins(y=0.1) - - if show_splits: - ax = fig.add_subplot(grid[:, 1]) - ax = plg.plot_fold_stats(cf, df, labels, ax=ax) - ax.set_xlabel("") - ax.set_title("") - ax.text(0.05, 0.98, 'c)', horizontalalignment='center', verticalalignment='center', transform=ax.transAxes, weight='bold') - ax.yaxis.tick_right() - ax.yaxis.set_label_position("right") - ax.margins(y=0.1) - - ax = fig.add_subplot(grid[1, 0]) - cf.balance_target = "lesion_gleasons" - dataset.df = None - df, labels = dataset.calc_statistics(plot_dir=None, overall_stats=True) - ax = plg.plot_data_stats(cf, df, labels, ax=ax) - ax.set_xlabel("") - ax.set_title("") - ax.text(0.05, 0.95, 'b)', horizontalalignment='center', verticalalignment='center', transform=ax.transAxes, weight='bold') - ax.margins(y=0.1) - # rename GS according to names in thesis - renamer = 
{'GS60':'GS 6', 'GS71':'GS 7a', 'GS72':'GS 7b', 'GS80':'GS 8', 'GS90': 'GS 9', 'GS91':'GS 9a', 'GS92':'GS 9b'} - x_ticklabels = [str(l.get_text()) for l in ax.xaxis.get_ticklabels()] - ax.xaxis.set_ticklabels([renamer[l] for l in x_ticklabels]) - - plg.plt.tight_layout() - plg.plt.savefig(os.path.join(plot_dir, 'data_stats_prostate.png'), dpi=600) - - return - -def lidc_merged_sa_joint_plot(exp_dir='', plot_dir=None): - import datasets.lidc.data_loader as dl - cf = get_cf('lidc', exp_dir) - cf.balance_target = "regression_targets" - - if plot_dir is None: - plot_dir = cf.plot_dir if hasattr(cf, 'plot_dir') else os.path.join('datasets', 'lidc', 'misc') - - cf.training_gts = 'merged' - dataset = dl.Dataset(cf, mode='train') - df, labels = dataset.calc_statistics(plot_dir=None, overall_stats=True) - - fig = plg.plt.figure(figsize=(4, 5.6)) #w, h - # fig.subplots_adjust(hspace=0, wspace=0) - grid = plg.plt.GridSpec(3, 1, wspace=0.0, hspace=0.7, figure=fig) #rows, cols - fs = 9 - - ax = fig.add_subplot(grid[0, 0]) - - labels = [AttributeDict({ 'name': rg_val, 'color': cf.bin_id2label[cf.rg_val_to_bin_id(rg_val)].color}) for rg_val - in df.columns] - ax = plg.plot_data_stats(cf, df, labels, ax=ax, fs=fs) - ax.set_xlabel("averaged multi-rater malignancy scores (ms)", fontsize=fs) - ax.set_title("") - ax.text(0.05, 0.91, 'a)', horizontalalignment='center', verticalalignment='center', transform=ax.transAxes, - weight='bold', fontsize=fs) - ax.margins(y=0.2) - - #----- single annotator ------- - cf.training_gts = 'sa' - dataset = dl.Dataset(cf, mode='train') - df, labels = dataset.calc_statistics(plot_dir=None, overall_stats=True) - - ax = fig.add_subplot(grid[1, 0]) - labels = [AttributeDict({ 'name': '{:.0f}'.format(rg_val), 'color': cf.bin_id2label[cf.rg_val_to_bin_id(rg_val)].color}) for rg_val - in df.columns] - mapper = {rg_val:'{:.0f}'.format(rg_val) for rg_val in df.columns} - df = df.rename(mapper, axis=1) - ax = plg.plot_data_stats(cf, df, labels, ax=ax, fs=fs) - ax.set_xlabel("unaggregrated single-rater malignancy scores (ms)", fontsize=fs) - ax.set_title("") - ax.text(0.05, 0.91, 'b)', horizontalalignment='center', verticalalignment='center', transform=ax.transAxes, - weight='bold', fontsize=fs) - ax.margins(y=0.45) - - #------ binned dissent ----- - #cf.balance_target = "regression_targets" - all_patients = [(pid,patient['rg_bin_targets']) for pid, patient in dataset.data.items()] - non_empty_patients = [(pid, lesions) for (pid, lesions) in all_patients if len(lesions) > 0] - - mean_std_per_lesion = np.array([(np.mean(roi), np.std(roi)) for (pid, lesions) in non_empty_patients for roi in lesions]) - distribution_max_per_lesion = [np.unique(roi, return_counts=True) for (pid, lesions) in non_empty_patients for roi in lesions] - distribution_max_per_lesion = np.array([uniq[cts.argmax()] for (uniq, cts) in distribution_max_per_lesion]) - - binned_stats = [[] for bin_id in cf.bin_id2rg_val.keys()] - for l_ix, mean_std in enumerate(mean_std_per_lesion): - bin_id = cf.rg_val_to_bin_id(mean_std[0]) - bin_id_max = cf.rg_val_to_bin_id(distribution_max_per_lesion[l_ix]) - binned_stats[int(bin_id)].append((*mean_std, distribution_max_per_lesion[l_ix], bin_id-bin_id_max)) - - ax = fig.add_subplot(grid[2, 0]) - plg.plot_binned_rater_dissent(cf, binned_stats, ax=ax, fs=fs) - ax.set_title("") - ax.text(0.05, 0.91, 'c)', horizontalalignment='center', verticalalignment='center', transform=ax.transAxes, - weight='bold', fontsize=fs) - ax.margins(y=0.2) - - - plg.plt.savefig(os.path.join(plot_dir, 
'data_stats_lidc_solarized.png'), bbox_inches='tight', dpi=600) - - return - -def lidc_dataset_stats(exp_dir='', plot_dir=None): - - import datasets.lidc.data_loader as dl - cf = get_cf('lidc', exp_dir) - cf.data_rootdir = cf.pp_data_path - cf.balance_target = "regression_targets" - - dataset = dl.Dataset(cf, data_dir=cf.data_rootdir) - if plot_dir is None: - plot_dir = cf.plot_dir if hasattr(cf, 'plot_dir') else os.path.join('datasets', 'lidc', 'misc') - - df, labels = dataset.calc_statistics(plot_dir=plot_dir, overall_stats=True) - - return df, labels - -def lidc_sa_dataset_stats(exp_dir='', plot_dir=None): - - import datasets.lidc_sa.data_loader as dl - cf = get_cf('lidc_sa', exp_dir) - #cf.data_rootdir = cf.pp_data_path - cf.balance_target = "regression_targets" - - dataset = dl.Dataset(cf) - if plot_dir is None: - plot_dir = cf.plot_dir if hasattr(cf, 'plot_dir') else os.path.join('datasets', 'lidc_sa', 'misc') - - dataset.calc_statistics(plot_dir=plot_dir, overall_stats=True) - - all_patients = [(pid,patient['rg_bin_targets']) for pid, patient in dataset.data.items()] - empty_patients = [pid for (pid, lesions) in all_patients if len(lesions) == 0] - non_empty_patients = [(pid, lesions) for (pid, lesions) in all_patients if len(lesions) > 0] - full_consent_patients = [(pid, lesions) for (pid, lesions) in non_empty_patients if np.all([np.unique(roi).size == 1 for roi in lesions])] - all_lesions = [roi for (pid, lesions) in non_empty_patients for roi in lesions] - two_vote_min = [roi for (pid, lesions) in non_empty_patients for roi in lesions if np.count_nonzero(roi) > 1] - three_vote_min = [roi for (pid, lesions) in non_empty_patients for roi in lesions if np.count_nonzero(roi) > 2] - mean_std_per_lesion = np.array([(np.mean(roi), np.std(roi)) for (pid, lesions) in non_empty_patients for roi in lesions]) - avg_mean_std_pl = np.mean(mean_std_per_lesion, axis=0) - # call std dev per lesion disconsent from now on - disconsent_std = np.std(mean_std_per_lesion[:, 1]) - - distribution_max_per_lesion = [np.unique(roi, return_counts=True) for (pid, lesions) in non_empty_patients for roi in lesions] - distribution_max_per_lesion = np.array([uniq[cts.argmax()] for (uniq, cts) in distribution_max_per_lesion]) - - mean_max_delta = abs(mean_std_per_lesion[:, 0] - distribution_max_per_lesion) - - binned_stats = [[] for bin_id in cf.bin_id2rg_val.keys()] - for l_ix, mean_std in enumerate(mean_std_per_lesion): - bin_id = cf.rg_val_to_bin_id(mean_std[0]) - bin_id_max = cf.rg_val_to_bin_id(distribution_max_per_lesion[l_ix]) - binned_stats[int(bin_id)].append((*mean_std, distribution_max_per_lesion[l_ix], bin_id-bin_id_max)) - - plg.plot_binned_rater_dissent(cf, binned_stats, out_file=os.path.join(plot_dir, "binned_dissent.png")) - - - mean_max_bin_divergence = [[] for bin_id in cf.bin_id2rg_val.keys()] - for bin_id, bin_stats in enumerate(binned_stats): - mean_max_bin_divergence[bin_id].append([roi for roi in bin_stats if roi[3] != 0]) - mean_max_bin_divergence[bin_id].insert(0,len(mean_max_bin_divergence[bin_id][0])) - - - return - -def lidc_annotator_confusion(exp_dir='', plot_dir=None, normalize=None, dataset=None, plot=True): - """ - :param exp_dir: - :param plot_dir: - :param normalize: str or None. 
str in ['truth', 'pred'] - :param dataset: - :param plot: - :return: - """ - if dataset is None: - import datasets.lidc.data_loader as dl - cf = get_cf('lidc', exp_dir) - # cf.data_rootdir = cf.pp_data_path - cf.training_gts = "sa" - cf.balance_target = "regression_targets" - dataset = dl.Dataset(cf) - else: - cf = dataset.cf - - if plot_dir is None: - plot_dir = cf.plot_dir if hasattr(cf, 'plot_dir') else os.path.join('datasets', 'lidc', 'misc') - - dataset.calc_statistics(plot_dir=plot_dir, overall_stats=True) - - all_patients = [(pid,patient['rg_bin_targets']) for pid, patient in dataset.data.items()] - non_empty_patients = [(pid, lesions) for (pid, lesions) in all_patients if len(lesions) > 0] - - y_true, y_pred = [], [] - for (pid, lesions) in non_empty_patients: - for roi in lesions: - true_bin = cf.rg_val_to_bin_id(np.mean(roi)) - y_true.extend([true_bin] * len(roi)) - y_pred.extend(roi) - cm = confusion_matrix(y_true, y_pred) - if normalize in ["truth", "row"]: - cm = cm.astype('float') / cm.sum(axis=1)[:, np.newaxis] - elif normalize in ["pred", "prediction", "column", "col"]: - cm = cm.astype('float') / cm.sum(axis=0)[:, np.newaxis] - - if plot: - plg.plot_confusion_matrix(cf, cm, out_file=os.path.join(plot_dir, "annotator_confusion.pdf")) - - return cm - -def plot_lidc_dissent_and_example(confusion_matrix=True, bin_stds=False, plot_dir=None, numbering=True, example_title="Example"): - import datasets.lidc.data_loader as dl - dataset_name = 'lidc' - exp_dir1 = '/home/gregor/Documents/medicaldetectiontoolkit/datasets/lidc/experiments/ms12345_mrcnn3d_rg_bs8' - exp_dir2 = '/home/gregor/Documents/medicaldetectiontoolkit/datasets/lidc/experiments/ms12345_mrcnn3d_rgbin_bs8' - #exp_dir1 = '/home/gregor/networkdrives/E132-Cluster-Projects/lidc_sa/experiments/ms12345_mrcnn3d_rg_bs8' - #exp_dir2 = '/home/gregor/networkdrives/E132-Cluster-Projects/lidc_sa/experiments/ms12345_mrcnn3d_rgbin_bs8' - cf = get_cf(dataset_name, exp_dir1) - #file_names = [f_name for f_name in os.listdir(os.path.join(exp_dir, 'inference_analysis')) if f_name.endswith('.pkl')] - # file_names = [os.path.join(exp_dir, "inference_analysis", f_name) for f_name in file_names] - file_names = ["bytes_merged_boxes_fold_0_pid_0811a.pkl",] - z_ics = [194,] - plot_files = [ - {'files': [os.path.join(exp_dir, "inference_analysis", f_name) for exp_dir in [exp_dir1, exp_dir2]], - 'z_ix': z_ix} for (f_name, z_ix) in zip(file_names, z_ics) - ] - - cf.training_gts = 'sa' - info_df_path = '/mnt/HDD2TB/Documents/data/lidc/pp_20190805/patient_gts_{}/info_df.pickle'.format(cf.training_gts) - info_df = pd.read_pickle(info_df_path) - - cf.roi_items = ['regression_targets', 'rg_bin_targets_sa'] #['class_targets'] + cf.observables_rois - - text_fs = 14 - title_fs = text_fs - text_x, text_y = 0.06, 0.92 - fig = plg.plt.figure(figsize=(8.6, 3)) #w, h - #fig.subplots_adjust(hspace=0, wspace=0) - grid = plg.plt.GridSpec(1, 4, wspace=0.0, hspace=0.0, figure=fig) #rows, cols - cf.plot_class_ids = True - - f_ix = 0 - z_ix = plot_files[f_ix]['z_ix'] - for model_ix in range(2)[::-1]: - print("f_ix, m_ix", f_ix, model_ix) - plot_file = utils.load_obj(plot_files[f_ix]['files'][model_ix]) - batch = plot_file["batch"] - pid = batch["pid"][0] - batch['patient_rg_bin_targets_sa'] = info_df[info_df.pid == pid]['class_target'].tolist() - # apply same filter as with merged GTs: need at least two non-zero votes to consider a RoI. 
- batch['patient_rg_bin_targets_sa'] = [[four_votes.astype("uint8") for four_votes in batch_el if - np.count_nonzero(four_votes>0)>=2] for batch_el in - batch['patient_rg_bin_targets_sa']] - results_dict = plot_file["res_dict"] - - # pred - ax = fig.add_subplot(grid[0, model_ix+2]) - plg.view_batch_thesis(cf, batch, res_dict=results_dict, legend=False, sample_picks=None, fontsize=text_fs*1.3, - vol_slice_picks=[z_ix, ], show_gt_labels=True, box_score_thres=0.2, plot_mods=False, - seg_cmap="rg", show_cl_ids=False, - out_file=None, dpi=600, patient_items=True, return_fig=False, axes={'pred': ax}) - - #ax.set_title("{}".format("Reg R-CNN" if model_ix==0 else "Mask R-CNN"), size=title_fs) - ax.set_title("") - ax.set_xlabel("{}".format("Reg R-CNN" if model_ix == 0 else "Mask R-CNN"), size=title_fs) - if numbering: - ax.text(text_x, text_y, chr(model_ix+99)+")", horizontalalignment='center', verticalalignment='center', - transform=ax.transAxes, weight='bold', color=cf.white, fontsize=title_fs) - #ax.axis("off") - ax.axis("on") - plg.suppress_axes_lines(ax) - - # GT - if model_ix==0: - ax.set_title(example_title, fontsize=title_fs) - ax = fig.add_subplot(grid[0, 1]) - # ax.imshow(batch['patient_data'][0, 0, :, :, z_ix], cmap='gray') - # ax.imshow(plg.to_rgba(batch['patient_seg'][0,0,:,:,z_ix], cf.cmap), alpha=0.8) - plg.view_batch_thesis(cf, batch, res_dict=results_dict, legend=True, sample_picks=None, fontsize=text_fs*1.3, - vol_slice_picks=[z_ix, ], show_gt_labels=True, box_score_thres=0.13, plot_mods=False, seg_cmap="rg", - out_file=None, dpi=600, patient_items=True, return_fig=False, axes={'gt':ax}) - if numbering: - ax.text(text_x, text_y, "b)", horizontalalignment='center', verticalalignment='center', transform=ax.transAxes, - weight='bold', color=cf.white, fontsize=title_fs) - #ax.set_title("Ground Truth", size=title_fs) - ax.set_title("") - ax.set_xlabel("Ground Truth", size=title_fs) - plg.suppress_axes_lines(ax) - #ax.axis('off') - #----- annotator dissent plot(s) ------ - - cf.training_gts = 'sa' - cf.balance_targets = 'rg_bin_targets' - dataset = dl.Dataset(cf, mode='train') - - if bin_stds: - #------ binned dissent ----- - #cf = get_cf('lidc', "") - - #cf.balance_target = "regression_targets" - all_patients = [(pid,patient['rg_bin_targets']) for pid, patient in dataset.data.items()] - non_empty_patients = [(pid, lesions) for (pid, lesions) in all_patients if len(lesions) > 0] - - mean_std_per_lesion = np.array([(np.mean(roi), np.std(roi)) for (pid, lesions) in non_empty_patients for roi in lesions]) - distribution_max_per_lesion = [np.unique(roi, return_counts=True) for (pid, lesions) in non_empty_patients for roi in lesions] - distribution_max_per_lesion = np.array([uniq[cts.argmax()] for (uniq, cts) in distribution_max_per_lesion]) - - binned_stats = [[] for bin_id in cf.bin_id2rg_val.keys()] - for l_ix, mean_std in enumerate(mean_std_per_lesion): - bin_id = cf.rg_val_to_bin_id(mean_std[0]) - bin_id_max = cf.rg_val_to_bin_id(distribution_max_per_lesion[l_ix]) - binned_stats[int(bin_id)].append((*mean_std, distribution_max_per_lesion[l_ix], bin_id-bin_id_max)) - - ax = fig.add_subplot(grid[0, 0]) - plg.plot_binned_rater_dissent(cf, binned_stats, ax=ax, fs=text_fs) - if numbering: - ax.text(text_x, text_y, 'a)', horizontalalignment='center', verticalalignment='center', transform=ax.transAxes, - weight='bold', fontsize=title_fs) - ax.margins(y=0.2) - ax.set_xlabel("Malignancy-Score Bins", fontsize=title_fs) - #ax.yaxis.set_label_position("right") - #ax.yaxis.tick_right() - 
ax.set_yticklabels([]) - #ax.xaxis.set_label_position("top") - #ax.xaxis.tick_top() - ax.set_title("Average Rater Dissent", fontsize=title_fs) - - if confusion_matrix: - #------ confusion matrix ------- - cm = lidc_annotator_confusion(dataset=dataset, plot=False, normalize="truth") - ax = fig.add_subplot(grid[0, 0]) - cmap = plg.make_colormap([(1,1,1), cf.dkfz_blue]) - plg.plot_confusion_matrix(cf, cm, ax=ax, fs=text_fs, color_bar=False, cmap=cmap )#plg.plt.cm.Purples) - ax.set_xticks(np.arange(cm.shape[1])) - if numbering: - ax.text(-0.16, text_y, 'a)', horizontalalignment='center', verticalalignment='center', transform=ax.transAxes, - weight='bold', fontsize=title_fs) - ax.margins(y=0.2) - ax.set_title("Annotator Dissent", fontsize=title_fs) - - #fig.suptitle(" Example", fontsize=title_fs) - #fig.text(0.63, 1.03, "Example", va="center", ha="center", size=title_fs, transform=fig.transFigure) - - #fig_patches = fig_leg.get_patches() - #patches= [plg.mpatches.Patch(color=label.color, label="{:.10s}".format(label.name)) for label in cf.bin_id2label.values() if label.id!=0] - #fig.legends.append(fig_leg) - #plg.plt.figlegend(handles=patches, loc="lower center", bbox_to_anchor=(0.5, 0.0), borderaxespad=0., - # ncol=len(patches), bbox_transform=fig.transFigure, title="Binned Malignancy Score", fontsize= text_fs) - plg.plt.tight_layout() - if plot_dir is None: - plot_dir = "datasets/lidc/misc" - out_file = os.path.join(plot_dir, "regrcnn_lidc_diss_example.png") - if out_file is not None: - plg.plt.savefig(out_file, dpi=600, bbox_inches='tight') - -def lidc_annotator_dissent_images(exp_dir='', plot_dir=None): - if plot_dir is None: - plot_dir = "datasets/lidc/misc" - - import datasets.lidc.data_loader as dl - cf = get_cf('lidc', exp_dir) - cf.training_gts = "sa" - - dataset = dl.Dataset(cf, mode='train') - - pids = {'0069a': 132, '0493a':125, '1008a': 164}#, '0355b': 138, '0484a': 86} # pid : (z_ix to show) - # add_pids = dataset.set_ids[65:80] - # for pid in add_pids: - # try: - # - # pids[pid] = int(np.median(dataset.data[pid]['fg_slices'][0])) - # - # except (IndexError, ValueError): - # print("pid {} has no foreground".format(pid)) - - if not os.path.exists(plot_dir): - os.mkdir(plot_dir) - out_file = os.path.join(plot_dir, "lidc_example_rater_dissent.png") - - #cf.training_gts = 'sa' - cf.roi_items = ['regression_targets', 'rg_bin_targets_sa'] #['class_targets'] + cf.observables_rois - - title_fs = 14 - text_fs = 14 - fig = plg.plt.figure(figsize=(10, 5.9)) #w, h - #fig.subplots_adjust(hspace=0, wspace=0) - grid = plg.plt.GridSpec(len(pids.keys()), 5, wspace=0.0, hspace=0.0, figure=fig) #rows, cols - cf.plot_class_ids = True - cmap = {id : (label.color if id!=0 else (0.,0.,0.)) for id, label in cf.bin_id2label.items()} - legend_handles = set() - window_size = (250,250) - - for p_ix, (pid, z_ix) in enumerate(pids.items()): - try: - print("plotting pid, z_ix", pid, z_ix) - patient = dataset[pid] - img = np.load(patient['data'], mmap_mode='r')[z_ix] # z,y,x --> y,x - seg = np.load(patient['seg'], mmap_mode='r')['seg'][:,z_ix] # rater,z,y,x --> rater,y,x - rg_bin_targets = patient['rg_bin_targets'] - - contours = np.nonzero(seg[0]) - center_y, center_x = np.median(contours[0]), np.median(contours[1]) - #min_y, min_x = np.min(contours[0]), np.min(contours[1]) - #max_y, max_x = np.max(contours[0]), np.max(contours[1]) - #buffer_y, buffer_x = int(seg.shape[1]*0.5), int(seg.shape[2]*0.5) - #y_range = np.arange(max(min_y-buffer_y, 0), min(min_y+buffer_y, seg.shape[1])) - #x_range = 
np.arange(max(min_x-buffer_x, 0), min(min_x+buffer_x, seg.shape[2])) - y_range = np.arange(max(int(center_y-window_size[0]/2), 0), min(int(center_y+window_size[0]/2), seg.shape[1])) - - min_x = int(center_x-window_size[1]/2) - max_x = int(center_x+window_size[1]/2) - if min_x<0: - max_x += abs(min_x) - elif max_x>seg.shape[2]: - min_x -= max_x-seg.shape[2] - x_range = np.arange(max(min_x, 0), min(max_x, seg.shape[2])) - img = img[y_range][:,x_range] - seg = seg[:, y_range][:,:,x_range] - # data - ax = fig.add_subplot(grid[p_ix, 0]) - ax.imshow(img, cmap='gray') - - plg.suppress_axes_lines(ax) - # key = "spec" if "spec" in batch.keys() else "pid" - ylabel = str(pid) + "/" + str(z_ix) - ax.set_ylabel("{:s}".format(ylabel), fontsize=title_fs) # show id-number - if p_ix == 0: - ax.set_title("Image", fontsize=title_fs) - - # raters - for r_ix in range(seg.shape[0]): - rater_bin_targets = rg_bin_targets[:,r_ix] - for roi_ix, rating in enumerate(rater_bin_targets): - seg[r_ix][seg[r_ix]==roi_ix+1] = rating - ax = fig.add_subplot(grid[p_ix, r_ix+1]) - ax.imshow(seg[r_ix], cmap='gray') - ax.imshow(plg.to_rgba(seg[r_ix], cmap), alpha=0.8) - ax.axis('off') - if p_ix == 0: - ax.set_title("Rating {}".format(r_ix+1), fontsize=title_fs) - legend_handles.update([cf.bin_id2label[id] for id in np.unique(seg[r_ix]) if id!=0]) - except: - print("failed pid", pid) - pass - - legend_handles = [plg.mpatches.Patch(color=label.color, label="{:.10s}".format(label.name)) for label in legend_handles] - legend_handles = sorted(legend_handles, key=lambda h: h._label) - fig.suptitle("LIDC Single-Rater Annotations", fontsize=title_fs) - #patches= [plg.mpatches.Patch(color=label.color, label="{:.10s}".format(label.name)) for label in cf.bin_id2label.values() if label.id!=0] - - legend = fig.legend(handles=legend_handles, loc="lower center", bbox_to_anchor=(0.5, 0.0), borderaxespad=0, fontsize=text_fs, - bbox_transform=fig.transFigure, ncol=len(legend_handles), title="Malignancy Score") - plg.plt.setp(legend.get_title(), fontsize=title_fs) - #grid.tight_layout(fig) - #plg.plt.tight_layout(rect=[0, 0.00, 1, 1.5]) - if out_file is not None: - plg.plt.savefig(out_file, dpi=600, bbox_inches='tight') - - - - return - -def lidc_results_static(xlabels=None, plot_dir=None, in_percent=True): - cf = get_cf('lidc', '') - if plot_dir is None: - plot_dir = os.path.join('datasets', 'lidc', 'misc') - - text_fs = 18 - fig = plg.plt.figure(figsize=(3, 2.5)) #w,h - grid = plg.plt.GridSpec(2, 1, wspace=0.0, hspace=0.0, figure=fig) #r,c - - #--- LIDC 3D ----- - - - splits = ["Reg R-CNN", "Mask R-CNN"]#, "Reg R-CNN 2D", "Mask R-CNN 2D"] - values = {"reg3d": [(0.259, 0.035), (0.628, 0.038), (0.477, 0.035)], - "mask3d": [(0.235, 0.027), (0.622, 0.029), (0.411, 0.026)],} - groups = [r"$\mathrm{AVP}_{10}$", "$\mathrm{AP}_{10}$", "Bin Acc."] - if in_percent: - bar_values = [[v[0]*100 for v in split] for split in values.values()] - errors = [[v[1]*100 for v in split] for split in values.values()] - else: - bar_values = [[v[0] for v in split] for split in values.values()] - errors = [[v[1] for v in split] for split in values.values()] - - ax = fig.add_subplot(grid[0,0]) - colors = [cf.blue, cf.dkfz_blue] - plg.plot_grouped_bar_chart(cf, bar_values, groups, splits, errors=errors, colors=colors, ax=ax, legend=False, label_format="{:.1f}", - title="LIDC Results", ylabel=r"3D Perf. 
(%)", xlabel="Metric", yticklabels=[], ylim=(0,80 if in_percent else 0.8)) - #------ LIDC 2D ------- - - splits = ["Reg R-CNN", "Mask R-CNN"] - values = {"reg2d": [(0.148, 0.046), (0.414, 0.052), (0.468, 0.057)], - "mask2d": [(0.127, 0.034), (0.406, 0.040), (0.447, 0.018)]} - groups = [r"$\mathrm{AVP}_{10}$", "$\mathrm{AP}_{10}$", "Bin Acc."] - if in_percent: - bar_values = [[v[0]*100 for v in split] for split in values.values()] - errors = [[v[1]*100 for v in split] for split in values.values()] - else: - bar_values = [[v[0] for v in split] for split in values.values()] - errors = [[v[1] for v in split] for split in values.values()] - ax = fig.add_subplot(grid[1,0]) - colors = [cf.blue, cf.dkfz_blue] - plg.plot_grouped_bar_chart(cf, bar_values, groups, splits, errors=errors, colors=colors, ax=ax, legend=False, label_format="{:.1f}", - title="", ylabel=r"2D Perf.", xlabel="Metric", xticklabels=xlabels, yticklabels=[], ylim=(None,60 if in_percent else 0.6)) - plg.plt.tight_layout() - plg.plt.savefig(os.path.join(plot_dir, 'lidc_static_results.png'), dpi=700) - -def toy_results_static(xlabels=None, plot_dir=None, in_percent=True): - cf = get_cf('toy', '') - if plot_dir is None: - plot_dir = os.path.join('datasets', 'toy', 'misc') - - text_fs = 18 - fig = plg.plt.figure(figsize=(3, 2.5)) #w,h - grid = plg.plt.GridSpec(2, 1, wspace=0.0, hspace=0.0, figure=fig) #r,c - - #--- Toy 3D ----- - groups = [r"$\mathrm{AVP}_{10}$", "$\mathrm{AP}_{10}$", "Bin Acc."] - splits = ["Reg R-CNN", "Mask R-CNN"]#, "Reg R-CNN 2D", "Mask R-CNN 2D"] - values = {"reg3d": [(0.881, 0.014), (0.998, 0.004), (0.887, 0.014)], - "mask3d": [(0.822, 0.070), (1.0, 0.0), (0.826, 0.069)],} - if in_percent: - bar_values = [[v[0]*100 for v in split] for split in values.values()] - errors = [[v[1]*100 for v in split] for split in values.values()] - else: - bar_values = [[v[0] for v in split] for split in values.values()] - errors = [[v[1] for v in split] for split in values.values()] - ax = fig.add_subplot(grid[0,0]) - colors = [cf.blue, cf.dkfz_blue] - plg.plot_grouped_bar_chart(cf, bar_values, groups, splits, errors=errors, colors=colors, ax=ax, legend=True, label_format="{:.1f}", - title="Toy Results", ylabel=r"3D Perf. 
(%)", xlabel="Metric", yticklabels=[], ylim=(0,130 if in_percent else .3)) - #------ Toy 2D ------- - groups = [r"$\mathrm{AVP}_{10}$", "$\mathrm{AP}_{10}$", "Bin Acc."] - splits = ["Reg R-CNN", "Mask R-CNN"] - values = {"reg2d": [(0.859, 0.021), (1., 0.0), (0.860, 0.021)], - "mask2d": [(0.748, 0.022), (1., 0.0), (0.748, 0.021)]} - if in_percent: - bar_values = [[v[0]*100 for v in split] for split in values.values()] - errors = [[v[1]*100 for v in split] for split in values.values()] - else: - bar_values = [[v[0] for v in split] for split in values.values()] - errors = [[v[1] for v in split] for split in values.values()] - ax = fig.add_subplot(grid[1,0]) - colors = [cf.blue, cf.dkfz_blue] - plg.plot_grouped_bar_chart(cf, bar_values, groups, splits, errors=errors, colors=colors, ax=ax, legend=False, label_format="{:.1f}", - title="", ylabel=r"2D Perf.", xlabel="Metric", xticklabels=xlabels, yticklabels=[], ylim=(None,130 if in_percent else 1.3)) - plg.plt.tight_layout() - plg.plt.savefig(os.path.join(plot_dir, 'toy_static_results.png'), dpi=700) - -def analyze_test_df(dataset_name, exp_dir='', cf=None, logger=None, plot_dir=None): - evaluator_file = utils.import_module('evaluator', "evaluator.py") - if cf is None: - cf = get_cf(dataset_name, exp_dir) - cf.exp_dir = exp_dir - cf.test_dir = os.path.join(exp_dir, 'test') - if logger is None: - logger = utils.get_logger(cf.exp_dir, False) - evaluator = evaluator_file.Evaluator(cf, logger, mode='test') - - fold_df_paths = sorted([ii for ii in os.listdir(cf.test_dir) if 'test_df.pkl' in ii]) - fold_seg_df_paths = sorted([ii for ii in os.listdir(cf.test_dir) if 'test_seg_df.pkl' in ii]) - metrics_to_score = ['ap', 'auc']#, 'patient_ap', 'patient_auc', 'patient_dice'] #'rg_bin_accuracy_weighted_tp', 'rg_MAE_w_std_weighted_tp'] #cf.metrics - if cf.evaluate_fold_means: - means_to_score = [m for m in metrics_to_score] #+ ['rg_MAE_w_std_weighted_tp'] - #metrics_to_score += ['rg_MAE_std'] - metrics_to_score = [] - - - cf.fold = 'overall' - dfs_list = [pd.read_pickle(os.path.join(cf.test_dir, ii)) for ii in fold_df_paths] - evaluator.test_df = pd.concat(dfs_list, sort=True) - - seg_dfs_list = [pd.read_pickle(os.path.join(cf.test_dir, ii)) for ii in fold_seg_df_paths] - if len(seg_dfs_list) > 0: - evaluator.seg_df = pd.concat(seg_dfs_list, sort=True) - - # stats, _ = evaluator.return_metrics(evaluator.test_df, cf.class_dict) - # results_table_path = os.path.join(cf.exp_dir, "../", "semi_man_summary.csv") - # # ---column headers--- - # col_headers = ["Experiment Name", "CV Folds", "Spatial Dim", "Clustering Kind", "Clustering IoU", "Merge-2D-to-3D IoU"] - # if hasattr(cf, "test_against_exact_gt"): - # col_headers.append('Exact GT') - # for s in stats: - # assert "overall" in s['name'].split(" ")[0] - # if cf.class_dict[cf.patient_class_of_interest] in s['name']: - # for metric in metrics_to_score: - # #if metric in s.keys() and not np.isnan(s[metric]): - # col_headers.append('{}_{} : {}'.format(*s['name'].split(" ")[1:], metric)) - # for mean in means_to_score: - # if mean == "rg_MAE_w_std_weighted_tp": - # col_headers.append('(MAE_fold_mean\u00B1std_fold_mean)\u00B1fold_mean_std\u00B1fold_std_std)'.format(*s['name'].split(" ")[1:], mean)) - # elif mean in s.keys() and not np.isnan(s[mean]): - # col_headers.append('{}_{} : {}'.format(*s['name'].split(" ")[1:], mean)) - # else: - # print("skipping {}".format(mean)) - # with open(results_table_path, 'a') as handle: - # with open(results_table_path, 'r') as doublehandle: - # last_header = 
doublehandle.readlines() - # if len(last_header)==0 or len(col_headers)!=len(last_header[1].split(",")[:-1]) or \ - # not all([col_headers[ix]==lhix for ix, lhix in enumerate(last_header[1].split(",")[:-1])]): - # handle.write('\n') - # for head in col_headers: - # handle.write(head+',') - # handle.write('\n') - # - # # --- columns content--- - # handle.write('{},'.format(cf.exp_dir.split(os.sep)[-1])) - # handle.write('{},'.format(str(evaluator.test_df.fold.unique().tolist()).replace(",", ""))) - # handle.write('{}D,'.format(cf.dim)) - # handle.write('{},'.format(cf.clustering)) - # handle.write('{},'.format(cf.clustering_iou if cf.clustering else str("N/A"))) - # handle.write('{},'.format(cf.merge_3D_iou if cf.merge_2D_to_3D_preds else str("N/A"))) - # if hasattr(cf, "test_against_exact_gt"): - # handle.write('{},'.format(cf.test_against_exact_gt)) - # for s in stats: - # if cf.class_dict[cf.patient_class_of_interest] in s['name']: - # for metric in metrics_to_score: - # #if metric in s.keys() and not np.isnan(s[metric]): # needed as long as no dice on patient level poss - # handle.write('{:0.3f}, '.format(s[metric])) - # for mean in means_to_score: - # #if metric in s.keys() and not np.isnan(s[metric]): - # if mean=="rg_MAE_w_std_weighted_tp": - # handle.write('({:0.3f}\u00B1{:0.3f})\u00B1({:0.3f}\u00B1{:0.3f}),'.format(*s[mean + "_folds_mean"], *s[mean + "_folds_std"])) - # elif mean in s.keys() and not np.isnan(s[mean]): - # handle.write('{:0.3f}\u00B1{:0.3f},'.format(s[mean+"_folds_mean"], s[mean+"_folds_std"])) - # else: - # print("skipping {}".format(mean)) - # - # handle.write('\n') - - return evaluator.test_df - -def cluster_results_to_df(dataset_name, exp_dir='', overall_df=None, cf=None, logger=None, plot_dir=None): - evaluator_file = utils.import_module('evaluator', "evaluator.py") - if cf is None: - cf = get_cf(dataset_name, exp_dir) - cf.exp_dir = exp_dir - cf.test_dir = os.path.join(exp_dir, 'test') - if logger is None: - logger = utils.get_logger(cf.exp_dir, False) - evaluator = evaluator_file.Evaluator(cf, logger, mode='test') - cf.fold = 'overall' - metrics_to_score = ['ap', 'auc']#, 'patient_ap', 'patient_auc', 'patient_dice'] #'rg_bin_accuracy_weighted_tp', 'rg_MAE_w_std_weighted_tp'] #cf.metrics - if cf.evaluate_fold_means: - means_to_score = [m for m in metrics_to_score] #+ ['rg_MAE_w_std_weighted_tp'] - #metrics_to_score += ['rg_MAE_std'] - metrics_to_score = [] - - # use passed overall_df or, if not given, read dfs from file - if overall_df is None: - fold_df_paths = sorted([ii for ii in os.listdir(cf.test_dir) if 'test_df.pkl' in ii]) - fold_seg_df_paths = sorted([ii for ii in os.listdir(cf.test_dir) if 'test_seg_df.pkl' in ii]) - for paths in [fold_df_paths, fold_seg_df_paths]: - assert len(paths) <= cf.n_cv_splits, "found {} > nr of cv splits results dfs in {}".format(len(paths), cf.test_dir) - dfs_list = [pd.read_pickle(os.path.join(cf.test_dir, ii)) for ii in fold_df_paths] - evaluator.test_df = pd.concat(dfs_list, sort=True) - - # seg_dfs_list = [pd.read_pickle(os.path.join(cf.test_dir, ii)) for ii in fold_seg_df_paths] - # if len(seg_dfs_list) > 0: - # evaluator.seg_df = pd.concat(seg_dfs_list, sort=True) - - else: - evaluator.test_df = overall_df - # todo seg_df if desired - - stats, _ = evaluator.return_metrics(evaluator.test_df, cf.class_dict) - # ---column headers--- - col_headers = ["Experiment Name", "Model", "CV Folds", "Spatial Dim", "Clustering Kind", "Clustering IoU", "Merge-2D-to-3D IoU"] - for s in stats: - assert "overall" in s['name'].split(" 
")[0] - if cf.class_dict[cf.patient_class_of_interest] in s['name']: - for metric in metrics_to_score: - #if metric in s.keys() and not np.isnan(s[metric]): - col_headers.append('{}_{} : {}'.format(*s['name'].split(" ")[1:], metric)) - for mean in means_to_score: - if mean in s.keys() and not np.isnan(s[mean]): - col_headers.append('{}_{} : {}'.format(*s['name'].split(" ")[1:], mean+"_folds_mean")) - else: - print("skipping {}".format(mean)) - results_df = pd.DataFrame(columns=col_headers) - # --- columns content--- - row = [] - row.append('{}'.format(cf.exp_dir.split(os.sep)[-1])) - model = 'frcnn' if (cf.model=="mrcnn" and cf.frcnn_mode) else cf.model - row.append('{}'.format(model)) - row.append('{}'.format(str(evaluator.test_df.fold.unique().tolist()).replace(",", ""))) - row.append('{}D'.format(cf.dim)) - row.append('{}'.format(cf.clustering)) - row.append('{}'.format(cf.clustering_iou if cf.clustering else "N/A")) - row.append('{}'.format(cf.merge_3D_iou if cf.merge_2D_to_3D_preds else "N/A")) - for s in stats: - if cf.class_dict[cf.patient_class_of_interest] in s['name']: - for metric in metrics_to_score: - #if metric in s.keys() and not np.isnan(s[metric]): # needed as long as no dice on patient level poss - row.append('{:0.3f} '.format(s[metric])) - for mean in means_to_score: - #if metric in s.keys() and not np.isnan(s[metric]): - if mean+"_folds_mean" in s.keys() and not np.isnan(s[mean+"_folds_mean"]): - row.append('{:0.3f}\u00B1{:0.3f}'.format(s[mean+"_folds_mean"], s[mean+"_folds_std"])) - else: - print("skipping {}".format(mean+"_folds_mean")) - #print("row, clustering, iou, exp", row, cf.clustering, cf.clustering_iou, cf.exp_dir) - results_df.loc[0] = row - - return results_df - -def multiple_clustering_results(dataset_name, exp_dir, plot_dir=None, plot_hist=False): - print("Gathering exp {}".format(exp_dir)) - cf = get_cf(dataset_name, exp_dir) - cf.n_workers = 1 - logger = logging.getLogger("dummy") - logger.setLevel(logging.DEBUG) - #logger.addHandler(logging.StreamHandler()) - cf.exp_dir = exp_dir - cf.test_dir = os.path.join(exp_dir, 'test') - cf.plot_prediction_histograms = False - if plot_dir is None: - #plot_dir = os.path.join(cf.test_dir, 'histograms') - plot_dir = os.path.join("datasets", dataset_name, "misc") - os.makedirs(plot_dir, exist_ok=True) - - # fold_dirs = sorted([os.path.join(cf.exp_dir, f) for f in os.listdir(cf.exp_dir) if - # os.path.isdir(os.path.join(cf.exp_dir, f)) and f.startswith("fold")]) - folds = range(cf.n_cv_splits) - clusterings = {None: ['lol'], 'wbc': [0.0, 0.1, 0.2, 0.3, 0.4], 'nms': [0.0, 0.1, 0.2, 0.3, 0.4]} - #clusterings = {'wbc': [0.1,], 'nms': [0.1,]} - #clusterings = {None: ['lol']} - if plot_hist: - clusterings = {None: ['lol'], 'nms': [0.1, ], 'wbc': [0.1, ]} - class_of_interest = cf.patient_class_of_interest - - try: - if plot_hist: - title_fs, text_fs = 16, 13 - fig = plg.plt.figure(figsize=(11, 8)) #width, height - grid = plg.plt.GridSpec(len(clusterings.keys()), max([len(v) for v in clusterings.values()])+1, wspace=0.0, - hspace=0.0, figure=fig) #rows, cols - plg.plt.suptitle("Faster R-CNN+", fontsize=title_fs, va='bottom', y=0.925) - - results_df = pd.DataFrame() - for cl_ix, (clustering, ious) in enumerate(clusterings.items()): - cf.clustering = clustering - for iou_ix, iou in enumerate(ious): - cf.clustering_iou = iou - print(r"Producing Results for Clustering {} @ IoU {}".format(cf.clustering, cf.clustering_iou)) - overall_test_df = pd.DataFrame() - for fold in folds[:]: - cf.fold = fold - cf.fold_dir = 
os.path.join(cf.exp_dir, 'fold_{}'.format(cf.fold)) - - predictor = predictor_file.Predictor(cf, net=None, logger=logger, mode='analysis') - results_list = predictor.load_saved_predictions() - logger.info('starting evaluation...') - evaluator = evaluator_file.Evaluator(cf, logger, mode='test') - evaluator.evaluate_predictions(results_list) - #evaluator.score_test_df(max_fold=100) - overall_test_df = overall_test_df.append(evaluator.test_df) - - results_df = results_df.append(cluster_results_to_df(dataset_name, overall_df=overall_test_df,cf=cf, - logger=logger)) - - if plot_hist: - if clustering=='wbc' and iou_ix==len(ious)-1: - # plot n_missing histogram for last wbc clustering only - out_filename = os.path.join(plot_dir, 'analysis_n_missing_overall_hist_{}_{}.png'.format(clustering, iou)) - ax = fig.add_subplot(grid[cl_ix, iou_ix+1]) - plg.plot_wbc_n_missing(cf, overall_test_df, outfile=out_filename, fs=text_fs, ax=ax) - ax.set_title("WBC Missing Predictions per Cluster.", fontsize=title_fs) - #ax.set_ylabel(r"Average Missing Preds per Cluster (%)") - ax.yaxis.tick_right() - ax.yaxis.set_label_position("right") - ax.text(0.07, 0.87, "{}) WBC".format(chr(len(clusterings.keys())*len(ious)+97)), transform=ax.transAxes, color=cf.white, fontsize=title_fs, - bbox=dict(boxstyle='square', facecolor='black', edgecolor='none', alpha=0.9)) - overall_test_df = overall_test_df[overall_test_df.pred_class == class_of_interest] - overall_test_df = overall_test_df[overall_test_df.det_type!='patient_tn'] - out_filename = "analysis_fold_overall_hist_{}_{}.png".format(clustering, iou) - out_filename = os.path.join(plot_dir, out_filename) - ax = fig.add_subplot(grid[cl_ix, iou_ix]) - plg.plot_prediction_hist(cf, overall_test_df, out_filename, fs=text_fs, ax=ax) - ax.text(0.11, 0.87, "{}) {}".format(chr((cl_ix+1)*len(ious)+96), clustering.upper() if clustering else "Raw Preds"), transform=ax.transAxes, color=cf.white, - bbox=dict(boxstyle='square', facecolor='black', edgecolor='none', alpha=0.9), fontsize=title_fs) - if cl_ix==0 and iou_ix==0: - ax.set_title("Prediction Histograms Malignant Class", fontsize=title_fs) - ax.legend(loc="best", fontsize=text_fs) - else: - ax.set_title("") - #analyze_test_df(dataset_name, cf=cf, logger=logger) - if plot_hist: - #plg.plt.subplots_adjust(top=0.) 
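# A minimal, self-contained aside on the accumulation pattern in this loop above
# (overall_test_df.append / results_df.append): DataFrame.append was deprecated in
# pandas 1.4 and removed in 2.0. On newer pandas the equivalent is to collect the
# per-fold frames in a list and concatenate once. Dummy data, for illustration only.
import pandas as pd
fold_frames = [pd.DataFrame({"fold": [f], "ap": [0.5 + 0.01 * f]}) for f in range(5)]
overall_df_new_pandas = pd.concat(fold_frames, ignore_index=True, sort=True)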
- plg.plt.savefig(os.path.join(plot_dir, "combined_hist_plot.pdf"), dpi=600, bbox_inches='tight') - - except FileNotFoundError as e: - print("Ignoring exp dir {} due to\n{}".format(exp_dir, e)) - logger.handlers = [] - del cf; del logger - return results_df - -def gather_clustering_results(dataset_name, exp_parent_dir, exps_filter=None, processes=os.cpu_count()//2): - exp_dirs = [os.path.join(exp_parent_dir, i) for i in os.listdir(exp_parent_dir + "/") if - os.path.isdir(os.path.join(exp_parent_dir, i))]#[:1] - if exps_filter is not None: - exp_dirs = [ed for ed in exp_dirs if not exps_filter in ed] - # for debugging - #exp_dir = "/home/gregor/networkdrives/E132-Cluster-Projects/prostate/experiments/gs6071_frcnn3d_cl_bs6" - #exp_dirs = [exp_dir,] - #exp_dirs = ["/home/gregor/networkdrives/E132-Cluster-Projects/prostate/experiments/gs6071_detfpn2d_cl_bs10",] - - results_df = pd.DataFrame() - - p = NoDaemonProcessPool(processes=processes) - mp_inputs = [(dataset_name, exp_dir) for exp_dir in exp_dirs][:] - results_dfs = p.starmap(multiple_clustering_results, mp_inputs) - p.close() - p.join() - for df in results_dfs: - results_df = results_df.append(df) - - results_df.to_csv(os.path.join(exp_parent_dir, "df_cluster_summary.csv"), index=False) - - return results_df - -def plot_cluster_results_grid(cf, res_df, ylim=None, out_file=None): - """ - :param cf: - :param res_df: results over a single dimension setting (2D or 3D), over all clustering methods and ious. - :param out_file: - :return: - """ - is_2d = np.all(res_df["Spatial Dim"]=="2D") - # pandas has problems with recognising "N/A" string --> replace by None - #res_df['Merge-2D-to-3D IoU'].iloc[res_df['Merge-2D-to-3D IoU'] == "N/A"] = None - n_rows = 3#4 if is_2d else 3 - grid = plg.plt.GridSpec(n_rows, 5, wspace=0.4, hspace=0.3) - - fig = plg.plt.figure(figsize=(11,6)) - - splits = res_df["Model"].unique().tolist() # need to be model names - for split in splits: - assoc_exps = res_df[res_df["Model"]==split]["Experiment Name"].unique() - if len(assoc_exps)>1: - print("Model {} has multiple experiments:\n{}".format(split, assoc_exps)) - #res_df = res_df.where(~(res_df["Model"] == split), res_df["Experiment Name"], axis=0) - raise Exception("Multiple Experiments") - - sort_map = {'detection_fpn': 0, 'mrcnn':1, 'frcnn':2, 'retina_net':3, 'retina_unet':4} - splits.sort(key=sort_map.__getitem__) - #colors = [cf.color_palette[ix+3 % len(cf.color_palette)] for ix in range(len(splits))] - color_map = {'detection_fpn': cf.magenta, 'mrcnn':cf.blue, 'frcnn': cf.dark_blue, 'retina_net': cf.aubergine, 'retina_unet': cf.purple} - - colors = [color_map[split] for split in splits] - alphas = [0.9,] * len(splits) - legend_handles = [] - model_renamer = {'detection_fpn': "Detection U-Net", 'mrcnn': "Mask R-CNN", 'frcnn': "Faster R-CNN+", 'retina_net': "RetinaNet", 'retina_unet': "Retina U-Net"} - - for rix, c_kind in zip([0, 1],['wbc', 'nms']): - kind_df = res_df[res_df['Clustering Kind'] == c_kind] - groups = kind_df['Clustering IoU'].unique() - #for cix, iou in enumerate(groups): - assert np.all([split in splits for split in kind_df["Model"].unique()]) #need to be model names - ax = fig.add_subplot(grid[rix,:]) - bar_values = [kind_df[kind_df["Model"]==split]["rois_malignant : ap_folds_mean"] for split in splits] - bar_stds = [[float(val.split('\u00B1')[1]) for val in split_vals] for split_vals in bar_values] - bar_values = [ [float(val.split('\u00B1')[0]) for val in split_vals] for split_vals in bar_values ] - - - xlabel='' if rix == 0 else "Clustering 
IoU" - ylabel = str(c_kind.upper()) + " / AP" - lh = plg.plot_grouped_bar_chart(cf, bar_values, groups, splits, colors=colors, alphas=alphas, errors=bar_stds, - ax=ax, ylabel=ylabel, xlabel=xlabel) - legend_handles.append(lh) - if rix == 0: - ax.axes.get_xaxis().set_ticks([]) - #ax.spines['top'].set_visible(False) - #ax.spines['right'].set_visible(False) - ax.spines['bottom'].set_visible(False) - #ax.spines['left'].set_visible(False) - else: - ax.spines['top'].set_visible(False) - #ticklab = ax.xaxis.get_ticklabels() - #trans = ticklab.get_transform() - ax.xaxis.set_label_coords(0.05, -0.05) - ax.set_ylim(0.,ylim) - - if is_2d: - # only 2d-3d merging @ 0.1 - ax = fig.add_subplot(grid[2, 1]) - kind_df = res_df[(res_df['Clustering Kind'] == 'None') & ~(res_df['Merge-2D-to-3D IoU'].isna())] - groups = kind_df['Clustering IoU'].unique() - bar_values = [kind_df[kind_df["Model"] == split]["rois_malignant : ap_folds_mean"] for split in splits] - bar_stds = [[float(val.split('\u00B1')[1]) for val in split_vals] for split_vals in bar_values] - bar_values = np.array([[float(val.split('\u00B1')[0]) for val in split_vals] for split_vals in bar_values]) - lh = plg.plot_grouped_bar_chart(cf, bar_values, groups, splits, colors=colors, alphas=alphas, errors=bar_stds, - ax=ax, ylabel="2D-3D Merging\nOnly / AP") - legend_handles.append(lh) - ax.axes.get_xaxis().set_ticks([]) - ax.spines['top'].set_visible(False) - ax.spines['right'].set_visible(False) - ax.spines['bottom'].set_visible(False) - ax.spines['left'].set_visible(False) - ax.set_ylim(0., ylim) - - next_row = 2 - next_col = 2 - else: - next_row = 2 - next_col = 2 - - # No clustering at all - ax = fig.add_subplot(grid[next_row, next_col]) - kind_df = res_df[(res_df['Clustering Kind'] == 'None') & (res_df['Merge-2D-to-3D IoU'].isna())] - groups = kind_df['Clustering IoU'].unique() - bar_values = [kind_df[kind_df["Model"] == split]["rois_malignant : ap_folds_mean"] for split in splits] - bar_stds = [[float(val.split('\u00B1')[1]) for val in split_vals] for split_vals in bar_values] - bar_values = np.array([[float(val.split('\u00B1')[0]) for val in split_vals] for split_vals in bar_values]) - lh = plg.plot_grouped_bar_chart(cf, bar_values, groups, splits, colors=colors, alphas=alphas, errors=bar_stds, - ax=ax, ylabel="No Clustering / AP") - legend_handles.append(lh) - #plg.suppress_axes_lines(ax) - #ax = fig.add_subplot(grid[next_row, 0]) - #ax.set_ylabel("No Clustering") - #plg.suppress_axes_lines(ax) - ax.axes.get_xaxis().set_ticks([]) - ax.spines['top'].set_visible(False) - ax.spines['right'].set_visible(False) - ax.spines['bottom'].set_visible(False) - ax.spines['left'].set_visible(False) - ax.set_ylim(0., ylim) - - - ax = fig.add_subplot(grid[next_row, 3]) - # awful hot fix: only legend_handles[0] used in order to have same order as in plots. 
- legend_handles = [plg.mpatches.Patch(color=handle[0], alpha=handle[1], label=model_renamer[handle[2]]) for handle in legend_handles[0]] - ax.legend(handles=legend_handles) - ax.axis('off') - - fig.suptitle('Prostate {} Results over Clustering Settings'.format(res_df["Spatial Dim"].unique().item()), fontsize=14) - - if out_file is not None: - plg.plt.savefig(out_file) - - return - -def get_plot_clustering_results(dataset_name, exp_parent_dir, res_from_file=True, exps_filter=None): - if not res_from_file: - results_df = gather_clustering_results(dataset_name, exp_parent_dir, exps_filter=exps_filter) - else: - results_df = pd.read_csv(os.path.join(exp_parent_dir, "df_cluster_summary.csv")) - if os.path.isfile(os.path.join(exp_parent_dir, "df_cluster_summary_no_clustering_2D.csv")): - results_df = results_df.append(pd.read_csv(os.path.join(exp_parent_dir, "df_cluster_summary_no_clustering_2D.csv"))) - - cf = get_cf(dataset_name) - if np.count_nonzero(results_df["Spatial Dim"] == "3D") >0: - # 3D - plot_cluster_results_grid(cf, results_df[results_df["Spatial Dim"] == "3D"], ylim=0.52, out_file=os.path.join(exp_parent_dir, "cluster_results_3D.pdf")) - if np.count_nonzero(results_df["Spatial Dim"] == "2D") > 0: - # 2D - plot_cluster_results_grid(cf, results_df[results_df["Spatial Dim"]=="2D"], ylim=0.4, out_file=os.path.join(exp_parent_dir, "cluster_results_2D.pdf")) - - -def plot_single_results(cf, exp_dir, plot_files, res_df=None): - out_file = os.path.join(exp_dir, "inference_analysis", "single_results.pdf") - - plot_files = utils.load_obj(plot_files) - batch = plot_files["batch"] - results_dict = plot_files["res_dict"] - cf.roi_items = ['class_targets'] - - class_renamer = {1: "GS 6", 2: "GS $\geq 7$"} - gs_renamer = {60: "6", 71: "7a"} - - if "adcb" in exp_dir: - modality = "adcb" - elif "t2" in exp_dir: - modality = "t2" - else: - modality = "b" - text_fs = 16 - - if modality=="t2": - n_rows, n_cols = 2, 3 - gt_col = 1 - fig_w, fig_h = 14, 4 - input_x, input_y = 0.05, 0.9 - z_ix = 11 - thresh = 0.22 - input_title = "Input" - elif modality=="b": - n_rows, n_cols = 2, 6 - gt_col = 2 # = gt_span - fig_w, fig_h = 14, 4 - input_x, input_y = 0.08, 0.8 - z_ix = 8 - thresh = 0.16 - input_title = " Input" - elif modality=="adcb": - n_rows, n_cols = 2, 7 - gt_col = 3 - fig_w, fig_h = 14, 4 - input_x, input_y = 0.08, 0.8 - z_ix = 8 - thresh = 0.16 - input_title = "Input" - fig_w, fig_h = 12, 3.87 - fig = plg.plt.figure(figsize=(fig_w, fig_h)) - grid = plg.plt.GridSpec(n_rows, n_cols, wspace=0.0, hspace=0.0, figure=fig) - cf.plot_class_ids = True - - if modality=="t2": - ax = fig.add_subplot(grid[:, 0]) - ax.imshow(batch['patient_data'][0, 0, :, :, z_ix], cmap='gray') - ax.set_title("Input", size=text_fs) - ax.text(0.05, 0.9, "T2", size=text_fs, color=cf.white, transform=ax.transAxes, - bbox=dict(facecolor=cf.black, alpha=0.7, edgecolor=cf.white, clip_on=False, pad=7)) - ax.axis("off") - elif modality=="b": - for m_ix, b in enumerate([50, 500, 1000, 1500]): - ax = fig.add_subplot(grid[int(np.round(m_ix/4+0.0001)), m_ix%2]) - print(int(np.round(m_ix/4+0.0001)), m_ix%2) - ax.imshow(batch['patient_data'][0, m_ix, :, :, z_ix], cmap='gray') - ax.text(input_x, input_y, r"{}{}".format("$b=$" if m_ix==0 else "", b), size=text_fs, color=cf.white, transform=ax.transAxes, - bbox=dict(facecolor=cf.black, alpha=0.7, edgecolor=cf.white, clip_on=False, pad=7)) - ax.axis("off") - if b==50: - ax.set_title(input_title, size=text_fs) - elif modality=="adcb": - for m_ix, b in enumerate(["ADC", 50, 500, 1000, 1500]): - 
p_ix = m_ix + 1 if m_ix>2 else m_ix - ax = fig.add_subplot(grid[int(np.round(p_ix/6+0.0001)), p_ix%3]) - print(int(np.round(p_ix/4+0.0001)), p_ix%2) - ax.imshow(batch['patient_data'][0, m_ix, :, :, z_ix], cmap='gray') - ax.text(input_x, input_y, r"{}{}".format("$b=$" if m_ix==1 else "", b), size=text_fs, color=cf.white, transform=ax.transAxes, - bbox=dict(facecolor=cf.black, alpha=0.7, edgecolor=cf.white, clip_on=False, pad=7)) - ax.axis("off") - if b==50: - ax.set_title(input_title, size=text_fs) - - ax_gt = fig.add_subplot(grid[:, gt_col:gt_col+2]) # GT - ax_pred = fig.add_subplot(grid[:, gt_col+2:gt_col+4]) # Prediction - #ax.imshow(batch['patient_data'][0, 0, :, :, z_ix], cmap='gray') - #ax.imshow(batch['patient_data'][0, 0, :, :, z_ix], cmap='gray') - #ax.imshow(plg.to_rgba(batch['patient_seg'][0,0,:,:,z_ix], cf.cmap), alpha=0.8) - plg.view_batch_thesis(cf, batch, res_dict=results_dict, legend=True, sample_picks=None, patient_items=True, - vol_slice_picks=[z_ix,], show_gt_labels=True, box_score_thres=thresh, plot_mods=True, - out_file=None, dpi=600, return_fig=False, axes={'gt':ax_gt, 'pred':ax_pred}, fontsize=text_fs) - - - ax_gt.set_title("Ground Truth", size=text_fs) - ax_pred.set_title("Prediction", size=text_fs) - texts = list(ax_gt.texts) - ax_gt.texts = [] - for text in texts: - cl_id = int(text.get_text()) - x, y = text.get_position() - text_str = "GS="+str(gs_renamer[cf.class_id2label[cl_id].gleasons[0]]) - ax_gt.text(x-4*text_fs//2, y, text_str, color=text.get_color(), - fontsize=text_fs, bbox=dict(facecolor=text.get_bbox_patch().get_facecolor(), alpha=0.7, edgecolor='none', clip_on=True, pad=0)) - texts = list(ax_pred.texts) - ax_pred.texts = [] - for text in texts: - x, y = text.get_position() - x -= 4 * text_fs // 2 - try: - cl_id = int(text.get_text()) - text_str = class_renamer[cl_id] - except ValueError: - text_str = text.get_text() - if text.get_bbox_patch().get_facecolor()[:3]==cf.dark_green: - x -= 4* text_fs - ax_pred.text(x, y, text_str, color=text.get_color(), - fontsize=text_fs, bbox=dict(facecolor=text.get_bbox_patch().get_facecolor(), alpha=0.7, edgecolor='none', clip_on=True, pad=0)) - - ax_gt.axis("off") - ax_pred.axis("off") - - plg.plt.tight_layout() - - if out_file is not None: - plg.plt.savefig(out_file, dpi=600, bbox_inches='tight') - - - - return - -def find_suitable_examples(exp_dir1, exp_dir2): - test_df1 = analyze_test_df('lidc',exp_dir1) - test_df2 = analyze_test_df('lidc', exp_dir2) - test_df1 = test_df1[test_df1.pred_score>0.3] - test_df2 = test_df2[test_df2.pred_score > 0.3] - - tp_df1 = test_df1[test_df1.det_type == 'det_tp'] - - tp_pids = tp_df1.pid.unique() - tp_fp_pids = test_df2[(test_df2.pid.isin(tp_pids)) & - ((test_df2.regressions-test_df2.rg_targets).abs()>1)].pid.unique() - cand_df = tp_df1[tp_df1.pid.isin(tp_fp_pids)] - sorter = (cand_df.regressions - cand_df.rg_targets).abs().argsort() - cand_df = cand_df.iloc[sorter] - print("Good guesses for examples: ", cand_df.pid.unique()[:20]) - return - -def plot_single_results_lidc(): - dataset_name = 'lidc' - exp_dir1 = '/home/gregor/Documents/medicaldetectiontoolkit/datasets/lidc/experiments/ms12345_mrcnn3d_rg_copiedparams' - exp_dir2 = '/home/gregor/Documents/medicaldetectiontoolkit/datasets/lidc/experiments/ms12345_mrcnn3d_rgbin_copiedparams' - cf = get_cf(dataset_name, exp_dir1) - #file_names = [f_name for f_name in os.listdir(os.path.join(exp_dir, 'inference_analysis')) if f_name.endswith('.pkl')] - # file_names = [os.path.join(exp_dir, "inference_analysis", f_name) for f_name in 
file_names] - file_names = ['bytes_merged_boxes_fold_0_pid_0296a.pkl', 'bytes_merged_boxes_fold_2_pid_0416a.pkl', - 'bytes_merged_boxes_fold_1_pid_0635a.pkl', "bytes_merged_boxes_fold_0_pid_0811a.pkl", - "bytes_merged_boxes_fold_0_pid_0969a.pkl", - # 'bytes_merged_boxes_fold_0_pid_0484a.pkl', 'bytes_merged_boxes_fold_0_pid_0492a.pkl', - # 'bytes_merged_boxes_fold_0_pid_0505a.pkl','bytes_merged_boxes_fold_2_pid_0164a.pkl', - # 'bytes_merged_boxes_fold_3_pid_0594a.pkl', - - - ] - z_ics = [167, 159, - 107, 194, - 177, - # 84, 145, - # 212, 219, - # 67 - ] - plot_files = [ - {'files': [os.path.join(exp_dir, "inference_analysis", f_name) for exp_dir in [exp_dir1, exp_dir2]], - 'z_ix': z_ix} for (f_name, z_ix) in zip(file_names, z_ics) - ] - - info_df_path = '/mnt/HDD2TB/Documents/data/lidc/pp_20190318/patient_gts_{}/info_df.pickle'.format(cf.training_gts) - info_df = pd.read_pickle(info_df_path) - - #cf.training_gts = 'sa' - cf.roi_items = ['regression_targets', 'rg_bin_targets_sa'] #['class_targets'] + cf.observables_rois - - text_fs = 8 - fig = plg.plt.figure(figsize=(6, 9.9)) #w, h - #fig = plg.plt.figure(figsize=(6, 6.5)) - #fig.subplots_adjust(hspace=0, wspace=0) - grid = plg.plt.GridSpec(len(plot_files), 3, wspace=0.0, hspace=0.0, figure=fig) #rows, cols - cf.plot_class_ids = True - - - for f_ix, pack in enumerate(plot_files): - z_ix = plot_files[f_ix]['z_ix'] - for model_ix in range(2)[::-1]: - print("f_ix, m_ix", f_ix, model_ix) - plot_file = utils.load_obj(plot_files[f_ix]['files'][model_ix]) - batch = plot_file["batch"] - pid = batch["pid"][0] - batch['patient_rg_bin_targets_sa'] = info_df[info_df.pid == pid]['class_target'].tolist() - # apply same filter as with merged GTs: need at least two non-zero votes to consider a RoI. - batch['patient_rg_bin_targets_sa'] = [[four_votes for four_votes in batch_el if - np.count_nonzero(four_votes>0)>=2] for batch_el in - batch['patient_rg_bin_targets_sa']] - results_dict = plot_file["res_dict"] - - # pred - ax = fig.add_subplot(grid[f_ix, model_ix+1]) - plg.view_batch_thesis(cf, batch, res_dict=results_dict, legend=True, sample_picks=None, - vol_slice_picks=[z_ix, ], show_gt_labels=True, box_score_thres=0.2, - plot_mods=False, - out_file=None, dpi=600, patient_items=True, return_fig=False, - axes={'pred': ax}) - if f_ix==0: - ax.set_title("{}".format("Reg R-CNN" if model_ix==0 else "Mask R-CNN"), size=text_fs*1.3) - else: - ax.set_title("") - - ax.axis("off") - #grid.tight_layout(fig) - - # GT - if model_ix==0: - ax = fig.add_subplot(grid[f_ix, 0]) - # ax.imshow(batch['patient_data'][0, 0, :, :, z_ix], cmap='gray') - # ax.imshow(plg.to_rgba(batch['patient_seg'][0,0,:,:,z_ix], cf.cmap), alpha=0.8) - boxes_fig = plg.view_batch_thesis(cf, batch, res_dict=results_dict, legend=True, sample_picks=None, - vol_slice_picks=[z_ix, ], show_gt_labels=True, box_score_thres=0.1, - plot_mods=False, seg_cmap="rg", - out_file=None, dpi=600, patient_items=True, return_fig=False, - axes={'gt':ax}) - ax.set_ylabel(r"$\mathbf{"+chr(f_ix+97)+")}$ " + ax.get_ylabel()) - ax.set_ylabel("") - if f_ix==0: - ax.set_title("Ground Truth", size=text_fs*1.3) - else: - ax.set_title("") - - - #fig_patches = fig_leg.get_patches() - patches= [plg.mpatches.Patch(color=label.color, label="{:.10s}".format(label.name)) for label in cf.bin_id2label.values() if not label.id in [0,]] - #fig.legends.append(fig_leg) - plg.plt.figlegend(handles=patches, loc="lower center", bbox_to_anchor=(0.5, 0.0), borderaxespad=0., - ncol=len(patches), bbox_transform=fig.transFigure, title="Binned 
Malignancy Score", - fontsize= text_fs) - plg.plt.tight_layout() - out_file = os.path.join(exp_dir1, "inference_analysis", "lidc_example_results_solarized.pdf") - if out_file is not None: - plg.plt.savefig(out_file, dpi=600, bbox_inches='tight') - - -def box_clustering(exp_dir='', plot_dir=None): - import datasets.prostate.data_loader as dl - cf = get_cf('prostate', exp_dir) - if plot_dir is None: - plot_dir = cf.plot_dir if hasattr(cf, 'plot_dir') else os.path.join('datasets', 'prostate', 'misc') - - fig = plg.plt.figure(figsize=(10, 4)) - #fig.subplots_adjust(hspace=0, wspace=0) - grid = plg.plt.GridSpec(2, 3, wspace=0.0, hspace=0., figure=fig) - fs = 14 - xyA = (.9, 0.5) - xyB = (0.05, .5) - - patch_size = np.array([200, 320]) - clustering_iou = 0.1 - img_y, img_x = patch_size - - boxes = [ - {'box_coords': [img_y * 0.2, img_x * 0.04, img_y * 0.55, img_x * 0.31], 'box_score': 0.45, 'box_cl': 1, - 'regression': 2., 'rg_bin': cf.rg_val_to_bin_id(1.), - 'box_patch_center_factor': 1., 'ens_ix': 1, 'box_n_overlaps': 1.}, - {'box_coords': [img_y*0.05, img_x*0.05, img_y*0.5, img_x*0.3], 'box_score': 0.85, 'box_cl': 2, - 'regression': 1., 'rg_bin': cf.rg_val_to_bin_id(1.), - 'box_patch_center_factor': 1., 'ens_ix':1, 'box_n_overlaps':1.}, - {'box_coords': [img_y * 0.1, img_x * 0.2, img_y * 0.4, img_x * 0.7], 'box_score': 0.95, 'box_cl': 2, - 'regression': 1., 'rg_bin': cf.rg_val_to_bin_id(1.), - 'box_patch_center_factor': 1., 'ens_ix':1, 'box_n_overlaps':1.}, - {'box_coords': [img_y * 0.80, img_x * 0.35, img_y * 0.95, img_x * 0.85], 'box_score': 0.6, 'box_cl': 2, - 'regression': 1., 'rg_bin': cf.rg_val_to_bin_id(1.), - 'box_patch_center_factor': 1., 'ens_ix': 1, 'box_n_overlaps': 1.}, - {'box_coords': [img_y * 0.85, img_x * 0.4, img_y * 0.93, img_x * 0.9], 'box_score': 0.85, 'box_cl': 2, - 'regression': 1., 'rg_bin': cf.rg_val_to_bin_id(1.), - 'box_patch_center_factor': 1., 'ens_ix':1, 'box_n_overlaps':1.}, - ] - for box in boxes: - c = box['box_coords'] - box_centers = np.array([(c[ii + 2] - c[ii]) / 2 for ii in range(len(c) // 2)]) - box['box_patch_center_factor'] = np.mean( - [norm.pdf(bc, loc=pc, scale=pc * 0.8) * np.sqrt(2 * np.pi) * pc * 0.8 for bc, pc in - zip(box_centers, patch_size / 2)]) - print("pc fact", box['box_patch_center_factor']) - - box_coords = np.array([box['box_coords'] for box in boxes]) - box_scores = np.array([box['box_score'] for box in boxes]) - box_cl_ids = np.array([box['box_cl'] for box in boxes]) - ax0 = fig.add_subplot(grid[:,:2]) - plg.plot_boxes(cf, box_coords, patch_size, box_scores, box_cl_ids, out_file=os.path.join(plot_dir, "demo_boxes_unclustered.png"), ax=ax0) - ax0.text(*xyA, 'a) Raw ', horizontalalignment='right', verticalalignment='center', transform=ax0.transAxes, - weight='bold', fontsize=fs) - - nms_boxes = [] - for cl in range(1,3): - cl_boxes = [box for box in boxes if box['box_cl'] == cl ] - box_coords = np.array([box['box_coords'] for box in cl_boxes]) - box_scores = np.array([box['box_score'] for box in cl_boxes]) - if 0 not in box_scores.shape: - keep_ix = mutils.nms_numpy(box_coords, box_scores, thresh=clustering_iou) - else: - keep_ix = [] - nms_boxes += [cl_boxes[ix] for ix in keep_ix] - box_coords = np.array([box['box_coords'] for box in nms_boxes]) - box_scores = np.array([box['box_score'] for box in nms_boxes]) - box_cl_ids = np.array([box['box_cl'] for box in nms_boxes]) - ax1 = fig.add_subplot(grid[1, 2]) - nms_color = cf.black - plg.plot_boxes(cf, box_coords, patch_size, box_scores, box_cl_ids, out_file=os.path.join(plot_dir, 
"demo_boxes_nms_iou_{}.png".format(clustering_iou)), ax=ax1) - ax1.text(*xyB, ' c) NMS', horizontalalignment='left', verticalalignment='center', transform=ax1.transAxes, - weight='bold', color=nms_color, fontsize=fs) - - #------ WBC ------------------- - regress_flag = False - - wbc_boxes = [] - for cl in range(1,3): - cl_boxes = [box for box in boxes if box['box_cl'] == cl] - box_coords = np.array([box['box_coords'] for box in cl_boxes]) - box_scores = np.array([box['box_score'] for box in cl_boxes]) - box_center_factor = np.array([b['box_patch_center_factor'] for b in cl_boxes]) - box_n_overlaps = np.array([b['box_n_overlaps'] for b in cl_boxes]) - box_ens_ix = np.array([b['ens_ix'] for b in cl_boxes]) - box_regressions = np.array([b['regression'] for b in cl_boxes]) if regress_flag else None - box_rg_bins = np.array([b['rg_bin'] if 'rg_bin' in b.keys() else float('NaN') for b in cl_boxes]) - box_rg_uncs = np.array([b['rg_uncertainty'] if 'rg_uncertainty' in b.keys() else float('NaN') for b in cl_boxes]) - if 0 not in box_scores.shape: - keep_scores, keep_coords, keep_n_missing, keep_regressions, keep_rg_bins, keep_rg_uncs = \ - predictor_file.weighted_box_clustering(box_coords, box_scores, box_center_factor, box_n_overlaps, box_rg_bins, box_rg_uncs, - box_regressions, box_ens_ix, clustering_iou, n_ens=1) - - for boxix in range(len(keep_scores)): - clustered_box = {'box_type': 'det', 'box_coords': keep_coords[boxix], - 'box_score': keep_scores[boxix], 'cluster_n_missing': keep_n_missing[boxix], - 'box_pred_class_id': cl} - if regress_flag: - clustered_box.update({'regression': keep_regressions[boxix], - 'rg_uncertainty': keep_rg_uncs[boxix], - 'rg_bin': keep_rg_bins[boxix]}) - wbc_boxes.append(clustered_box) - - box_coords = np.array([box['box_coords'] for box in wbc_boxes]) - box_scores = np.array([box['box_score'] for box in wbc_boxes]) - box_cl_ids = np.array([box['box_pred_class_id'] for box in wbc_boxes]) - ax2 = fig.add_subplot(grid[0, 2]) - wbc_color = cf.black - plg.plot_boxes(cf, box_coords, patch_size, box_scores, box_cl_ids, out_file=os.path.join(plot_dir, "demo_boxes_wbc_iou_{}.png".format(clustering_iou)), ax=ax2) - ax2.text(*xyB, ' b) WBC', horizontalalignment='left', verticalalignment='center', transform=ax2.transAxes, - weight='bold', color=wbc_color, fontsize=fs) - # ax2.spines['bottom'].set_color(wbc_color) - # ax2.spines['top'].set_color(wbc_color) - # ax2.spines['right'].set_color(wbc_color) - # ax2.spines['left'].set_color(wbc_color) - - from matplotlib.patches import ConnectionPatch - con = ConnectionPatch(xyA=xyA, xyB=xyB, coordsA="axes fraction", coordsB="axes fraction", - axesA=ax0, axesB=ax2, color=wbc_color, lw=1.5, arrowstyle='-|>') - ax0.add_artist(con) - - con = ConnectionPatch(xyA=xyA, xyB=xyB, coordsA="axes fraction", coordsB="axes fraction", - axesA=ax0, axesB=ax1, color=nms_color, lw=1.5, arrowstyle='-|>') - ax0.add_artist(con) - # ax0.text(0.5, 0.5, "Test", size=30, va="center", ha="center", rotation=30, - # bbox=dict(boxstyle="angled,pad=0.5", alpha=0.2)) - plg.plt.tight_layout() - plg.plt.savefig(os.path.join(plot_dir, "box_clustering.pdf"), bbox_inches='tight') - -def sketch_AP_AUC(plot_dir=None, draw_auc=True): - from sklearn.metrics import roc_curve, roc_auc_score - from understanding_metrics import get_det_types - import matplotlib.transforms as mtrans - cf = get_cf('prostate', '') - if plot_dir is None: - plot_dir = cf.plot_dir if hasattr(cf, 'plot_dir') else os.path.join('.') - - if draw_auc: - fig = plg.plt.figure(figsize=(7, 6)) #width, height 
- # fig.subplots_adjust(hspace=0, wspace=0) - grid = plg.plt.GridSpec(2, 2, wspace=0.23, hspace=.45, figure=fig) #rows, cols - else: - fig = plg.plt.figure(figsize=(12, 3)) #width, height - # fig.subplots_adjust(hspace=0, wspace=0) - grid = plg.plt.GridSpec(1, 3, wspace=0.23, hspace=.45, figure=fig) #rows, cols - fs = 13 - text_fs = 11 - optim_color = cf.dark_green - non_opt_color = cf.aubergine - - df = pd.DataFrame(columns=['pred_score', 'class_label', 'pred_class', 'det_type', 'match_iou']) - df2 = df.copy() - df["pred_score"] = [0,0.3,0.25,0.2, 0.8, 0.9, 0.9, 0.9, 0.9] - df["class_label"] = [0,0,0,0, 1, 1, 1, 1, 1] - df["det_type"] = get_det_types(df) - df["match_iou"] = [0.1] * len(df) - - df2["pred_score"] = [0, 0.77, 0.5, 1., 0.5, 0.35, 0.3, 0., 0.7, 0.85, 0.9] - df2["class_label"] = [0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1] - df2["det_type"] = get_det_types(df2) - df2["match_iou"] = [0.1] * len(df2) - - #------ PRC ------- - # optimal - if draw_auc: - ax = fig.add_subplot(grid[1, 0]) - else: - ax = fig.add_subplot(grid[0, 2]) - pr, rc = evaluator_file.compute_prc(df) - ax.plot(rc, pr, color=optim_color, label="Optimal Detection") - ax.fill_between(rc, pr, alpha=0.33, color=optim_color) - - # suboptimal - pr, rc = evaluator_file.compute_prc(df2) - ax.plot(rc, pr, color=non_opt_color, label="Suboptimal") - ax.fill_between(rc, pr, alpha=0.33, color=non_opt_color) - #plt.title() - #plt.legend(loc=3 if c == 'prc' else 4) - ax.set_ylabel('precision', fontsize=text_fs) - ax.set_ylim((0., 1.1)) - ax.set_xlabel('recall', fontsize=text_fs) - ax.set_title('Precision-Recall Curves', fontsize=fs) - #ax.legend(ncol=2, loc='center')#, bbox_to_anchor=(0.5, 1.05)) - - - #---- ROC curve - if draw_auc: - ax = fig.add_subplot(grid[1, 1]) - roc = roc_curve(df.class_label.tolist(), df.pred_score.tolist()) - ax.plot(roc[0], roc[1], color=optim_color) - ax.fill_between(roc[0], roc[1], alpha=0.33, color=optim_color) - ax.set_xlabel('false-positive rate', fontsize=text_fs) - ax.set_ylim((0., 1.1)) - ax.set_ylabel('recall', fontsize=text_fs) - - roc = roc_curve(df2.class_label.tolist(), df2.pred_score.tolist()) - ax.plot(roc[0], roc[1], color=non_opt_color) - ax.fill_between(roc[0], roc[1], alpha=0.33, color=non_opt_color) - - roc = ([0, 1], [0, 1]) - ax.plot(roc[0], roc[1], color=cf.gray, linestyle='dashed', label="random predictor") - - ax.set_title('ROC Curves', fontsize=fs) - ax.legend(ncol=2, loc='lower right', fontsize=text_fs) - - #--- hist optimal - text_left = 0.05 - ax = fig.add_subplot(grid[0, 0]) - tn_count = df.det_type.tolist().count('det_tn') - AUC = roc_auc_score(df.class_label, df.pred_score) - df = df[(df.det_type=="det_tp") | (df.det_type=="det_fp") | (df.det_type=="det_fn")] - labels = df.class_label.values - preds = df.pred_score.values - type_list = df.det_type.tolist() - - ax.hist(preds[labels == 0], alpha=0.3, color=cf.red, range=(0, 1), bins=50, label="FP") - ax.hist(preds[labels == 1], alpha=0.3, color=cf.blue, range=(0, 1), bins=50, label="FN at score 0 and TP") - #ax.axvline(x=cf.min_det_thresh, alpha=0.4, color=cf.orange, linewidth=1.5, label="min det thresh") - fp_count = type_list.count('det_fp') - fn_count = type_list.count('det_fn') - tp_count = type_list.count('det_tp') - pos_count = fn_count + tp_count - if draw_auc: - text = "AP: {:.2f} ROC-AUC: {:.2f}\n".format(evaluator_file.get_roi_ap_from_df((df, 0.0, False)), AUC) - else: - text = "AP: {:.2f}\n".format(evaluator_file.get_roi_ap_from_df((df, 0.0, False))) - text += 'TP: {} FP: {} FN: {} TN: {}\npositives: {}'.format(tp_count, 
fp_count, fn_count, tn_count, pos_count) - - ax.text(text_left,4, text, fontsize=text_fs) - ax.set_yscale('log') - ax.set_ylim(bottom=10**-2, top=10**2) - ax.set_xlabel("prediction score", fontsize=text_fs) - ax.set_ylabel("occurences", fontsize=text_fs) - #autoAxis = ax.axis() - # rec = plg.mpatches.Rectangle((autoAxis[0] - 0.7, autoAxis[2] - 0.2), (autoAxis[1] - autoAxis[0]) + 1, - # (autoAxis[3] - autoAxis[2]) + 0.4, fill=False, lw=2) - # rec = plg.mpatches.Rectangle((autoAxis[0] , autoAxis[2] ), (autoAxis[1] - autoAxis[0]) , - # (autoAxis[3] - autoAxis[2]) , fill=False, lw=2, color=optim_color) - # rec = ax.add_patch(rec) - # rec.set_clip_on(False) - plg.plt.setp(ax.spines.values(), color=optim_color, linewidth=2) - ax.set_facecolor((*optim_color,0.1)) - ax.set_title("Detection Histograms", fontsize=fs) - - ax = fig.add_subplot(grid[0, 1]) - tn_count = df2.det_type.tolist().count('det_tn') - AUC = roc_auc_score(df2.class_label, df2.pred_score) - df2 = df2[(df2.det_type=="det_tp") | (df2.det_type=="det_fp") | (df2.det_type=="det_fn")] - labels = df2.class_label.values - preds = df2.pred_score.values - type_list = df2.det_type.tolist() - - ax.hist(preds[labels == 0], alpha=0.3, color=cf.red, range=(0, 1), bins=50, label="FP") - ax.hist(preds[labels == 1], alpha=0.3, color=cf.blue, range=(0, 1), bins=50, label="FN at score 0 and TP") - # ax.axvline(x=cf.min_det_thresh, alpha=0.4, color=cf.orange, linewidth=1.5, label="min det thresh") - fp_count = type_list.count('det_fp') - fn_count = type_list.count('det_fn') - tp_count = type_list.count('det_tp') - pos_count = fn_count + tp_count - if draw_auc: - text = "AP: {:.2f} ROC-AUC: {:.2f}\n".format(evaluator_file.get_roi_ap_from_df((df2, 0.0, False)), AUC) - else: - text = "AP: {:.2f}\n".format(evaluator_file.get_roi_ap_from_df((df2, 0.0, False))) - text += 'TP: {} FP: {} FN: {} TN: {}\npositives: {}'.format(tp_count, fp_count, fn_count, tn_count, pos_count) - - ax.text(text_left, 4*10**0, text, fontsize=text_fs) - ax.set_yscale('log') - ax.margins(y=10e2) - ax.set_ylim(bottom=10**-2, top=10**2) - ax.set_xlabel("prediction score", fontsize=text_fs) - ax.set_yticks([]) - plg.plt.setp(ax.spines.values(), color=non_opt_color, linewidth=2) - ax.set_facecolor((*non_opt_color, 0.05)) - ax.legend(ncol=2, loc='upper center', bbox_to_anchor=(0.5, 1.18), fontsize=text_fs) - - if draw_auc: - # Draw a horizontal line - line = plg.plt.Line2D([0.1, .9], [0.48, 0.48], transform=fig.transFigure, color="black") - fig.add_artist(line) - - outfile = os.path.join(plot_dir, "metrics.png") - print("Saving plot to {}".format(outfile)) - plg.plt.savefig(outfile, bbox_inches='tight', dpi=600) - - return - -def draw_toy_cylinders(plot_dir=None): - source_path = "datasets/toy" - if plot_dir is None: - plot_dir = os.path.join(source_path, "misc") - #plot_dir = '/home/gregor/Dropbox/Thesis/Main/tmp' - os.makedirs(plot_dir, exist_ok=True) - - cf = get_cf('toy', '') - cf.pre_crop_size = [2200, 2200,1] #y,x,z; - #cf.dim = 2 - cf.ambiguities = {"radius_calib": (1., 1. 
/ 6) } - cf.pp_blur_min_intensity = 0.2 - - generate_toys = utils.import_module("generate_toys", os.path.join(source_path, 'generate_toys.py')) - ToyGen = generate_toys.ToyGenerator(cf) - - fig = plg.plt.figure(figsize=(10, 8.2)) #width, height - grid = plg.plt.GridSpec(4, 5, wspace=0.0, hspace=.0, figure=fig) #rows, cols - fs, text_fs = 16, 14 - text_x, text_y = 0.5, 0.85 - true_gt_col, dist_gt_col = cf.dark_green, cf.blue - true_cmap = {1:true_gt_col} - - img = np.random.normal(loc=0.0, scale=cf.noise_scale, size=ToyGen.sample_size) - img[img < 0.] = 0. - # one-hot-encoded seg - seg = np.zeros((cf.num_classes + 1, *ToyGen.sample_size)).astype('uint8') - undistorted_seg = np.copy(seg) - applied_gt_distort = False - - class_id, shape = 1, 'cylinder' - #all_radii = ToyGen.generate_sample_radii(class_ids, shapes) - enlarge_f = 20 - all_radii = np.array([np.mean(label.bin_vals) if label.id!=5 else label.bin_vals[0]+5 for label in cf.bin_labels if label.id!=0]) - bins = [(min(label.bin_vals), max(label.bin_vals)) for label in cf.bin_labels] - bin_edges = [(bins[i][1] + bins[i + 1][0])*enlarge_f / 2 for i in range(len(bins) - 1)] - all_radii = [np.array([r*enlarge_f, r*enlarge_f, 1]) for r in all_radii] # extend to required 3D format - regress_targets, undistorted_rg_targets = [], [] - ics = np.argwhere(np.ones(seg[0].shape)) # indices ics equal positions within img/volume - center = np.array([dim//2 for dim in img.shape]) - - # for illustrating GT distribution, keep scale same size - #x = np.linspace(mu - 300, mu + 300, 100) - x = np.linspace(0, 50*enlarge_f, 500) - ax_gauss = fig.add_subplot(grid[3, :]) - mus, sigmas = [], [] - - for roi_ix, radii in enumerate(all_radii): - print('processing {} {}'.format(roi_ix, radii)) - cur_img, cur_seg, cur_undistorted_seg, cur_regress_targets, cur_undistorted_rg_targets, cur_applied_gt_distort = \ - ToyGen.draw_object(img.copy(), seg.copy(), undistorted_seg, ics, regress_targets, undistorted_rg_targets, applied_gt_distort, - roi_ix, class_id, shape, np.copy(radii), center) - - ax = fig.add_subplot(grid[0,roi_ix]) - ax.imshow(cur_img[...,0], cmap='gray', vmin=0) - ax.set_title("r{}".format(roi_ix+1), fontsize=fs) - if roi_ix==0: - ax.set_ylabel(r"$\mathbf{a)}$ Input", fontsize=fs) - plg.suppress_axes_lines(ax) - else: - ax.axis('off') - - ax = fig.add_subplot(grid[1, roi_ix]) - ax.imshow(cur_img[..., 0], cmap='gray') - ax.imshow(plg.to_rgba(np.argmax(cur_undistorted_seg[...,0], axis=0), true_cmap), alpha=0.8) - ax.text(text_x, text_y, r"$r_{a}=$"+"{:.1f}".format(cur_undistorted_rg_targets[roi_ix][0]/enlarge_f), transform=ax.transAxes, - color=cf.white, bbox=dict(facecolor=true_gt_col, alpha=0.7, edgecolor=cf.white, clip_on=False,pad=2.5), - fontsize=text_fs, ha='center', va='center') - if roi_ix==0: - ax.set_ylabel(r"$\mathbf{b)}$ Exact GT", fontsize=fs) - plg.suppress_axes_lines(ax) - else: - ax.axis('off') - ax = fig.add_subplot(grid[2, roi_ix]) - ax.imshow(cur_img[..., 0], cmap='gray') - ax.imshow(plg.to_rgba(np.argmax(cur_seg[..., 0], axis=0), cf.cmap), alpha=0.7) - ax.text(text_x, text_y, r"$r_{a}=$"+"{:.1f}".format(cur_regress_targets[roi_ix][0]/enlarge_f), transform=ax.transAxes, - color=cf.white, bbox=dict(facecolor=cf.blue, alpha=0.7, edgecolor=cf.white, clip_on=False,pad=2.5), - fontsize=text_fs, ha='center', va='center') - if roi_ix == 0: - ax.set_ylabel(r"$\mathbf{c)}$ Noisy GT", fontsize=fs) - plg.suppress_axes_lines(ax) - else: - ax.axis('off') - - # GT distributions - assert radii[0]==radii[1] - mu, sigma = radii[0], radii[0] * 
cf.ambiguities["radius_calib"][1] - ax_gauss.axvline(mu, color=true_gt_col) - ax_gauss.text(mu, -0.003, "$r=${:.0f}".format(mu/enlarge_f), color=true_gt_col, fontsize=text_fs, ha='center', va='center', - bbox = dict(facecolor='none', alpha=0.7, edgecolor=true_gt_col, clip_on=False, pad=2.5)) - mus.append(mu); sigmas.append(sigma) - lower_bound = max(bin_edges[roi_ix], min(x))# if roi_ix>0 else 2*mu-bin_edges[roi_ix+1] - upper_bound = bin_edges[roi_ix+1] if len(bin_edges)>roi_ix+1 else max(x)#2*mu-bin_edges[roi_ix] - if roi_ix, head_length = 0.05, head_width = .005", lw=1)) - #ax_gauss.arrow(1, 0.5, 0., 0.1) - handles = [plg.mpatches.Patch(facecolor=dist_gt_col, label='Inexact Seg.', alpha=0.7, edgecolor='none'), - mlines.Line2D([], [], color=dist_gt_col, marker=r'$\curlywedge$', linestyle='none', markersize=11, label='GT Sampling Distr.'), - mlines.Line2D([], [], color=true_gt_col, marker='|', markersize=12, label='Exact GT Radius.', linestyle='none'), - plg.mpatches.Patch(facecolor=true_gt_col, label='a)-c) Exact Seg., d) Bin', alpha=0.7, edgecolor='none')] - fig.legend(handles=handles, loc="lower center", ncol=len(handles), fontsize=text_fs) - outfile = os.path.join(plot_dir, "toy_cylinders.png") - print("Saving plot to {}".format(outfile)) - plg.plt.savefig(outfile, bbox_inches='tight', dpi=600) - - - return - -def seg_det_cityscapes_example(plot_dir=None): - cf = get_cf('cityscapes', '') - source_path = "datasets/cityscapes" - if plot_dir is None: - plot_dir = os.path.join(source_path, "misc") - os.makedirs(plot_dir, exist_ok=True) - - - dl = utils.import_module("dl", os.path.join(source_path, 'data_loader.py')) - #from utils.dataloader_utils import ConvertSegToBoundingBoxCoordinates - data_set = dl.Dataset(cf) - Converter = dl.ConvertSegToBoundingBoxCoordinates(2, cf.roi_items) - - fig = plg.plt.figure(figsize=(9, 3)) #width, height - grid = plg.plt.GridSpec(1, 2, wspace=0.05, hspace=.0, figure=fig) #rows, cols - fs, text_fs = 12, 10 - - nice_imgs = ["bremen000099000019", "hamburg000000033506", "frankfurt000001058914",] - img_id = nice_imgs[2] - #img_id = np.random.choice(data_set.set_ids) - - - print("Selected img", img_id) - img = np.load(data_set[img_id]["img"]).transpose(1,2,0) - seg = np.load(data_set[img_id]["seg"]) - cl_targs = data_set[img_id]["class_targets"] - roi_ids = np.unique(seg[seg > 0]) - # ---- detection example ----- - cl_id2name = {1: "h", 2: "v"} - color_palette = [cf.purple, cf.aubergine, cf.magenta, cf.dark_blue, cf.blue, cf.bright_blue, cf.cyan, cf.dark_green, - cf.green, cf.dark_yellow, cf.yellow, cf.orange, cf.red, cf.dark_red, cf.bright_red] - n_colors = len(color_palette) - cmap = {roi_id : color_palette[(roi_id-1)%n_colors] for roi_id in roi_ids} - cmap[0] = (1,1,1,0.) 
- - ax = fig.add_subplot(grid[0, 1]) - ax.imshow(img) - ax.imshow(plg.to_rgba(seg, cmap), alpha=0.7) - - data_dict = Converter(**{'seg':seg[np.newaxis, np.newaxis], 'class_targets': [cl_targs]}) # needs batch dim and channel - for roi_ix, bb_target in enumerate(data_dict['bb_target'][0]): - [y1, x1, y2, x2] = bb_target - width, height = x2 - x1, y2 - y1 - cl_id = cl_targs[roi_ix] - label = cf.class_id2label[cl_id] - text_x, text_y = x2, y1 - id_text = cl_id2name[cl_id] - text_str = '{}'.format(id_text) - text_settings = dict(facecolor=label.color, alpha=0.5, edgecolor='none', clip_on=True, pad=0) - #ax.text(text_x, text_y, text_str, color=cf.white, bbox=text_settings, fontsize=text_fs, ha="center", va="center") - edgecolor = label.color - bbox = plg.mpatches.Rectangle((x1, y1), width, height, linewidth=1.05, edgecolor=edgecolor, facecolor='none') - ax.add_patch(bbox) - ax.axis('off') - - # ---- seg example ----- - for roi_id in roi_ids: - seg[seg==roi_id] = cl_targs[roi_id-1] - - ax = fig.add_subplot(grid[0,0]) - ax.imshow(img) - ax.imshow(plg.to_rgba(seg, cf.cmap), alpha=0.7) - ax.axis('off') - - plg.plt.tight_layout() - outfile = os.path.join(plot_dir, "cityscapes_example.png") - print("Saving plot to {}".format(outfile)) - plg.plt.savefig(outfile, bbox_inches='tight', dpi=600) - - - - - -if __name__=="__main__": - stime = time.time() - #seg_det_cityscapes_example() - #box_clustering() - #sketch_AP_AUC(draw_auc=False) - #draw_toy_cylinders() - #prostate_GT_examples(plot_dir="/home/gregor/Dropbox/Thesis/Main/MFPPresentation/graphics") - #prostate_results_static() - #prostate_dataset_stats(plot_dir="/home/gregor/Dropbox/Thesis/Main/MFPPresentation/graphics", show_splits=False) - #lidc_dataset_stats() - #lidc_sa_dataset_stats() - #lidc_annotator_confusion() - #lidc_merged_sa_joint_plot() - #lidc_annotator_dissent_images() - exp_dir = "/home/gregor/networkdrives/E132-Cluster-Projects/prostate/experiments/gs6071_frcnn3d_cl_bs6" - #multiple_clustering_results('prostate', exp_dir, plot_hist=True) - exp_parent_dir = "/home/gregor/networkdrives/E132-Cluster-Projects/prostate/experiments" - exp_parent_dir = "/home/gregor/networkdrives/E132-Cluster-Projects/prostate/experiments_debug_retinas" - #get_plot_clustering_results('prostate', exp_parent_dir, res_from_file=False) - - exp_dir = "/home/gregor/networkdrives/E132-Cluster-Projects/prostate/experiments/gs6071_frcnn3d_cl_bs6" - #cf = get_cf('prostate', exp_dir) - #plot_file = os.path.join(exp_dir, "inference_analysis/bytes_merged_boxes_fold_1_pid_177.pkl") - #plot_single_results(cf, exp_dir, plot_file) - - exp_dir1 = "/home/gregor/networkdrives/E132-Cluster-Projects/lidc_sa/experiments/ms12345_mrcnn3d_rg_bs8" - exp_dir2 = "/home/gregor/networkdrives/E132-Cluster-Projects/lidc_sa/experiments/ms12345_mrcnn3d_rgbin_bs8" - #find_suitable_examples(exp_dir1, exp_dir2) - #plot_single_results_lidc() - plot_dir = "/home/gregor/Dropbox/Thesis/MICCAI2019/Graphics" - #lidc_results_static(plot_dir=plot_dir) - #toy_results_static(plot_dir=plot_dir) - plot_lidc_dissent_and_example(plot_dir=plot_dir, confusion_matrix=True, numbering=False, example_title="LIDC example result") - - mins, secs = divmod((time.time() - stime), 60) - h, mins = divmod(mins, 60) - t = "{:d}h:{:02d}m:{:02d}s".format(int(h), int(mins), int(secs)) - print("{} total runtime: {}".format(os.path.split(__file__)[1], t)) \ No newline at end of file diff --git a/inference_analysis.py b/inference_analysis.py index cce9bc9..5ae0072 100644 --- a/inference_analysis.py +++ b/inference_analysis.py @@ 
-1,173 +1,165 @@ """for presentations etc""" import plotting as plg import sys import os import pickle import numpy as np import pandas as pd import torch import utils.exp_utils as utils import utils.model_utils as mutils from predictor import Predictor from evaluator import Evaluator def find_pid_in_splits(pid, exp_dir=None): if exp_dir is None: exp_dir = cf.exp_dir check_file = os.path.join(exp_dir, 'fold_ids.pickle') with open(check_file, 'rb') as handle: splits = pickle.load(handle) finds = [] for i, split in enumerate(splits): if pid in split: finds.append(i) print("Pid {} found in split {}".format(pid, i)) if not len(finds)==1: raise Exception("pid {} found in more than one split: {}".format(pid, finds)) return finds[0] def plot_train_forward(slices=None): with torch.no_grad(): batch = next(val_gen) results_dict = net.train_forward(batch, is_validation=True) #seg preds are int preds already out_file = os.path.join(anal_dir, "straight_val_inference_fold_{}".format(str(cf.fold))) plg.view_batch(cf, batch, res_dict=results_dict, show_info=False, legend=True, out_file=out_file, slices=slices) def plot_forward(pid, slices=None): with torch.no_grad(): batch = batch_gen['test'].generate_train_batch(pid=pid) results_dict = net.test_forward(batch) #seg preds are only seg_logits! need to take argmax. if 'seg_preds' in results_dict.keys(): results_dict['seg_preds'] = np.argmax(results_dict['seg_preds'], axis=1)[:,np.newaxis] out_file = os.path.join(anal_dir, "straight_inference_fold_{}_pid_{}".format(str(cf.fold), pid)) plg.view_batch(cf, batch, res_dict=results_dict, show_info=False, legend=True, show_gt_labels=True, out_file=out_file, sample_picks=slices) def plot_merged_boxes(results_list, pid, plot_mods=False, show_seg_ids="all", show_info=True, show_gt_boxes=True, s_picks=None, vol_slice_picks=None, score_thres=None): """ :param results_list: holds (results_dict, pid) :param pid: :return: """ results_dict = [res_dict for (res_dict, pid_) in results_list if pid_==pid][0] #seg preds are discarded in predictor pipeline. 
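# Shape note for the seg_preds handling in plot_forward above: seg_logits of shape
# (batch, n_classes, y, x(, z)) become integer prediction maps of shape
# (batch, 1, y, x(, z)) via argmax over the class axis. Dummy shapes for illustration:
import numpy as np
seg_logits = np.random.randn(2, 3, 4, 4)
seg_preds = np.argmax(seg_logits, axis=1)[:, np.newaxis]
print(seg_preds.shape)  # (2, 1, 4, 4)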
#del results_dict['seg_preds'] batch = batch_gen['test'].generate_train_batch(pid=pid) out_file = os.path.join(anal_dir, "merged_boxes_fold_{}_pid_{}_thres_{}.png".format(str(cf.fold), pid, str(score_thres).replace(".","_"))) utils.save_obj({'res_dict':results_dict, 'batch':batch}, os.path.join(anal_dir, "bytes_merged_boxes_fold_{}_pid_{}".format(str(cf.fold), pid))) plg.view_batch(cf, batch, res_dict=results_dict, show_info=show_info, legend=False, sample_picks=s_picks, show_seg_pred=True, show_seg_ids=show_seg_ids, show_gt_boxes=show_gt_boxes, box_score_thres=score_thres, vol_slice_picks=vol_slice_picks, show_gt_labels=True, plot_mods=plot_mods, out_file=out_file, has_colorchannels=cf.has_colorchannels, dpi=600) return if __name__=="__main__": class Args(): def __init__(self): - #self.dataset_name = "datasets/prostate" self.dataset_name = "datasets/lidc" - #self.exp_dir = "datasets/toy/experiments/mrcnnal2d_clkengal" # detunet2d_di_bs16_ps512" - #self.exp_dir = "/home/gregor/networkdrives/E132-Cluster-Projects/prostate/experiments/gs6071_retinau3d_cl_bs6" - #self.exp_dir = "/home/gregor/networkdrives/E132-Cluster-Projects/prostate/experiments/gs6071_frcnn3d_cl_bs6" - #self.exp_dir = "/home/gregor/networkdrives/E132-Cluster-Projects/prostate/experiments_t2/gs6071_mrcnn3d_cl_bs6_lessaug" - #self.exp_dir = "/home/gregor/networkdrives/E132-Cluster-Projects/prostate/experiments/gs6071_detfpn3d_cl_bs6" - #self.exp_dir = "/home/gregor/networkdrives/E132-Cluster-Projects/lidc_sa/experiments/ms12345_mrcnn3d_rgbin_bs8" - self.exp_dir = '/home/gregor/Documents/medicaldetectiontoolkit/datasets/lidc/experiments/ms12345_mrcnn3d_rg_bs8' - #self.exp_dir = '/home/gregor/Documents/medicaldetectiontoolkit/datasets/lidc/experiments/ms12345_mrcnn3d_rgbin_bs8' + self.exp_dir = '/home/gregor/Documents/medicaldetectiontoolkit/datasets/lidc/experiments/ms12345_mrcnn3d_rg_copiedparams' self.server_env = False args = Args() data_loader = utils.import_module('dl', os.path.join(args.dataset_name, "data_loader.py")) config_file = utils.import_module('cf', os.path.join(args.exp_dir, "configs.py")) - cf = config_file.Configs() + cf = config_file.configs() cf.exp_dir = args.exp_dir cf.test_dir = cf.exp_dir pid = '0811a' cf.fold = find_pid_in_splits(pid) #cf.fold = 0 cf.merge_2D_to_3D_preds = False if cf.merge_2D_to_3D_preds: cf.dim==3 cf.fold_dir = os.path.join(cf.exp_dir, 'fold_{}'.format(cf.fold)) anal_dir = os.path.join(cf.exp_dir, "inference_analysis") logger = utils.get_logger(cf.exp_dir) model = utils.import_module('model', os.path.join(cf.exp_dir, "model.py")) torch.backends.cudnn.benchmark = cf.dim == 3 net = model.net(cf, logger).cuda() test_predictor = Predictor(cf, None, logger, mode='test') test_evaluator = Evaluator(cf, logger, mode='test') #val_gen = data_loader.get_train_generators(cf, logger, data_statistics=False)['val_sampling'] batch_gen = data_loader.get_test_generator(cf, logger) weight_paths = [os.path.join(cf.fold_dir, '{}_best_params.pth'.format(rank)) for rank in test_predictor.epoch_ranking] try: pids = batch_gen["test"].dataset_pids except: pids = batch_gen["test"].generator.dataset_pids print("pids in test set: ", pids) #pid = pids[0] #assert pid in pids # load already trained model weights rank = 0 weight_path = weight_paths[rank] with torch.no_grad(): pass net.load_state_dict(torch.load(weight_path)) net.eval() # generate a batch from test set and show results if not os.path.isdir(anal_dir): os.mkdir(anal_dir) #plot_train_forward() #plot_forward(pids[0]) #net.actual_dims() #batch_gen = 
data_loader.get_test_generator(cf, logger) merged_boxes_file = os.path.join(cf.fold_dir, "merged_box_results") try: results_list = utils.load_obj(merged_boxes_file+".pkl") print("loaded merged boxes from file.") except FileNotFoundError: results_list = test_predictor.load_saved_predictions() utils.save_obj(results_list, merged_boxes_file) cf.plot_class_ids = False for pid in [pid,]:#['0317a',]:#pids[2:8]: assert pid in [res[1] for res in results_list] plot_merged_boxes(results_list, pid=pid, show_info=True, show_gt_boxes=True, show_seg_ids="all", score_thres=0.13, s_picks=None, vol_slice_picks=None, plot_mods=False) diff --git a/models/mrcnn_gan.py b/models/mrcnn_gan.py deleted file mode 100644 index af5632c..0000000 --- a/models/mrcnn_gan.py +++ /dev/null @@ -1,844 +0,0 @@ -#!/usr/bin/env python -# Copyright 2019 Division of Medical Image Computing, German Cancer Research Center (DKFZ). -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================== - -""" -Parts are based on https://github.com/multimodallearning/pytorch-mask-rcnn -published under MIT license. -""" -import time - -import numpy as np -import torch -import torch.nn as nn -import torch.nn.functional as F -import torch.utils - -import utils.model_utils as mutils -import utils.exp_utils as utils - - -class Generator_RPN(nn.Module): - """ - Region Proposal Network. - """ - - def __init__(self, cf, conv): - - super(Generator_RPN, self).__init__() - self.dim = conv.dim - - #assert cf.batch_size%2==0 - self.conv_shared = conv(cf.end_filts+1, cf.n_rpn_features, ks=3, stride=cf.rpn_anchor_stride, pad=1, relu=cf.relu) - self.conv_class = conv(cf.n_rpn_features, 2 * len(cf.rpn_anchor_ratios), ks=1, stride=1, relu=None) - self.conv_bbox = conv(cf.n_rpn_features, 2 * self.dim * len(cf.rpn_anchor_ratios), ks=1, stride=1, relu=None) - - - def forward(self, x): - """ - :param x: input feature maps (b, in_channels, y, x, (z)) - :return: rpn_class_logits (b, n_anchors, 2) - :return: rpn_probs_logits (b, n_anchors, 2) - :return: rpn_bbox (b, n_anchors, 2*dim) - """ - # latent vector from vanilla base distribution - z = torch.randn(x.shape[0], 1, *x.shape[2:], requires_grad=True).cuda() - x = torch.cat((x,z), dim=1) - # Shared convolutional base of the RPN. - x = self.conv_shared(x) - - # Anchor Score. (batch, anchors per location * 2, y, x, (z)). - rpn_class_logits = self.conv_class(x) - # Reshape to (batch, anchors, 2) - axes = (0, 2, 3, 1) if self.dim == 2 else (0, 2, 3, 4, 1) - rpn_class_logits = rpn_class_logits.permute(*axes) - rpn_class_logits = rpn_class_logits.contiguous() - rpn_class_logits = rpn_class_logits.view(x.size()[0], -1, 2) - - # Softmax on last dimension (fg vs. bg). - rpn_probs = F.softmax(rpn_class_logits, dim=2) - - # Bounding box refinement. 
(batch, anchors_per_location * (y, x, (z), log(h), log(w), (log(d)), y, x, (z)) - rpn_bbox = self.conv_bbox(x) - - # Reshape to (batch, anchors, 2*dim) - rpn_bbox = rpn_bbox.permute(*axes) - rpn_bbox = rpn_bbox.contiguous() - rpn_bbox = rpn_bbox.view(x.size()[0], -1, self.dim * 2) - - return [rpn_class_logits, rpn_probs, rpn_bbox] - -class RPN_Discriminator(nn.Module): - """ - Region Proposal Network. - """ - - def __init__(self, cf, conv): - - super(RPN_Discriminator, self).__init__() - self.dim = conv.dim - - #assert cf.batch_size%2==0 - self.resizer = nn.Sequential( - conv(cf.end_filts, cf.end_filts//2, ks=3, stride=cf.rpn_anchor_stride, pad=0, relu=cf.relu), - nn.MaxPool2d(kernel_size=3, stride=2, padding=0) if \ - conv.dim == 2 else nn.MaxPool3d(kernel_size=3,stride=(2, 2, 1),padding=0), - conv(cf.end_filts//2, cf.end_filts // 2, ks=1, stride=1, pad=0, relu=cf.relu), - nn.MaxPool2d(kernel_size=3, stride=2, padding=0) if \ - conv.dim == 2 else nn.MaxPool3d(kernel_size=3, stride=(2, 2, 1), padding=0), - - ) - self.in_channels = cf.end_filts * 4 - self.conv2 = conv(cf.end_filts, cf.n_rpn_features, ks=1, stride=1, pad=1, relu=cf.relu) - self.conv3 = conv(cf.n_rpn_features, 2 * len(cf.rpn_anchor_ratios), ks=1, stride=1, relu=None) - - def forward(self, f_maps, probs, deltas): - """ - :param feature_maps: list of tensors of sizes (bsize, cf.end_filts, varying map dimensions) - :param probs: tensor of size (bsize, n_proposals on all fpn layers, 2) - :param deltas: tensor of size (bsize, n_proposals on all fpn layers, cf.dim*2) - :return: - """ - f_maps = [self.resizer(m) for m in f_maps] - x = torch.cat([t.view(t.shape[0], t.shape[1], -1) for t in f_maps], dim=-1) - x = x.view(-1, self.in_channels) - x = torch.cat((x,z), dim=1) - # Shared convolutional base of the RPN. - x = self.conv_shared(x) - - # Anchor Score. (batch, anchors per location * 2, y, x, (z)). - rpn_class_logits = self.conv_class(x) - # Reshape to (batch, 2, anchors) - axes = (0, 2, 3, 1) if self.dim == 2 else (0, 2, 3, 4, 1) - rpn_class_logits = rpn_class_logits.permute(*axes) - rpn_class_logits = rpn_class_logits.contiguous() - rpn_class_logits = rpn_class_logits.view(x.size()[0], -1, 2) - - # Softmax on last dimension (fg vs. bg). - rpn_probs = F.softmax(rpn_class_logits, dim=2) - - # Bounding box refinement. (batch, anchors_per_location * (y, x, (z), log(h), log(w), (log(d)), y, x, (z)) - rpn_bbox = self.conv_bbox(x) - - # Reshape to (batch, 2*dim, anchors) - rpn_bbox = rpn_bbox.permute(*axes) - rpn_bbox = rpn_bbox.contiguous() - rpn_bbox = rpn_bbox.view(x.size()[0], -1, self.dim * 2) - - return [rpn_class_logits, rpn_probs, rpn_bbox] - - - - - -class Classifier(nn.Module): - """ - Head network for classification and bounding box refinement. Performs RoiAlign, processes resulting features through a - shared convolutional base and finally branches off the classifier- and regression head. 
- """ - def __init__(self, cf, conv): - super(Classifier, self).__init__() - - self.cf = cf - self.dim = conv.dim - self.in_channels = cf.end_filts - self.pool_size = cf.pool_size - self.pyramid_levels = cf.pyramid_levels - # instance_norm does not work with spatial dims (1, 1, (1)) - norm = cf.norm if cf.norm != 'instance_norm' else None - - self.conv1 = conv(cf.end_filts, cf.end_filts * 4, ks=self.pool_size, stride=1, norm=norm, relu=cf.relu) - self.conv2 = conv(cf.end_filts * 4, cf.end_filts * 4, ks=1, stride=1, norm=norm, relu=cf.relu) - self.linear_bbox = nn.Linear(cf.end_filts * 4, cf.head_classes * 2 * self.dim) - - - if 'regression' in self.cf.prediction_tasks: - self.linear_regressor = nn.Linear(cf.end_filts * 4, cf.head_classes * cf.regression_n_features) - self.rg_n_feats = cf.regression_n_features - #classify into bins of regression values - elif 'regression_bin' in self.cf.prediction_tasks: - self.linear_regressor = nn.Linear(cf.end_filts * 4, cf.head_classes * len(cf.bin_labels)) - self.rg_n_feats = len(cf.bin_labels) - else: - self.linear_regressor = lambda x: torch.zeros((x.shape[0], cf.head_classes * cf.regression_n_features), dtype=torch.float32).fill_(float('NaN')).cuda() - self.rg_n_feats = cf.regression_n_features - if 'class' in self.cf.prediction_tasks: - self.linear_class = nn.Linear(cf.end_filts * 4, cf.head_classes) - else: - assert cf.head_classes == 2, "#head classes {} needs to be 2 (bg/fg) when not predicting classes".format(cf.head_classes) - self.linear_class = lambda x: torch.zeros((x.shape[0], cf.head_classes), dtype=torch.float64).cuda() - #print("\n\nWARNING: using extra class head\n\n") - #self.linear_class = nn.Linear(cf.end_filts * 4, cf.head_classes) - - def forward(self, x, rois): - """ - :param x: input feature maps (b, in_channels, y, x, (z)) - :param rois: normalized box coordinates as proposed by the RPN to be forwarded through - the second stage (n_proposals, (y1, x1, y2, x2, (z1), (z2), batch_ix). Proposals of all batch elements - have been merged to one vector, while the origin info has been stored for re-allocation. - :return: mrcnn_class_logits (n_proposals, n_head_classes) - :return: mrcnn_bbox (n_proposals, n_head_classes, 2 * dim) predicted corrections to be applied to proposals for refinement. - """ - x = mutils.pyramid_roi_align(x, rois, self.pool_size, self.pyramid_levels, self.dim) - x = self.conv1(x) - x = self.conv2(x) - x = x.view(-1, self.in_channels * 4) - - mrcnn_bbox = self.linear_bbox(x) - mrcnn_bbox = mrcnn_bbox.view(mrcnn_bbox.size()[0], -1, self.dim * 2) - mrcnn_class_logits = self.linear_class(x) - mrcnn_regress = self.linear_regressor(x) - mrcnn_regress = mrcnn_regress.view(mrcnn_regress.size()[0], -1, self.rg_n_feats) - - return [mrcnn_bbox, mrcnn_class_logits, mrcnn_regress] - - -class Mask(nn.Module): - """ - Head network for proposal-based mask segmentation. Performs RoiAlign, some convolutions and applies sigmoid on the - output logits to allow for overlapping classes. 
- """ - def __init__(self, cf, conv): - super(Mask, self).__init__() - self.pool_size = cf.mask_pool_size - self.pyramid_levels = cf.pyramid_levels - self.dim = conv.dim - self.conv1 = conv(cf.end_filts, cf.end_filts, ks=3, stride=1, pad=1, norm=cf.norm, relu=cf.relu) - self.conv2 = conv(cf.end_filts, cf.end_filts, ks=3, stride=1, pad=1, norm=cf.norm, relu=cf.relu) - self.conv3 = conv(cf.end_filts, cf.end_filts, ks=3, stride=1, pad=1, norm=cf.norm, relu=cf.relu) - self.conv4 = conv(cf.end_filts, cf.end_filts, ks=3, stride=1, pad=1, norm=cf.norm, relu=cf.relu) - if conv.dim == 2: - self.deconv = nn.ConvTranspose2d(cf.end_filts, cf.end_filts, kernel_size=2, stride=2) - else: - self.deconv = nn.ConvTranspose3d(cf.end_filts, cf.end_filts, kernel_size=2, stride=2) - - self.relu = nn.ReLU(inplace=True) if cf.relu == 'relu' else nn.LeakyReLU(inplace=True) - self.conv5 = conv(cf.end_filts, cf.head_classes, ks=1, stride=1, relu=None) - self.sigmoid = nn.Sigmoid() - - def forward(self, x, rois): - """ - :param x: input feature maps (b, in_channels, y, x, (z)) - :param rois: normalized box coordinates as proposed by the RPN to be forwarded through - the second stage (n_proposals, (y1, x1, y2, x2, (z1), (z2), batch_ix). Proposals of all batch elements - have been merged to one vector, while the origin info has been stored for re-allocation. - :return: x: masks (n_sampled_proposals (n_detections in inference), n_classes, y, x, (z)) - """ - x = mutils.pyramid_roi_align(x, rois, self.pool_size, self.pyramid_levels, self.dim) - x = self.conv1(x) - x = self.conv2(x) - x = self.conv3(x) - x = self.conv4(x) - x = self.relu(self.deconv(x)) - x = self.conv5(x) - x = self.sigmoid(x) - return x - - -############################################################ -# Loss Functions -############################################################ - -def compute_rpn_class_loss(rpn_class_logits, rpn_match, shem_poolsize): - """ - :param rpn_match: (n_anchors). [-1, 0, 1] for negative, neutral, and positive matched anchors. - :param rpn_class_logits: (n_anchors, 2). logits from RPN classifier. - :param SHEM_poolsize: int. factor of top-k candidates to draw from per negative sample (stochastic-hard-example-mining). - :return: loss: torch tensor - :return: np_neg_ix: 1D array containing indices of the neg_roi_logits, which have been sampled for training. - """ - - # Filter out netural anchors - pos_indices = torch.nonzero(rpn_match == 1) - neg_indices = torch.nonzero(rpn_match == -1) - - # loss for positive samples - if not 0 in pos_indices.size(): - pos_indices = pos_indices.squeeze(1) - roi_logits_pos = rpn_class_logits[pos_indices] - pos_loss = F.cross_entropy(roi_logits_pos, torch.LongTensor([1] * pos_indices.shape[0]).cuda()) - else: - pos_loss = torch.FloatTensor([0]).cuda() - - # loss for negative samples: draw hard negative examples (SHEM) - # that match the number of positive samples, but at least 1. 
- if not 0 in neg_indices.size(): - neg_indices = neg_indices.squeeze(1) - roi_logits_neg = rpn_class_logits[neg_indices] - negative_count = np.max((1, pos_indices.cpu().data.numpy().size)) - roi_probs_neg = F.softmax(roi_logits_neg, dim=1) - neg_ix = mutils.shem(roi_probs_neg, negative_count, shem_poolsize) - neg_loss = F.cross_entropy(roi_logits_neg[neg_ix], torch.LongTensor([0] * neg_ix.shape[0]).cuda()) - np_neg_ix = neg_ix.cpu().data.numpy() - #print("pos, neg count", pos_indices.cpu().data.numpy().size, negative_count) - else: - neg_loss = torch.FloatTensor([0]).cuda() - np_neg_ix = np.array([]).astype('int32') - - loss = (pos_loss + neg_loss) / 2 - return loss, np_neg_ix - - -def compute_rpn_bbox_loss(rpn_pred_deltas, rpn_target_deltas, rpn_match): - """ - :param rpn_target_deltas: (b, n_positive_anchors, (dy, dx, (dz), log(dh), log(dw), (log(dd)))). - Uses 0 padding to fill in unsed bbox deltas. - :param rpn_pred_deltas: predicted deltas from RPN. (b, n_anchors, (dy, dx, (dz), log(dh), log(dw), (log(dd)))) - :param rpn_match: (n_anchors). [-1, 0, 1] for negative, neutral, and positive matched anchors. - :return: loss: torch 1D tensor. - """ - if not 0 in torch.nonzero(rpn_match == 1).size(): - - indices = torch.nonzero(rpn_match == 1).squeeze(1) - # Pick bbox deltas that contribute to the loss - rpn_pred_deltas = rpn_pred_deltas[indices] - # Trim target bounding box deltas to the same length as rpn_bbox. - target_deltas = rpn_target_deltas[:rpn_pred_deltas.size()[0], :] - # Smooth L1 loss - loss = F.smooth_l1_loss(rpn_pred_deltas, target_deltas) - else: - loss = torch.FloatTensor([0]).cuda() - - return loss - -def compute_disc_loss(d_target, d_pred, target, shem_poolsize): - - - - - return - - -def compute_mrcnn_bbox_loss(mrcnn_pred_deltas, mrcnn_target_deltas, target_class_ids): - """ - :param mrcnn_target_deltas: (n_sampled_rois, (dy, dx, (dz), log(dh), log(dw), (log(dh))) - :param mrcnn_pred_deltas: (n_sampled_rois, n_classes, (dy, dx, (dz), log(dh), log(dw), (log(dh))) - :param target_class_ids: (n_sampled_rois) - :return: loss: torch 1D tensor. - """ - if not 0 in torch.nonzero(target_class_ids > 0).size(): - positive_roi_ix = torch.nonzero(target_class_ids > 0)[:, 0] - positive_roi_class_ids = target_class_ids[positive_roi_ix].long() - target_bbox = mrcnn_target_deltas[positive_roi_ix, :].detach() - pred_bbox = mrcnn_pred_deltas[positive_roi_ix, positive_roi_class_ids, :] - loss = F.smooth_l1_loss(pred_bbox, target_bbox) - else: - loss = torch.FloatTensor([0]).cuda() - - return loss - -def compute_mrcnn_mask_loss(pred_masks, target_masks, target_class_ids): - """ - :param target_masks: (n_sampled_rois, y, x, (z)) A float32 tensor of values 0 or 1. Uses zero padding to fill array. - :param pred_masks: (n_sampled_rois, n_classes, y, x, (z)) float32 tensor with values between [0, 1]. - :param target_class_ids: (n_sampled_rois) - :return: loss: torch 1D tensor. - """ - if not 0 in torch.nonzero(target_class_ids > 0).size(): - # Only positive ROIs contribute to the loss. And only - # the class-specific mask of each ROI. 
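# Illustrative shape sketch, not from the original module: the indexing below keeps
# exactly one mask per positive ROI, namely the channel of its target class.
# Hypothetical sizes: 4 sampled ROIs, 3 head classes (incl. bg), 28x28 masks.
import torch

pred_masks_demo = torch.rand(4, 3, 28, 28)              # (n_rois, n_classes, y, x)
target_ids_demo = torch.tensor([1, 0, 2, 1])            # 0 = background ROI
pos_ix_demo = torch.nonzero(target_ids_demo > 0)[:, 0]
per_roi_masks = pred_masks_demo[pos_ix_demo, target_ids_demo[pos_ix_demo]]
assert per_roi_masks.shape == (3, 28, 28)               # one (y, x) mask per positive ROI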
- positive_ix = torch.nonzero(target_class_ids > 0)[:, 0] - positive_class_ids = target_class_ids[positive_ix].long() - y_true = target_masks[positive_ix, :, :].detach() - y_pred = pred_masks[positive_ix, positive_class_ids, :, :] - loss = F.binary_cross_entropy(y_pred, y_true) - else: - loss = torch.FloatTensor([0]).cuda() - - return loss - -def compute_mrcnn_class_loss(tasks, pred_class_logits, target_class_ids): - """ - :param pred_class_logits: (n_sampled_rois, n_classes) - :param target_class_ids: (n_sampled_rois) batch dimension was merged into roi dimension. - :return: loss: torch 1D tensor. - """ - if 'class' in tasks and not 0 in target_class_ids.size(): - #if 0 in target_class_ids.size(): - # print("WARNING: using additional cl head") - loss = F.cross_entropy(pred_class_logits, target_class_ids.long()) - else: - loss = torch.FloatTensor([0.]).cuda() - - return loss - -def compute_mrcnn_regression_loss(tasks, pred, target, target_class_ids): - """regression loss is a distance metric between target vector and predicted regression vector. - :param pred: (n_sampled_rois, n_classes, [n_rg_feats if real regression or 1 if rg_bin task) - :param target: (n_sampled_rois, [n_rg_feats or n_rg_bins]) - :return: differentiable loss, torch 1D tensor on cuda - """ - - if not 0 in target.shape and not 0 in torch.nonzero(target_class_ids > 0).shape: - if "regression_bin" in tasks: - positive_roi_ix = torch.nonzero(target_class_ids > 0)[:, 0] - positive_roi_class_ids = target_class_ids[positive_roi_ix].long() - target = target[positive_roi_ix].detach() - pred = pred[positive_roi_ix, positive_roi_class_ids] #are the class logits - loss = F.cross_entropy(pred, target.long()) - else: - positive_roi_ix = torch.nonzero(target_class_ids > 0)[:, 0] - positive_roi_class_ids = target_class_ids[positive_roi_ix].long() - target = target[positive_roi_ix, :].detach() - pred = pred[positive_roi_ix, positive_roi_class_ids, :] - loss = F.smooth_l1_loss(pred, target) - else: - loss = torch.FloatTensor([0.]).cuda() - - return loss - -############################################################ -# Detection Layer -############################################################ - -def compute_roi_scores(cf, batch_rpn_proposals, mrcnn_cl_logits): - """Compute scores from uncertainty measures (lower=better) to use for sorting/clustering algos (higher=better). - :param cf: - :param uncert_class: - :param uncert_regression: - :return: - """ - if not 'class' in cf.prediction_tasks: - scores = batch_rpn_proposals[:, :, -1].view(-1, 1) - scores = torch.cat((1 - scores, scores), dim=1) - else: - #print("WARNING: using extra class head") - scores = F.softmax(mrcnn_cl_logits, dim=1) - - return scores - -############################################################ -# MaskRCNN Class -############################################################ - -class net(nn.Module): - - - def __init__(self, cf, logger): - - super(net, self).__init__() - self.cf = cf - self.logger = logger - self.build() - - - if self.cf.weight_init=="custom": - logger.info("Tried to use custom weight init which is not defined. Using pytorch default.") - elif self.cf.weight_init: - mutils.initialize_weights(self) - else: - logger.info("using default pytorch weight init") - - def build(self): - """Build Mask R-CNN architecture.""" - - # Image size must be dividable by 2 multiple times. 
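# Illustrative sketch, not from the original module: the float-division test below
# simply requires both in-plane patch dimensions to be multiples of 2**5 = 32, i.e.
# they survive five halvings without remainder. Equivalent modulo formulation
# (function name is made up for illustration):

def patch_size_is_valid(patch_size, n_halvings=5):
    return all(int(s) % 2 ** n_halvings == 0 for s in patch_size[:2])

assert patch_size_is_valid((256, 320)) and not patch_size_is_valid((250, 320))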
- h, w = self.cf.patch_size[:2] - if h / 2**5 != int(h / 2**5) or w / 2**5 != int(w / 2**5): - raise Exception("Image size must be divisible by 2 at least 5 times " - "to avoid fractions when downscaling and upscaling." - "For example, use 256, 320, 384, 448, 512, ... etc.,i.e.," - "any number x*32 will do!") - - # instantiate abstract multi-dimensional conv generator and load backbone module. - backbone = utils.import_module('bbone', self.cf.backbone_path) - conv = backbone.ConvGenerator(self.cf.dim) - - # build Anchors, FPN, RPN, Classifier / Bbox-Regressor -head, Mask-head - self.np_anchors = mutils.generate_pyramid_anchors(self.logger, self.cf) - self.anchors = torch.from_numpy(self.np_anchors).float().cuda() - self.fpn = backbone.FPN(self.cf, conv, relu_enc=self.cf.relu, operate_stride1=False).cuda() - self.rpn = Generator_RPN(self.cf, conv) - self.discriminator = RPN_Discriminator(self.cf, conv) - self.classifier = Classifier(self.cf, conv) - self.mask = Mask(self.cf, conv) - - def forward(self, img, is_training=True): - """ - :param img: input images (b, c, y, x, (z)). - :return: rpn_pred_logits: (b, n_anchors, 2) - :return: rpn_pred_deltas: (b, n_anchors, (y, x, (z), log(h), log(w), (log(d)))) - :return: batch_proposal_boxes: (b, n_proposals, (y1, x1, y2, x2, (z1), (z2), batch_ix)) only for monitoring/plotting. - :return: detections: (n_final_detections, (y1, x1, y2, x2, (z1), (z2), batch_ix, pred_class_id, pred_score) - :return: detection_masks: (n_final_detections, n_classes, y, x, (z)) raw molded masks as returned by mask-head. - """ - # extract features. - fpn_outs = self.fpn(img) - rpn_feature_maps = [fpn_outs[i] for i in self.cf.pyramid_levels] - self.mrcnn_feature_maps = rpn_feature_maps - - # loop through pyramid layers and apply RPN. - layer_outputs = [ self.rpn(p_feats) for p_feats in rpn_feature_maps ] - - # concatenate layer outputs. - # convert from list of lists of level outputs to list of lists of outputs across levels. - # e.g. [[a1, b1, c1], [a2, b2, c2]] => [[a1, a2], [b1, b2], [c1, c2]] - outputs = list(zip(*layer_outputs)) - rpn_pred_logits, rpn_pred_probs, rpn_pred_deltas = [torch.cat(list(o), dim=1) for o in outputs] - # - # # generate proposals: apply predicted deltas to anchors and filter by foreground scores from RPN classifier. - proposal_count = self.cf.post_nms_rois_training if is_training else self.cf.post_nms_rois_inference - batch_normed_props, batch_unnormed_props = mutils.refine_proposals(rpn_pred_probs, rpn_pred_deltas,proposal_count, - self.anchors, self.cf) - # merge batch dimension of proposals while storing allocation info in coordinate dimension. - batch_ixs = torch.arange( - batch_normed_props.shape[0]).cuda().unsqueeze(1).repeat(1, batch_normed_props.shape[1]).view(-1).float() - rpn_rois = batch_normed_props[:, :, :-1].view(-1, batch_normed_props[:, :, :-1].shape[2]) - self.rpn_rois_batch_info = torch.cat((rpn_rois, batch_ixs.unsqueeze(1)), dim=1) - - # this is the first of two forward passes in the second stage, where no activations are stored for backprop. - # here, all proposals are forwarded (with virtual_batch_size = batch_size * post_nms_rois.) - # for inference/monitoring as well as sampling of rois for the loss functions. - # processed in chunks of roi_batch_size to re-adjust to gpu-memory. 
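# Illustrative sketch, not from the original module: Tensor.split(chunk_size), as used
# below, yields consecutive views of at most chunk_size proposals, so the ROI heads
# never see more than cf.roi_batch_size rows at once. Made-up numbers:
import torch

rois_demo = torch.rand(10, 5)                     # 10 proposals, 4 coords + batch_ix (2D case assumed)
chunks = rois_demo.split(4)                       # -> row counts 4, 4, 2
assert [c.shape[0] for c in chunks] == [4, 4, 2]
with torch.no_grad():                             # same pattern as below: no activations stored
    merged = torch.cat([c * 1.0 for c in chunks], 0)   # stand-in for running the head per chunk
assert merged.shape == rois_demo.shape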
- chunked_rpn_rois = self.rpn_rois_batch_info.split(self.cf.roi_batch_size) - bboxes_list, class_logits_list, regressions_list = [], [], [] - with torch.no_grad(): - for chunk in chunked_rpn_rois: - chunk_bboxes, chunk_class_logits, chunk_regressions = self.classifier(self.mrcnn_feature_maps, chunk) - bboxes_list.append(chunk_bboxes) - class_logits_list.append(chunk_class_logits) - regressions_list.append(chunk_regressions) - mrcnn_bbox = torch.cat(bboxes_list, 0) - mrcnn_class_logits = torch.cat(class_logits_list, 0) - mrcnn_regressions = torch.cat(regressions_list, 0) - self.mrcnn_roi_scores = compute_roi_scores(self.cf, batch_normed_props, mrcnn_class_logits) - - # refine classified proposals, filter and return final detections. - # returns (cf.max_inst_per_batch_element, n_coords+1+...) - detections = mutils.refine_detections(self.cf, batch_ixs, rpn_rois, mrcnn_bbox, self.mrcnn_roi_scores, - mrcnn_regressions) - - # forward remaining detections through mask-head to generate corresponding masks. - scale = [img.shape[2]] * 4 + [img.shape[-1]] * 2 - scale = torch.from_numpy(np.array(scale[:self.cf.dim * 2] + [1])[None]).float().cuda() - - # first self.cf.dim * 2 entries on axis 1 are always the box coords, +1 is batch_ix - detection_boxes = detections[:, :self.cf.dim * 2 + 1] / scale - with torch.no_grad(): - detection_masks = self.mask(self.mrcnn_feature_maps, detection_boxes) - - return rpn_pred_logits, rpn_pred_probs, rpn_pred_deltas, batch_unnormed_props, detections, detection_masks - - def loss_samples_forward(self, batch_gt_boxes, batch_gt_masks, batch_gt_class_ids, batch_gt_regressions=None): - """ - this is the second forward pass through the second stage (features from stage one are re-used). - samples few rois in loss_example_mining and forwards only those for loss computation. - :param batch_gt_class_ids: list over batch elements. Each element is a list over the corresponding roi target labels. - :param batch_gt_boxes: list over batch elements. Each element is a list over the corresponding roi target coordinates. - :param batch_gt_masks: list over batch elements. Each element is binary mask of shape (n_gt_rois, y, x, (z), c) - :return: sample_logits: (n_sampled_rois, n_classes) predicted class scores. - :return: sample_deltas: (n_sampled_rois, n_classes, 2 * dim) predicted corrections to be applied to proposals for refinement. - :return: sample_mask: (n_sampled_rois, n_classes, y, x, (z)) predicted masks per class and proposal. - :return: sample_target_class_ids: (n_sampled_rois) target class labels of sampled proposals. - :return: sample_target_deltas: (n_sampled_rois, 2 * dim) target deltas of sampled proposals for box refinement. - :return: sample_target_masks: (n_sampled_rois, y, x, (z)) target masks of sampled proposals. - :return: sample_proposals: (n_sampled_rois, 2 * dim) RPN output for sampled proposals. only for monitoring/plotting. - """ - # sample rois for loss and get corresponding targets for all Mask R-CNN head network losses. - sample_ics, sample_target_deltas, sample_target_mask, sample_target_class_ids, sample_target_regressions = \ - mutils.loss_example_mining(self.cf, self.rpn_rois_batch_info, batch_gt_boxes, batch_gt_masks, - self.mrcnn_roi_scores, batch_gt_class_ids, batch_gt_regressions) - - # re-use feature maps and RPN output from first forward pass. 
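# Illustrative sketch, not from the original module: sample_ics indexes rows of the
# merged proposal tensor, whose layout is (y1, x1, y2, x2, (z1, z2), batch_ix) as
# described in the docstrings above. Made-up 2D example of that row selection:
import torch

merged_props_demo = torch.rand(6, 5)              # 6 proposals, 4 coords + batch_ix
sample_ics_demo = torch.tensor([0, 3, 5])
picked = merged_props_demo[sample_ics_demo]
assert picked.shape == (3, 5)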
- sample_proposals = self.rpn_rois_batch_info[sample_ics] - if not 0 in sample_proposals.size(): - sample_deltas, sample_logits, sample_regressions = self.classifier(self.mrcnn_feature_maps, sample_proposals) - sample_mask = self.mask(self.mrcnn_feature_maps, sample_proposals) - else: - sample_logits = torch.FloatTensor().cuda() - sample_deltas = torch.FloatTensor().cuda() - sample_regressions = torch.FloatTensor().cuda() - sample_mask = torch.FloatTensor().cuda() - - return [sample_deltas, sample_mask, sample_logits, sample_regressions, sample_proposals, - sample_target_deltas, sample_target_mask, sample_target_class_ids, sample_target_regressions] - - def get_results(self, img_shape, detections, detection_masks, box_results_list=None, return_masks=True): - """ - Restores batch dimension of merged detections, unmolds detections, creates and fills results dict. - :param img_shape: - :param detections: shape (n_final_detections, len(info)), where - info=( y1, x1, y2, x2, (z1,z2), batch_ix, pred_class_id, pred_score ) - :param detection_masks: (n_final_detections, n_classes, y, x, (z)) raw molded masks as returned by mask-head. - :param box_results_list: None or list of output boxes for monitoring/plotting. - each element is a list of boxes per batch element. - :param return_masks: boolean. If True, full resolution masks are returned for all proposals (speed trade-off). - :return: results_dict: dictionary with keys: - 'boxes': list over batch elements. each batch element is a list of boxes. each box is a dictionary: - [[{box_0}, ... {box_n}], [{box_0}, ... {box_n}], ...] - 'seg_preds': pixel-wise class predictions (b, 1, y, x, (z)) with values [0, 1] only fg. vs. bg for now. - class-specific return of masks will come with implementation of instance segmentation evaluation. - """ - - detections = detections.cpu().data.numpy() - if self.cf.dim == 2: - detection_masks = detection_masks.permute(0, 2, 3, 1).cpu().data.numpy() - else: - detection_masks = detection_masks.permute(0, 2, 3, 4, 1).cpu().data.numpy() - # det masks shape now (n_dets, y,x(,z), n_classes) - # restore batch dimension of merged detections using the batch_ix info. - batch_ixs = detections[:, self.cf.dim*2] - detections = [detections[batch_ixs == ix] for ix in range(img_shape[0])] - mrcnn_mask = [detection_masks[batch_ixs == ix] for ix in range(img_shape[0])] - #mrcnn_mask: shape (b_size, variable, variable, n_classes), variable bc depends on single instance mask size - - if box_results_list == None: # for test_forward, where no previous list exists. - box_results_list = [[] for _ in range(img_shape[0])] - # seg_logits == seg_probs in mrcnn since mask head finishes with sigmoid (--> image space = [0,1]) - seg_probs = [] - # loop over batch and unmold detections. 
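# Illustrative sketch, not from the original module: "unmolding" (mutils.unmold_mask_2D/3D
# below) resizes the fixed-size mask-head output to its box size and pastes it into a
# full-resolution zero canvas. Rough nearest-neighbour 2D version, assuming the box
# (y1, x1, y2, x2) lies inside the image:
import numpy as np

def unmold_mask_sketch(small_mask, box, img_shape_yx):
    y1, x1, y2, x2 = [int(c) for c in box]
    h, w = max(y2 - y1, 1), max(x2 - x1, 1)
    yy = (np.arange(h) * small_mask.shape[0] / h).astype(int)
    xx = (np.arange(w) * small_mask.shape[1] / w).astype(int)
    full = np.zeros(img_shape_yx, dtype=np.float32)
    full[y1:y1 + h, x1:x1 + w] = small_mask[yy][:, xx]
    return full

demo_full = unmold_mask_sketch(np.ones((28, 28)), (4, 6, 20, 30), (64, 64))
assert demo_full.shape == (64, 64) and demo_full[4:20, 6:30].min() == 1.0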
- for ix in range(img_shape[0]): - - # final masks are one-hot encoded (b, n_classes, y, x, (z)) - final_masks = np.zeros((self.cf.num_classes + 1, *img_shape[2:])) - #+1 for bg, 0.5 bc mask head classifies only bg/fg with logits between 0,1--> bg is <0.5 - if self.cf.num_classes + 1 != self.cf.num_seg_classes: - self.logger.warning("n of box classifier head classes {} doesnt match cf.num_seg_classes {}".format( - self.cf.num_classes + 1, self.cf.num_seg_classes)) - - if not 0 in detections[ix].shape: - boxes = detections[ix][:, :self.cf.dim*2].astype(np.int32) - class_ids = detections[ix][:, self.cf.dim*2 + 1].astype(np.int32) - scores = detections[ix][:, self.cf.dim*2 + 2] - masks = mrcnn_mask[ix][np.arange(boxes.shape[0]), ..., class_ids] - regressions = detections[ix][:,self.cf.dim*2+3:] - - # Filter out detections with zero area. Often only happens in early - # stages of training when the network weights are still a bit random. - if self.cf.dim == 2: - exclude_ix = np.where((boxes[:, 2] - boxes[:, 0]) * (boxes[:, 3] - boxes[:, 1]) <= 0)[0] - else: - exclude_ix = np.where( - (boxes[:, 2] - boxes[:, 0]) * (boxes[:, 3] - boxes[:, 1]) * (boxes[:, 5] - boxes[:, 4]) <= 0)[0] - - if exclude_ix.shape[0] > 0: - boxes = np.delete(boxes, exclude_ix, axis=0) - masks = np.delete(masks, exclude_ix, axis=0) - class_ids = np.delete(class_ids, exclude_ix, axis=0) - scores = np.delete(scores, exclude_ix, axis=0) - regressions = np.delete(regressions, exclude_ix, axis=0) - - # Resize masks to original image size and set boundary threshold. - if return_masks: - for i in range(masks.shape[0]): #masks per this batch instance/element/image - # Convert neural network mask to full size mask - if self.cf.dim == 2: - full_mask = mutils.unmold_mask_2D(masks[i], boxes[i], img_shape[2:]) - else: - full_mask = mutils.unmold_mask_3D(masks[i], boxes[i], img_shape[2:]) - # take the maximum seg_logits per class of instances in that class, i.e., a pixel in a class - # has the max seg_logit value over all instances of that class in one sample - final_masks[class_ids[i]] = np.max((final_masks[class_ids[i]], full_mask), axis=0) - final_masks[0] = np.full(final_masks[0].shape, 0.49999999) #effectively min_det_thres at 0.5 per pixel - - # add final predictions to results. - if not 0 in boxes.shape: - for ix2, coords in enumerate(boxes): - box = {'box_coords': coords, 'box_type': 'det'} - box.update({'box_score': scores[ix2], 'box_pred_class_id': class_ids[ix2]}) - #if (hasattr(self.cf, "convert_cl_to_rg") and self.cf.convert_cl_to_rg): - if "regression_bin" in self.cf.prediction_tasks: - # in this case, regression preds are actually the rg_bin_ids --> map to rg value the bin stands for - box['rg_bin'] = regressions[ix2].argmax() - box['regression'] = self.cf.bin_id2rg_val[box['rg_bin']] - else: - if hasattr(self.cf, "rg_val_to_bin_id"): - box.update({'rg_bin': self.cf.rg_val_to_bin_id(regressions[ix2])}) - box['regression'] = regressions[ix2] - - box_results_list[ix].append(box) - - # if no detections were made--> keep full bg mask (zeros). - seg_probs.append(final_masks) - - # create and fill results dictionary. - results_dict = {} - results_dict['boxes'] = box_results_list - results_dict['seg_preds'] = np.array(seg_probs) - - return results_dict - - - def train_forward(self, batch, is_validation=False): - """ - train method (also used for validation monitoring). wrapper around forward pass of network. prepares input data - for processing, computes losses, and stores outputs in a dictionary. 
- :param batch: dictionary containing 'data', 'seg', etc. - :return: results_dict: dictionary with keys: - 'boxes': list over batch elements. each batch element is a list of boxes. each box is a dictionary: - [[{box_0}, ... {box_n}], [{box_0}, ... {box_n}], ...] - 'seg_preds': pixel-wise class predictions (b, 1, y, x, (z)) with values [0, n_classes]. - 'torch_loss': 1D torch tensor for backprop. - 'class_loss': classification loss for monitoring. - """ - img = batch['data'] - gt_boxes = batch['bb_target'] - axes = (0, 2, 3, 1) if self.cf.dim == 2 else (0, 2, 3, 4, 1) - gt_masks = [np.transpose(batch['roi_masks'][ii], axes=axes) for ii in range(len(batch['roi_masks']))] - gt_class_ids = batch['class_targets'] - if 'regression' in self.cf.prediction_tasks: - gt_regressions = batch["regression_targets"] - elif 'regression_bin' in self.cf.prediction_tasks: - gt_regressions = batch["rg_bin_targets"] - else: - gt_regressions = None - - - img = torch.from_numpy(img).float().cuda() - batch_rpn_class_loss = torch.FloatTensor([0]).cuda() - batch_rpn_bbox_loss = torch.FloatTensor([0]).cuda() - # list of output boxes for monitoring/plotting. each element is a list of boxes per batch element. - box_results_list = [[] for _ in range(img.shape[0])] - - #forward passes. 1. general forward pass, where no activations are saved in second stage (for performance - # monitoring and loss sampling). 2. second stage forward pass of sampled rois with stored activations for backprop. - rpn_class_logits, rpn_probs, rpn_pred_deltas, proposal_boxes, detections, detection_masks = self.forward(img) - - mrcnn_pred_deltas, mrcnn_pred_mask, mrcnn_class_logits, mrcnn_regressions, sample_proposals, \ - mrcnn_target_deltas, target_mask, target_class_ids, target_regressions = \ - self.loss_samples_forward(gt_boxes, gt_masks, gt_class_ids, gt_regressions) - - rpn_batch_match_targets = torch.zeros(img.shape[0], self.np_anchors.shape[0]).cuda() - rpn_batch_delta_targets = torch.zeros(img.shape[0], self.np_anchors.shape[0], self.cf.dim*2).cuda() - #loop over batch - for b in range(img.shape[0]): - rpn_target_deltas = np.zeros((self.np_anchors.shape[0], self.cf.dim * 2)) - if len(gt_boxes[b]) > 0: - # add gt boxes to output list - for tix in range(len(gt_boxes[b])): - gt_box = {'box_type': 'gt', 'box_coords': batch['bb_target'][b][tix]} - for name in self.cf.roi_items: - gt_box.update({name: batch[name][b][tix]}) - box_results_list[b].append(gt_box) - - # match gt boxes with anchors to generate targets for RPN losses. - rpn_match, rpn_t_deltas = mutils.gt_anchor_matching(self.cf, self.np_anchors, gt_boxes[b]) - indices = np.nonzero(rpn_match == 1)[0] - rpn_target_deltas[indices] = rpn_t_deltas[:indices.shape[0]] - - # add positive anchors used for loss to output list for monitoring. - # pos_anchors = mutils.clip_boxes_numpy(self.np_anchors[np.argwhere(rpn_match == 1)][:, 0], img.shape[2:]) - # for p in pos_anchors: - # box_results_list[b].append({'box_coords': p, 'box_type': 'pos_anchor'}) - else: - rpn_match = np.array([-1]*self.np_anchors.shape[0]) - - rpn_batch_match_targets[b] = torch.from_numpy(rpn_match).cuda() - rpn_batch_delta_targets[b] = torch.from_numpy(rpn_target_deltas).float().cuda() - # compute RPN losses. 
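# Illustrative sketch, not from the original module: rpn_match from gt_anchor_matching
# above holds one entry per anchor (1 = positive, -1 = negative, 0 = neutral/ignored);
# the commented-out lines below would turn these targets into per-element RPN class and
# bbox losses and average them into the batch totals. Tiny demo of the convention:
import numpy as np

rpn_match_demo = np.array([0, 1, -1, 0, 1])
pos_anchor_ix = np.nonzero(rpn_match_demo == 1)[0]
neg_anchor_ix = np.nonzero(rpn_match_demo == -1)[0]
assert pos_anchor_ix.tolist() == [1, 4] and neg_anchor_ix.tolist() == [2]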
- #rpn_class_loss, neg_anchor_ix = compute_rpn_class_loss(rpn_class_logits[b], rpn_match, self.cf.shem_poolsize) - #rpn_bbox_loss = compute_rpn_bbox_loss(rpn_pred_deltas[b], rpn_target_deltas, rpn_match) - - # batch_rpn_class_loss += rpn_class_loss /img.shape[0] - # batch_rpn_bbox_loss += rpn_bbox_loss /img.shape[0] - - # add negative anchors used for loss to output list for monitoring. - # neg_anchors = mutils.clip_boxes_numpy(self.np_anchors[np.argwhere(rpn_match == -1)][0, neg_anchor_ix], img.shape[2:]) - # for n in neg_anchors: - # box_results_list[b].append({'box_coords': n, 'box_type': 'neg_anchor'}) - - # add highest scoring proposals to output list for monitoring. - rpn_proposals = proposal_boxes[b][proposal_boxes[b, :, -1].argsort()][::-1] - for r in rpn_proposals[:self.cf.n_plot_rpn_props, :-1]: - box_results_list[b].append({'box_coords': r, 'box_type': 'prop'}) - - #filter_anchors(rpn_batch_match_targets, rpn_class_logits, rpn_batch_delta_targets, rpn_pred_deltas, - # self.cf.shem_poolsize) - # todo maybe send fixed number of rois to disc (fill up targets with bg-rois)? - non_neutral_mask = (rpn_batch_match_targets == 1) | (rpn_batch_match_targets == -1) - rpn_batch_match_targets = rpn_batch_match_targets[non_neutral_mask] - rpn_batch_delta_targets = rpn_batch_delta_targets[non_neutral_mask] - rpn_probs = rpn_probs[non_neutral_mask] - rpn_pred_deltas = rpn_pred_deltas[non_neutral_mask] - - # add positive and negative roi samples used for mrcnn losses to output list for monitoring. - # if not 0 in sample_proposals.shape: - # rois = mutils.clip_to_window(self.cf.window, sample_proposals).cpu().data.numpy() - # for ix, r in enumerate(rois): - # box_results_list[int(r[-1])].append({'box_coords': r[:-1] * self.cf.scale, - # 'box_type': 'pos_class' if target_class_ids[ix] > 0 else 'neg_class'}) - - # get discriminator judgement on predicted proposals - # d_z = self.discriminator(self.mrcnn_feature_maps, rpn_probs, rpn_pred_deltas) - d_judgement_gen = self.discriminator(self.mrcnn_feature_maps, rpn_batch_match_targets, rpn_batch_delta_targets) - - # compute Discriminator loss - compute_disc_loss(d_pred_target, d_pred_pred, d_target, self.cf.shem_poolsize) - - - # compute mrcnn losses. - mrcnn_class_loss = compute_mrcnn_class_loss(self.cf.prediction_tasks, mrcnn_class_logits, target_class_ids) - mrcnn_bbox_loss = compute_mrcnn_bbox_loss(mrcnn_pred_deltas, mrcnn_target_deltas, target_class_ids) - mrcnn_regressions_loss = compute_mrcnn_regression_loss(self.cf.prediction_tasks, mrcnn_regressions, target_regressions, target_class_ids) - # mrcnn can be run without pixelwise annotations available (Faster R-CNN mode). - # In this case, the mask_loss is taken out of training. - if not self.cf.frcnn_mode: - mrcnn_mask_loss = compute_mrcnn_mask_loss(mrcnn_pred_mask, target_mask, target_class_ids) - else: - mrcnn_mask_loss = torch.FloatTensor([0]).cuda() - - loss = batch_rpn_class_loss + batch_rpn_bbox_loss +\ - mrcnn_bbox_loss + mrcnn_mask_loss + mrcnn_class_loss + mrcnn_regressions_loss - - # monitor RPN performance: detection count = the number of correctly matched proposals per fg-class. 
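# Illustrative sketch, not from the original module: the (commented-out) dcount below just
# counts sampled ROIs per foreground class. A vectorised equivalent on made-up inputs,
# assuming 3 head classes (bg + 2 fg):
import numpy as np

target_class_ids_demo = np.array([0, 1, 1, 2, 0, 1])
dcount_demo = np.bincount(target_class_ids_demo, minlength=3)[1:]   # drop the bg bin
assert dcount_demo.tolist() == [3, 1]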
- #dcount = [list(target_class_ids.cpu().data.numpy()).count(c) for c in np.arange(self.cf.head_classes)[1:]] - #self.logger.info("regression loss {:.3f}".format(mrcnn_regressions_loss.item())) - #self.logger.info("loss: {0:.2f}, rpn_class: {1:.2f}, rpn_bbox: {2:.2f}, mrcnn_class: {3:.2f}, mrcnn_bbox: {4:.2f}, " - # "mrcnn_mask: {5:.2f}, dcount {6}".format(loss.item(), batch_rpn_class_loss.item(), - # batch_rpn_bbox_loss.item(), mrcnn_class_loss.item(), mrcnn_bbox_loss.item(), mrcnn_mask_loss.item(), dcount)) - - # run unmolding of predictions for monitoring and merge all results to one dictionary. - if is_validation or self.cf.detect_while_training: - return_masks = self.cf.return_masks_in_val if is_validation else self.cf.return_masks_in_train - results_dict = self.get_results( - img.shape, detections, detection_masks, box_results_list, return_masks=return_masks) #TODO make multithreaded? - results_dict['seg_preds'] = results_dict['seg_preds'].argmax(axis=1).astype('uint8')[:,np.newaxis] - if 'dice' in self.cf.metrics: - results_dict['batch_dices'] = mutils.dice_per_batch_and_class( - results_dict['seg_preds'], batch["seg"], self.cf.num_seg_classes, convert_to_ohe=True) - else: - results_dict = {'boxes': box_results_list} - - results_dict['torch_loss'] = loss - results_dict['class_loss'] = mrcnn_class_loss.item() - results_dict['bbox_loss'] = mrcnn_bbox_loss.item() - results_dict['rg_loss'] = mrcnn_regressions_loss.item() - results_dict['rpn_class_loss'] = rpn_class_loss.item() - results_dict['rpn_bbox_loss'] = rpn_bbox_loss.item() - # #todo remove assert when sufficiently checked - # boxescoords = [b['box_coords'] for boxlist in box_results_list for b in boxlist] - # coords_check = np.array([len(coords) == self.cf.dim*2 for coords in boxescoords]) - # assert np.all(coords_check), "cand box with wrong bcoords dim: {}".format(boxescoords[~coords_check]) - - return results_dict - - - def test_forward(self, batch, return_masks=True): - """ - test method. wrapper around forward pass of network without usage of any ground truth information. - prepares input data for processing and stores outputs in a dictionary. - :param batch: dictionary containing 'data' - :param return_masks: boolean. If True, full resolution masks are returned for all proposals (speed trade-off). - :return: results_dict: dictionary with keys: - 'boxes': list over batch elements. each batch element is a list of boxes. each box is a dictionary: - [[{box_0}, ... {box_n}], [{box_0}, ... {box_n}], ...] - 'seg_preds': pixel-wise class predictions (b, 1, y, x, (z)) with values [0, n_classes] - """ - img = batch['data'] - img = torch.from_numpy(img).float().cuda() - _, _, _, detections, detection_masks = self.forward(img) - results_dict = self.get_results(img.shape, detections, detection_masks, return_masks=return_masks) - - return results_dict \ No newline at end of file diff --git a/plotting.py b/plotting.py index d53d3e5..e29c1ce 100644 --- a/plotting.py +++ b/plotting.py @@ -1,2135 +1,2124 @@ #!/usr/bin/env python # Copyright 2019 Division of Medical Image Computing, German Cancer Research Center (DKFZ). # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== import matplotlib # matplotlib.rcParams['font.family'] = ['serif'] # matplotlib.rcParams['font.serif'] = ['Times New Roman'] matplotlib.rcParams['mathtext.fontset'] = 'cm' matplotlib.rcParams['font.family'] = 'STIXGeneral' matplotlib.use('Agg') #complains with spyder editor, bc spyder imports mpl at startup from matplotlib.ticker import FormatStrFormatter import matplotlib.colors as mcolors import matplotlib.pyplot as plt import matplotlib.gridspec as gridspec import matplotlib.patches as mpatches from matplotlib.ticker import StrMethodFormatter, ScalarFormatter import SimpleITK as sitk from tensorboard.backend.event_processing.event_multiplexer import EventMultiplexer import sys import os import warnings from copy import deepcopy import numpy as np import pandas as pd import scipy.interpolate as interpol from utils.exp_utils import IO_safe warnings.filterwarnings("ignore", module="matplotlib.image") def make_colormap(seq): """ Return a LinearSegmentedColormap seq: a sequence of floats and RGB-tuples. The floats should be increasing and in the interval (0,1). """ seq = [(None,) * 3, 0.0] + list(seq) + [1.0, (None,) * 3] cdict = {'red': [], 'green': [], 'blue': []} for i, item in enumerate(seq): if isinstance(item, float): r1, g1, b1 = seq[i - 1] r2, g2, b2 = seq[i + 1] cdict['red'].append([item, r1, r2]) cdict['green'].append([item, g1, g2]) cdict['blue'].append([item, b1, b2]) return mcolors.LinearSegmentedColormap('CustomMap', cdict) bw_cmap = make_colormap([(1.,1.,1.), (0.,0.,0.)]) #------------------------------------------------------------------------ #------------- plotting functions, not all are used --------------------- def shape_small_first(shape): """sort a tuple so that the smallest entry is swapped to the beginning """ if len(shape) <= 2: # no changing dimensions if channel-dim is missing return shape smallest_dim = np.argmin(shape) if smallest_dim != 0: # assume that smallest dim is color channel new_shape = np.array(shape) # to support mask indexing new_shape = (new_shape[smallest_dim], *new_shape[(np.arange(len(shape), dtype=int) != smallest_dim)]) return new_shape else: return shape def RGB_to_rgb(RGB): rgb = np.array(RGB) / 255. return rgb def mod_to_rgb(arr, cmap=None): """convert a single-channel modality img to 3-color-channel img. :param arr: input img, expected in shape (b,c,)x,y with c=1 :return: img of shape (...,c') with c'=3 """ if len(arr.shape) == 3: arr = np.squeeze(arr) elif len(arr.shape) != 2: raise Exception("Invalid input arr shape: {}".format(arr.shape)) if cmap is None: cmap = "gray" norm = matplotlib.colors.Normalize() norm.autoscale(arr) arr = norm(arr) arr = np.stack((arr,) * 3, axis=-1) return arr def to_rgb(arr, cmap): """ Transform an integer-labeled segmentation map using an rgb color-map. 
:param arr: img_arr w/o a color-channel :param cmap: dictionary mapping from integer class labels to rgb values :return: img of shape (...,c) """ new_arr = np.zeros(shape=(arr.shape) + (3,)) for l in cmap.keys(): ixs = np.where(arr == l) new_arr[ixs] = np.array([cmap[l][i] for i in range(3)]) return new_arr def to_rgba(arr, cmap): """ Transform an integer-labeled segmentation map using an rgba color-map. :param arr: img_arr w/o a color-channel :param cmap: dictionary mapping from integer class labels to rgba values :return: new array holding rgba-image """ new_arr = np.zeros(shape=(arr.shape) + (4,)) for lab, val in cmap.items(): # in case no alpha, complement with 100% alpha if len(val) == 3: cmap[lab] = (*val, 1.) assert len(cmap[lab]) == 4, "cmap has color with {} entries".format(len(val)) for lab in cmap.keys(): ixs = np.where(arr == lab) rgb = np.array(cmap[lab][:3]) new_arr[ixs] = np.append(rgb, cmap[lab][3]) return new_arr def bin_seg_to_rgba(arr, color): """ Transform a continuously labelled binary segmentation map using an rgba color-map. values are expected to be 0-1, will give alpha-value :param arr: img_arr w/o a color-channel :param color: color to give img :return: new array holding rgba-image """ new_arr = np.zeros(shape=(arr.shape) + (4,)) for i in range(arr.shape[0]): for j in range(arr.shape[1]): new_arr[i][j] = (*color, arr[i][j]) return new_arr def suppress_axes_lines(ax): """ :param ax: pyplot axes object """ ax.axes.get_xaxis().set_ticks([]) ax.axes.get_yaxis().set_ticks([]) ax.spines['top'].set_visible(False) ax.spines['right'].set_visible(False) ax.spines['bottom'].set_visible(False) ax.spines['left'].set_visible(False) return def label_bar(ax, rects, labels=None, colors=None, fontsize=10): """Attach a text label above each bar displaying its height :param ax: :param rects: rectangles as returned by plt.bar() :param labels: :param colors: """ for ix, rect in enumerate(rects): height = rect.get_height() if labels is not None and labels[ix] is not None: label = labels[ix] else: label = '{:g}'.format(height) if colors is not None and colors[ix] is not None and np.any(np.array(colors[ix])<1): color = colors[ix] else: color = 'black' ax.text(rect.get_x() + rect.get_width() / 2., 1.007 * height, label, color=color, ha='center', va='bottom', - bbox=dict(facecolor=(1., 1., 1.), edgecolor='none', clip_on=True, pad=0, alpha=0.75), fontsize=fontsize) + bbox=dict(facecolor=(1., 1., 1.), edgecolor='none', clip_on=True, pad=0, alpha=0.95), fontsize=fontsize) def draw_box_into_arr(arr, box_coords, box_color=None, lw=2): """ :param arr: imgs shape, (3,y,x) :param box_coords: (x1,y1,x2,y2), in ascending order :param box_color: arr of shape (3,) :param lw: linewidth in pixels """ if box_color is None: box_color = [1., 0.4, 0.] 
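# Illustrative sketch, not from the original hunk: the assignments below burn an lw-pixel
# wide frame of box_color into the image by colouring four thin slices (left/right/top/
# bottom edges). Minimal stand-alone version of the same idea on a (y, x, 3) image:
import numpy as np

def paint_frame(img_yx3, y1, x1, y2, x2, color=(1., 0.4, 0.), lw=2):
    img = img_yx3.copy()
    img[y1:y2, x1:x1 + lw] = color     # left edge
    img[y1:y2, x2:x2 + lw] = color     # right edge
    img[y1:y1 + lw, x1:x2] = color     # top edge
    img[y2:y2 + lw, x1:x2] = color     # bottom edge
    return img

demo = paint_frame(np.zeros((64, 64, 3)), 10, 12, 40, 44)
assert np.allclose(demo[10, 12], (1., 0.4, 0.))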
(x1, y1, x2, y2) = box_coords[:4] arr = np.swapaxes(arr, 0, -1) arr[..., y1:y2, x1:x1 + lw, :], arr[..., y1:y2 + lw, x2:x2 + lw, :] = box_color, box_color arr[..., y1:y1 + lw, x1:x2, :], arr[..., y2:y2 + lw, x1:x2, :] = box_color, box_color arr = np.swapaxes(arr, 0, -1) return arr def draw_boxes_into_batch(imgs, batch_boxes, type2color=None, cmap=None): """ :param imgs: either the actual batch imgs or a tuple with shape of batch imgs, need to have 3 color channels, need to be rgb; """ if isinstance(imgs, tuple): img_oshp = imgs imgs = None else: img_oshp = imgs[0].shape img_shp = shape_small_first(img_oshp) # c,x/y,y/x now imgs = np.reshape(imgs, (-1, *img_shp)) box_imgs = np.empty((len(batch_boxes), *(img_shp))) for sample, boxes in enumerate(batch_boxes): # imgs in batch have shape b,c,x,y, swap c to end sample_img = np.full(img_shp, 1.) if imgs is None else imgs[sample] for box in boxes: if len(box["box_coords"]) > 0: if type2color is not None and "box_type" in box.keys(): sample_img = draw_box_into_arr(sample_img, box["box_coords"].astype(np.int32), type2color[box["box_type"]]) else: sample_img = draw_box_into_arr(sample_img, box["box_coords"].astype(np.int32)) box_imgs[sample] = sample_img return box_imgs def plot_prediction_hist(cf, spec_df, outfile, title=None, fs=11, ax=None): labels = spec_df.class_label.values preds = spec_df.pred_score.values type_list = spec_df.det_type.tolist() if hasattr(spec_df, "det_type") else None if title is None: title = outfile.split('/')[-1] + ' count:{}'.format(len(labels)) close=False if ax is None: fig = plt.figure(tight_layout=True) ax = fig.add_subplot(1,1,1) close=True ax.set_yscale('log') ax.set_xlabel("Prediction Score", fontsize=fs) ax.set_ylabel("Occurences", fontsize=fs) ax.hist(preds[labels == 0], alpha=0.3, color=cf.red, range=(0, 1), bins=50, label="fp") ax.hist(preds[labels == 1], alpha=0.3, color=cf.blue, range=(0, 1), bins=50, label="fn at score 0 and tp") ax.axvline(x=cf.min_det_thresh, alpha=1, color=cf.orange, linewidth=1.5, label="min det thresh") if type_list is not None: fp_count = type_list.count('det_fp') fn_count = type_list.count('det_fn') tp_count = type_list.count('det_tp') pos_count = fn_count + tp_count title += '\ntp:{} fp:{} fn:{} pos:{}'.format(tp_count, fp_count, fn_count, pos_count) ax.set_title(title, fontsize=fs) ax.tick_params(axis='both', which='major', labelsize=fs) ax.tick_params(axis='both', which='minor', labelsize=fs) if close: ax.legend(loc="best", fontsize=fs) if cf.server_env: IO_safe(plt.savefig, fname=outfile, _raise=False) else: plt.savefig(outfile) plt.close() def plot_wbc_n_missing(cf, df, outfile, fs=11, ax=None): """ WBC (weighted box clustering) has parameter n_missing, which shows how many boxes are missing per cluster. This function plots the average relative amount of missing boxes sorted by cluster score. :param cf: config. :param df: dataframe. :param outfile: path to save image under. :param fs: fontsize. :param ax: axes object. 
""" bins = np.linspace(0., 1., 10) names = ["{:.1f}".format((bins[i]+(bins[i+1]-bins[i])/2.)*100) for i in range(len(bins)-1)] classes = df.pred_class.unique() colors = [cf.class_id2label[cl_id].color for cl_id in classes] binned_df = df.copy() binned_df.loc[:,"pred_score"] = pd.cut(binned_df["pred_score"], bins) close=False if ax is None: ax = plt.subplot() close=True width = 1 / (len(classes) + 1) group_positions = np.arange(len(names)) legend_handles = [] for ix, cl_id in enumerate(classes): cl_df = binned_df[binned_df.pred_class==cl_id].groupby("pred_score").agg({"cluster_n_missing": 'mean'}) ax.bar(group_positions + ix * width, cl_df.cluster_n_missing.values, width=width, color=colors[ix], alpha=0.4 + ix / 2 / len(classes), edgecolor=colors[ix]) legend_handles.append(mpatches.Patch(color=colors[ix], label=cf.class_dict[cl_id])) title = "Fold {} WBC Missing Preds\nAverage over scores and classes: {:.1f}%".format(cf.fold, df.cluster_n_missing.mean()) ax.set_title(title, fontsize=fs) ax.legend(handles=legend_handles, title="Class", loc="best", fontsize=fs, title_fontsize=fs) ax.set_xticks(group_positions + (len(classes) - 1) * width / 2) # ax.xaxis.set_major_formatter(StrMethodFormatter('{x:.1f}')) THIS WONT WORK... no clue! ax.set_xticklabels(names) ax.tick_params(axis='both', which='major', labelsize=fs) ax.tick_params(axis='both', which='minor', labelsize=fs) ax.set_axisbelow(True) ax.grid() ax.set_ylabel(r"Average Missing Preds per Cluster (%)", fontsize=fs) ax.set_xlabel("Prediction Score", fontsize=fs) if close: if cf.server_env: IO_safe(plt.savefig, fname=outfile, _raise=False) else: plt.savefig(outfile) plt.close() def plot_stat_curves(cf, stats, outfile, fill=False): """ Plot precision-recall and/or receiver-operating-characteristic curve(s). :param cf: config. :param stats: statistics as supplied by Evaluator. :param outfile: path to save plot under. :param fill: whether to colorize space between plot and x-axis. :return: """ for c in ['roc', 'prc']: plt.figure() empty_plot = True for ix, s in enumerate(stats): if s[c] is not np.nan: plt.plot(s[c][1], s[c][0], label=s['name'] + '_' + c, marker=None, color=cf.color_palette[ix%len(cf.color_palette)]) empty_plot = False if fill: plt.fill_between(s[c][1], s[c][0], alpha=0.33, color=cf.color_palette[ix%len(cf.color_palette)]) if not empty_plot: plt.title(outfile.split('/')[-1] + '_' + c) plt.legend(loc=3 if c == 'prc' else 4) plt.ylabel('precision' if c == 'prc' else '1-spec.') plt.ylim((0.,1)) plt.xlabel('recall') plt.savefig(outfile + '_' + c) plt.close() def plot_grouped_bar_chart(cf, bar_values, groups, splits, colors=None, alphas=None, errors=None, ylabel='', xlabel='', - xticklabels=None, yticks=None, yticklabels=None, ylim=None, label_format="{:.3f}", title=None, ax=None, out_file=None, legend=False, fs=11): """ Plot a categorically grouped bar chart. :param cf: config. :param bar_values: values of the bars. :param groups: groups/categories that bars belong to. :param splits: splits within groups, i.e., names of bars. :param colors: colors. :param alphas: 1-opacity. :param errors: values for errorbars. :param ylabel: label of y-axis. :param xlabel: label of x-axis. :param title: plot title. :param ax: axes object to draw into. if None, new is created. :param out_file: path to save plot. :param legend: whether to show a legend. :param fs: fontsize. :return: legend handles. 
""" bar_values = np.array(bar_values) if alphas is None: alphas = [1.,] * len(splits) if colors is None: colors = [cf.color_palette[ix%len(cf.color_palette)] for ix in range(len(splits))] if errors is None: errors = np.zeros_like(bar_values) # patterns = ('/', '\\', '*', 'O', '.', '-', '+', 'x', 'o') # patterns = tuple([patterns[ix%len(patterns)] for ix in range(len(splits))]) close=False if ax is None: ax = plt.subplot() close=True - width = 1 / (len(splits) +0.25) + width = 1 / (len(splits) + 1) group_positions = np.arange(len(groups)) for ix, split in enumerate(splits): rects = ax.bar(group_positions + ix * width, bar_values[ix], width=width, color=(*colors[ix], 0.8), edgecolor=colors[ix], yerr=errors[ix], ecolor=(*np.array(colors[ix])*0.8, 1.), capsize=5) # for ix, bar in enumerate(rects): # bar.set_hatch(patterns[ix]) - labels = [label_format.format(val) for val in bar_values[ix]] + labels = ["{:.2f}".format(val) for val in bar_values[ix]] label_bar(ax, rects, labels, [colors[ix]]*len(labels), fontsize=fs) legend_handles = [mpatches.Patch(color=colors[ix], alpha=alphas[ix], label=split) for ix, split in enumerate(splits)] if legend: - ax.legend(handles=legend_handles, fancybox=True, framealpha=1., loc="lower center") + ax.legend(handles=legend_handles) legend_handles = [(colors[ix], alphas[ix], split) for ix, split in enumerate(splits)] if title is not None: ax.set_title(title, fontsize=fs) ax.set_xticks(group_positions + (len(splits) - 1) * width / 2) - if xticklabels is None: - ax.set_xticklabels(groups, fontsize=fs) - else: - ax.set_xticklabels(xticklabels, fontsize=fs) + ax.set_xticklabels(groups, fontsize=fs) ax.set_axisbelow(True) - ax.set_xlabel(xlabel, fontsize=fs) - ax.tick_params(labelsize=fs) - ax.grid(axis='y') ax.set_ylabel(ylabel, fontsize=fs) - if yticks is not None: - ax.set_yticks(yticks) - if yticklabels is not None: - ax.set_yticklabels(yticklabels, fontsize=fs) - if ylim is not None: - ax.set_ylim(ylim) + ax.set_xlabel(xlabel, fontsize=fs) + ax.tick_params(labelsize=fs) if out_file is not None: plt.savefig(out_file, dpi=600) if close: plt.close() return legend_handles def plot_binned_rater_dissent(cf, binned_stats, out_file=None, ax=None, legend=True, fs=11): """ LIDC-specific plot: rater disagreement as standard deviations within each bin. :param cf: config. :param binned_stats: list, ix==bin_id, item: [(roi_mean, roi_std, roi_max, roi_bin_id-roi_max_bin_id) for roi in bin] :return: """ dissent = [np.array([roi[1] for roi in bin]) for bin in binned_stats] avg_dissent_first_degree = [np.mean(bin) for bin in dissent] groups = list(cf.bin_id2label.keys()) splits = [r"$1^{st}$ std. 
dev.",] colors = [cf.bin_id2label[bin_id].color[:3] for bin_id in groups] #colors = [cf.blue for bin_id in groups] alphas = [0.9,] #patterns = ('/', '\\', '*', 'O', '.', '-', '+', 'x', 'o') #patterns = tuple([patterns[ix%len(patterns)] for ix in range(len(splits))]) close=False if ax is None: ax = plt.subplot() close=True width = 1/(len(splits)+1) group_positions = np.arange(len(groups)) #total_counts = [df.loc[split].sum() for split in splits] dissent = np.array(avg_dissent_first_degree) ix=0 rects = ax.bar(group_positions+ix*width, dissent, color=colors, alpha=alphas[ix], edgecolor=colors) #for ix, bar in enumerate(rects): #bar.set_hatch(patterns[ix]) labels = ["{:.2f}".format(diss) for diss in dissent] label_bar(ax, rects, labels, colors, fontsize=fs) bin_edge_color = cf.blue ax.axhline(y=0.5, color=bin_edge_color) ax.text(2.5, 0.38, "bin edge", color=cf.white, fontsize=fs, horizontalalignment="center", bbox=dict(boxstyle='round', facecolor=(*bin_edge_color, 0.85), edgecolor='none', clip_on=True, pad=0)) if legend: legend_handles = [mpatches.Patch(color=cf.blue ,alpha=alphas[ix], label=split) for ix, split in enumerate(splits)] ax.legend(handles=legend_handles, loc='lower center', fontsize=fs) title = "LIDC-IDRI: Average Std Deviation per Lesion" plt.title(title) ax.set_xticks(group_positions + (len(splits)-1)*width/2) ax.set_xticklabels(groups, fontsize=fs) ax.set_axisbelow(True) #ax.tick_params(axis='both', which='major', labelsize=fs) #ax.tick_params(axis='both', which='minor', labelsize=fs) ax.grid() ax.set_ylabel(r"Average Dissent (MS)", fontsize=fs) ax.set_xlabel("binned malignancy-score value (ms)", fontsize=fs) ax.tick_params(labelsize=fs) if out_file is not None: plt.savefig(out_file, dpi=600) if close: plt.close() return def plot_confusion_matrix(cf, cm, out_file=None, ax=None, fs=11, cmap=plt.cm.Blues, color_bar=True): """ Plot a confusion matrix. :param cf: config. :param cm: confusion matrix, e.g., as supplied by metrics.confusion_matrix from scikit-learn. :return: """ close=False if ax is None: ax = plt.subplot() close=True im = ax.imshow(cm, interpolation='nearest', cmap=cmap) if color_bar: ax.figure.colorbar(im, ax=ax) # Rotate the tick labels and set their alignment. #plt.setp(ax.get_xticklabels(), rotation=45, ha="right", rotation_mode="anchor") # Loop over data dimensions and create text annotations. fmt = '.0%' if np.mod(cm, 1).any() else 'd' thresh = cm.max() / 2. for i in range(cm.shape[0]): for j in range(cm.shape[1]): ax.text(j, i, format(cm[i, j], fmt), ha="center", va="center", color="white" if cm[i, j] > thresh else "black") ax.set_ylabel(r"Binned Mean MS", fontsize=fs) ax.set_xlabel("Single-Annotator MS", fontsize=fs) #ax.tick_params(labelsize=fs) if close and out_file is not None: plt.savefig(out_file, dpi=600) if close: plt.close() else: return ax def plot_data_stats(cf, df, labels=None, out_file=None, ax=None, fs=11): """ Plot data-set statistics. Shows target counts. Mainly used by Dataset Class in dataloader.py. 
:param cf: configs obj :param df: pandas dataframe :param out_file: path to save fig in """ names = df.columns if labels is not None: colors = [label.color for name in names for label in labels if label.name==name] else: colors = [cf.color_palette[ix%len(cf.color_palette)] for ix in range(len(names))] #patterns = ('/', '\\', '*', 'O', '.', '-', '+', 'x', 'o') #patterns = tuple([patterns[ix%len(patterns)] for ix in range(len(splits))]) if ax is None: fig, ax = plt.subplots(figsize=(14,6), dpi=300) return_ax = False else: return_ax = True plt.margins(x=0.01) plt.subplots_adjust(bottom=0.15) bar_positions = np.arange(len(names)) name_counts = df.sum() total_count = name_counts.sum() rects = ax.bar(bar_positions, name_counts, color=colors, alpha=0.9, edgecolor=colors) labels = ["{:.0f}%".format(count/ total_count*100) for count in name_counts] label_bar(ax, rects, labels, colors, fontsize=fs) title= "Data Set RoI-Target Balance\nTotal #RoIs: {}".format(int(total_count)) ax.set_title(title, fontsize=fs) ax.set_xticks(bar_positions) rotation = "vertical" if np.any([len(str(name)) > 3 for name in names]) else None if all([isinstance(name, (float, int)) for name in names]): ax.set_xticklabels(["{:.2f}".format(name) for name in names], rotation=rotation, fontsize=fs) else: ax.set_xticklabels(names, rotation=rotation, fontsize=fs) ax.set_axisbelow(True) ax.grid() ax.set_ylabel(r"#RoIs", fontsize=fs) ax.set_xlabel(str(df._metadata[0]), fontsize=fs) ax.tick_params(axis='both', which='major', labelsize=fs) ax.tick_params(axis='both', which='minor', labelsize=fs) if out_file is not None: plt.savefig(out_file) if return_ax: return ax else: plt.close() def plot_fold_stats(cf, df, labels=None, out_file=None, ax=None): """ Similar as plot_data_stats but per single cross-val fold. :param cf: configs obj :param df: pandas dataframe :param out_file: path to save fig in """ names = df.columns splits = df.index if labels is not None: colors = [label.color for name in names for label in labels if label.name==name] else: colors = [cf.color_palette[ix%len(cf.color_palette)] for ix in range(len(names))] #patterns = ('/', '\\', '*', 'O', '.', '-', '+', 'x', 'o') #patterns = tuple([patterns[ix%len(patterns)] for ix in range(len(splits))]) if ax is None: ax = plt.subplot() return_ax = False else: return_ax = True width = 1/(len(names)+1) group_positions = np.arange(len(splits)) legend_handles = [] total_counts = [df.loc[split].sum() for split in splits] for ix, name in enumerate(names): rects = ax.bar(group_positions+ix*width, df.loc[:,name], width=width, color=colors[ix], alpha=0.9, edgecolor=colors[ix]) #for ix, bar in enumerate(rects): #bar.set_hatch(patterns[ix]) labels = ["{:.0f}%".format(df.loc[split, name]/ total_counts[ii]*100) for ii, split in enumerate(splits)] label_bar(ax, rects, labels, [colors[ix]]*len(group_positions)) legend_handles.append(mpatches.Patch(color=colors[ix] ,alpha=0.9, label=name)) title= "Fold {} RoI-Target Balances\nTotal #RoIs: {}".format(cf.fold, int(df.values.sum())) plt.title(title) ax.legend(handles=legend_handles) ax.set_xticks(group_positions + (len(names)-1)*width/2) ax.set_xticklabels(splits, rotation="vertical" if len(splits)>2 else None, size=12) ax.set_axisbelow(True) ax.grid() ax.set_ylabel(r"#RoIs") ax.set_xlabel("Set split") if out_file is not None: plt.savefig(out_file) if return_ax: return ax plt.close() def plot_batchgen_distribution(cf, pids, p_probs, balance_target, out_file=None): """plot top n_pids probabilities for drawing a pid into a batch. 
:param cf: experiment config object :param pids: sorted iterable of patient ids :param p_probs: pid's drawing likelihood, order needs to match the one of pids. :param out_file: :return: """ n_pids = len(pids) zip_sorted = np.array(sorted(list(zip(p_probs, pids)), reverse=True)) names, probs = zip_sorted[:n_pids,1], zip_sorted[:n_pids,0].astype('float32') * 100 try: names = [str(int(n)) for n in names] except ValueError: names = [str(n) for n in names] lowest_p = min(p_probs)*100 fig, ax = plt.subplots(1,1,figsize=(17,5), dpi=200) rects = ax.bar(names, probs, color=cf.blue, alpha=0.9, edgecolor=cf.blue) ax = plt.gca() ax.text(0.8, 0.92, "Lowest prob.: {:.5f}%".format(lowest_p), transform=ax.transAxes, color=cf.white, bbox=dict(boxstyle='round', facecolor=cf.blue, edgecolor='none', alpha=0.9)) ax.yaxis.set_major_formatter(StrMethodFormatter('{x:g}')) ax.set_xticklabels(names, rotation="vertical", fontsize=7) plt.margins(x=0.01) plt.subplots_adjust(bottom=0.15) if balance_target=="class_targets": balance_target = "Class" elif balance_target=="lesion_gleasons": balance_target = "GS" ax.set_title(str(balance_target)+"-Balanced Train Generator: Sampling Likelihood per PID") ax.set_axisbelow(True) ax.grid(axis='y') ax.set_ylabel("Sampling Likelihood (%)") ax.set_xlabel("PID") plt.tight_layout() if out_file is not None: plt.savefig(out_file) plt.close() def plot_batchgen_stats(cf, stats, target_name, unique_ts, out_file=None): """Plot bar chart showing RoI frequencies and empty-sample count of batch stats recorded by BatchGenerator. :param cf: config. :param stats: statistics as supplied by BatchGenerator class. :param out_file: path to save plot. """ total_samples = cf.num_epochs*cf.num_train_batches*cf.batch_size if target_name=="class_targets": target_name = "Class" label_dict = {cl_id: label for (cl_id, label) in cf.class_id2label.items()} elif target_name=="lesion_gleasons": target_name = "Lesion's Gleason Score" label_dict = cf.gs2label elif target_name=="rg_bin_targets": target_name = "Regression-Bin ID" label_dict = cf.bin_id2label else: raise NotImplementedError names = [label_dict[t_id].name for t_id in unique_ts] colors = [label_dict[t_id].color for t_id in unique_ts] title = "Training Target Frequencies" title += "\nempty samples: {} ({:.1f}%)".format(stats['empty_samples_count'], stats['empty_samples_count']/total_samples*100) rects = plt.bar(names, stats['roi_counts'], color=colors, alpha=0.9, edgecolor=colors) ax = plt.gca() ax.yaxis.set_major_formatter(StrMethodFormatter('{x:g}')) ax.set_title(title) ax.set_axisbelow(True) ax.grid() ax.set_ylabel(r"#RoIs") ax.set_xlabel(target_name) total_count = np.sum(stats["roi_counts"]) labels = ["{:.0f}%".format(count/total_count*100) for count in stats["roi_counts"]] label_bar(ax, rects, labels, colors) if out_file is not None: plt.savefig(out_file) plt.close() def view_3D_array(arr, outfile, elev=30, azim=30): from mpl_toolkits.mplot3d import Axes3D fig = plt.figure() ax = fig.add_subplot(111, projection='3d') ax.set_aspect("equal") ax.set_xlabel("x") ax.set_ylabel("y") ax.set_zlabel("z") ax.voxels(arr) ax.view_init(elev=elev, azim=azim) plt.savefig(outfile) def view_batch(cf, batch, res_dict=None, out_file=None, legend=True, show_info=True, has_colorchannels=False, isRGB=True, show_seg_ids="all", show_seg_pred=True, show_gt_boxes=True, show_gt_labels=False, roi_items="all", sample_picks=None, vol_slice_picks=None, box_score_thres=None, plot_mods=True, dpi=200, vmin=None, return_fig=False): r""" View data and target entries of a batch. 
    Batch is expected as a dict with entries 'data' and 'seg' holding np.arrays of size
    :math:`batch\_size \times modalities \times h \times w` for data and
    :math:`batch\_size \times classes \times h \times w` or :math:`batch\_size \times 1 \times h \times w` for segs.
    Classes, even if just dummy, are always needed for plotting since they determine colors.
    Pyplot expects dimensions in order y, x, chans (height, width, channels) for imshow.

    :param cf: config.
    :param batch: batch.
    :param res_dict: results dictionary.
    :param out_file: path to save the plot.
    :param legend: whether to show a legend.
    :param show_info: whether to show text info about image sizes and types in the plot.
    :param has_colorchannels: whether the image has color channels.
    :param isRGB: whether the image is RGB.
    :param show_seg_ids: "all", None, or a list of seg classes (seg_ids) to show.
    :param show_seg_pred: whether to show the predicted segmentation.
    :param show_gt_boxes: whether to show ground-truth boxes.
    :param show_gt_labels: whether to show labels of ground-truth boxes.
    :param roi_items: which roi items to show: string "all" or "targets" --> all roi items in cf.roi_items or only
        those which are targets, or a list holding keys/names of entries in cf.roi_items to plot additionally on
        roi boxes. Pass an empty iterable to show none.
    :param sample_picks: which indices of the batch to display. None for all.
    :param vol_slice_picks: when batch elements are 3D: which slices to display. None for all, or a tuple
        ("random", int: amt) / (float in [0,1]: fg_prob, int: amt) for a random pick / an fg-slices pick of amt
        slices with foreground probability fg_prob. The fg pick requires a gt seg.
    :param box_score_thres: plot only boxes with pred_score > box_score_thres. None or 0. for no threshold.
    :param plot_mods: whether to plot the input modality/modalities.
    :param dpi: graphics resolution.
    :param vmin: min value for the gray-scale cmap in imshow; set to a fixed value for inter-batch normalization,
        or None for intra-batch normalization.
    :param return_fig: whether to return the created figure.
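    A minimal usage sketch (assumes `cf`, a `batch` dict from this repo's batch generators, and optionally a
    `results_dict` from the Predictor; the output path is illustrative only):

        fig = view_batch(cf, batch, res_dict=results_dict, out_file="batch_view.png",
                         sample_picks=[0, 1], box_score_thres=0.2, return_fig=True)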
""" # pfix = prefix, ptfix = postfix patched_patient = 'patch_crop_coords' in list(batch.keys()) pfix = 'patient_' if patched_patient else '' ptfix = '_2d' if (patched_patient and cf.dim == 2 and pfix + 'class_targets_2d' in batch.keys()) else '' # -------------- get data, set flags ----------------- try: btype = type(batch[pfix + 'data']) data = batch[pfix + 'data'].astype("float32") seg = batch[pfix + 'seg'] except AttributeError: # in this case: assume it's single-annotator ground truths btype = type(batch[pfix + 'data']) data = batch[pfix + 'data'].astype("float32") seg = batch[pfix + 'seg'][0] print("Showing only gts of rater 0") data_init_shp, seg_init_shp = data.shape, seg.shape seg = np.copy(seg) if show_seg_ids else None plot_bg = batch['plot_bg'] if 'plot_bg' in batch.keys() and not isinstance(batch['plot_bg'], (int, float)) else None plot_bg_chan = batch['plot_bg'] if 'plot_bg' in batch.keys() and isinstance(batch['plot_bg'], (int, float)) else 0 gt_boxes = batch[pfix+'bb_target'+ptfix] if pfix+'bb_target'+ptfix in batch.keys() and show_gt_boxes else None class_targets = batch[pfix+'class_targets'+ptfix] if pfix+'class_targets'+ptfix in batch.keys() else None cf_roi_items = [pfix+it+ptfix for it in cf.roi_items] if roi_items == "all": roi_items = [it for it in cf_roi_items] elif roi_items == "targets": roi_items = [it for it in cf_roi_items if 'targets' in it] else: roi_items = [it for it in cf_roi_items if it in roi_items] if res_dict is not None: seg_preds = res_dict["seg_preds"] if (show_seg_pred is not None and 'seg_preds' in res_dict.keys() and show_seg_ids) else None if '2D_boxes' in res_dict.keys(): assert cf.dim==2 pr_boxes = res_dict["2D_boxes"] elif 'boxes' in res_dict.keys(): pr_boxes = res_dict["boxes"] else: pr_boxes = None else: seg_preds = None pr_boxes = None # -------------- get shapes, apply sample selection ----------------- (n_samples, mods, h, w), d = data.shape[:4], 0 z_ics = [slice(None)] if has_colorchannels: #has to be 2D data = np.transpose(data, axes=(0, 2, 3, 1)) # now b,y,x,c mods = 1 else: if len(data.shape) == 5: # 3dim case d = data.shape[4] if vol_slice_picks is None: z_ics = np.arange(0, d) elif hasattr(vol_slice_picks, "__iter__") and vol_slice_picks[0]=="random": z_ics = np.random.choice(np.arange(0, d), size=min(vol_slice_picks[1], d), replace=False) else: z_ics = vol_slice_picks sample_ics = range(n_samples) # 8000 approx value of pixels that are displayable in one figure dim (pyplot has a render limit), depends on dpi however if data.shape[0]*data.shape[2]*len(z_ics)>8000: n_picks = max(1, int(8000/(data.shape[2]*len(z_ics)))) if len(z_ics)>1 and vol_slice_picks is None: z_ics = np.random.choice(np.arange(0, data.shape[4]), size=min(data.shape[4], max(1,int(8000/(n_picks*data.shape[2])))), replace=False) if sample_picks is None: sample_picks = np.random.choice(data.shape[0], n_picks, replace=False) if sample_picks is not None: sample_ics = [s for s in sample_picks if s in sample_ics] n_samples = len(sample_ics) if not plot_mods: mods = 0 if show_seg_ids=="all": show_seg_ids = np.unique(seg) if seg_preds is not None and not type(show_seg_ids)==str: seg_preds = np.copy(seg_preds) seg_preds = np.where(np.isin(seg_preds, show_seg_ids), seg_preds, 0) if seg is not None: if not type(show_seg_ids)==str: #to save time seg = np.where(np.isin(seg, show_seg_ids), seg, 0) legend_items = {cf.seg_id2label[seg_id] for seg_id in np.unique(seg) if seg_id != 0} # add seg labels else: legend_items = set() # -------------- setup figure ----------------- if 
isRGB: data = RGB_to_rgb(data) if plot_bg is not None: plot_bg = RGB_to_rgb(plot_bg) n_cols = mods if seg is not None or gt_boxes is not None: n_cols += 1 if seg_preds is not None or pr_boxes is not None: n_cols += 1 n_rows = n_samples*len(z_ics) grid = gridspec.GridSpec(n_rows, n_cols, wspace=0.01, hspace=0.0) fig = plt.figure(figsize=((n_cols + 1)*2, n_rows*2), tight_layout=True) title_fs = 12 # fontsize sample_ics, z_ics = sorted(sample_ics), sorted(z_ics) row = 0 # current row for s_count, s_ix in enumerate(sample_ics): for z_ix in z_ics: col = 0 # current col # ----visualise input data ------------- if has_colorchannels: if plot_mods: ax = fig.add_subplot(grid[row, col]) ax.imshow(data[s_ix][...,z_ix]) ax.axis("off") if row == 0: plt.title("Input", fontsize=title_fs) if col == 0: specs = batch.get('spec', batch['pid']) intra_patient_ix = s_ix if type(z_ix) == slice else z_ix ylabel = str(specs[s_ix])[-5:] + "/" + str(intra_patient_ix) if show_info else str(specs[s_ix])[-5:] ax.set_ylabel("{:s}".format(ylabel), fontsize=title_fs) # show id-number col += 1 bg_img = plot_bg[s_ix][...,z_ix] if plot_bg is not None else data[s_ix][...,z_ix] else: for mod in range(mods): ax = fig.add_subplot(grid[row, col]) ax.imshow(data[s_ix, mod][...,z_ix], cmap="gray", vmin=vmin) suppress_axes_lines(ax) if row == 0: plt.title("Mod. " + str(mod), fontsize=title_fs) if col == 0: specs = batch.get('spec', batch['pid']) intra_patient_ix = s_ix if type(z_ix)==slice else z_ix ylabel = str(specs[s_ix])[-5:]+"/"+str(intra_patient_ix) if show_info else str(specs[s_ix])[-5:] ax.set_ylabel("{:s}".format(ylabel), fontsize=title_fs) # show id-number col += 1 bg_img = plot_bg[s_ix][...,z_ix] if plot_bg is not None else data[s_ix, plot_bg_chan][...,z_ix] # ---evtly visualise groundtruths------------------- if seg is not None or gt_boxes is not None: # img as bg for gt ax = fig.add_subplot(grid[row, col]) ax.imshow(bg_img, cmap="gray", vmin=vmin) if row == 0: plt.title("Ground Truth", fontsize=title_fs) if col == 0: specs = batch.get('spec', batch['pid']) intra_patient_ix = s_ix if type(z_ix) == slice else z_ix ylabel = str(specs[s_ix])[-5:] + "/" + str(intra_patient_ix) if show_info else str(specs[s_ix])[-5:] ax.set_ylabel("{:s}".format(ylabel), fontsize=title_fs) # show id-number suppress_axes_lines(ax) else: plt.axis('off') col += 1 if seg is not None and seg.shape[1] == 1: ax.imshow(to_rgba(seg[s_ix][0][...,z_ix], cf.cmap), alpha=0.8) elif seg is not None: ax.imshow(to_rgba(np.argmax(seg[s_ix][...,z_ix], axis=0), cf.cmap), alpha=0.8) # gt bounding boxes if gt_boxes is not None and len(gt_boxes[s_ix]) > 0: for j, box in enumerate(gt_boxes[s_ix]): if d > 0: [z1, z2] = box[4:] if not (z1<=z_ix and z_ix<=z2): box = [] if len(box) > 0: [y1, x1, y2, x2] = box[:4] width, height = x2 - x1, y2 - y1 if class_targets is not None: label = cf.class_id2label[class_targets[s_ix][j]] legend_items.add(label) if show_gt_labels: text_poss, p = [(x1, y1), (x1, (y1+y2)//2)], 0 text_fs = title_fs // 3 if roi_items is not None: for name in roi_items: if name in cf_roi_items and batch[name][s_ix][j] is not None: if 'class_targets' in name and cf.plot_class_ids: text_x = x2 #- 2 * text_fs * (len(str(class_targets[s_ix][j]))) # avoid overlap of scores text_y = y1 #+ 2 * text_fs text_str = '{}'.format(class_targets[s_ix][j]) elif 'regression_targets' in name: text_x, text_y = (x2, y2) text_str = "[" + " ".join( ["{:.1f}".format(x) for x in batch[name][s_ix][j]]) + "]" elif 'rg_bin_targets' in name: text_x, text_y = (x1, y2) text_str = 
'{}'.format(batch[name][s_ix][j]) else: text_pos = text_poss.pop(0) text_x = text_pos[0] #- 2 * text_fs * len(str(batch[name][s_ix][j])) text_y = text_pos[1] #+ 2 * text_fs text_str = '{}'.format(batch[name][s_ix][j]) ax.text(text_x, text_y, text_str, color=cf.white, fontsize=text_fs, bbox=dict(facecolor=label.color, alpha=0.7, edgecolor='none', clip_on=True, pad=0)) p+=1 bbox = mpatches.Rectangle((x1, y1), width, height, linewidth=0.6, edgecolor=label.color, facecolor='none') ax.add_patch(bbox) # -----evtly visualise predictions ------------- if pr_boxes is not None or seg_preds is not None: ax = fig.add_subplot(grid[row, col]) ax.imshow(bg_img, cmap="gray") ax.axis("off") col += 1 if row == 0: plt.title("Prediction", fontsize=title_fs) # ---------- pred boxes ------------------------- if pr_boxes is not None and len(pr_boxes[s_ix]) > 0: box_score_thres = cf.min_det_thresh if box_score_thres is None else box_score_thres for j, box in enumerate(pr_boxes[s_ix]): plot_box = box["box_type"] in ["det", "prop"] # , "pos_anchor", "neg_anchor"] if box["box_type"] == "det" and (float(box["box_score"]) <= box_score_thres or box["box_pred_class_id"] == 0): plot_box = False if plot_box: if d > 0: [z1, z2] = box["box_coords"][4:] if not (z1<=z_ix and z_ix<=z2): box = [] if len(box) > 0: [y1, x1, y2, x2] = box["box_coords"][:4] width, height = x2 - x1, y2 - y1 if box["box_type"] == "det": label = cf.class_id2label[box["box_pred_class_id"]] legend_items.add(label) text_x, text_y = x2, y1 id_text = str(box["box_pred_class_id"]) + "|" if cf.plot_class_ids else "" text_str = '{}{:.0f}'.format(id_text, box["box_score"] * 100) text_settings = dict(facecolor=label.color, alpha=0.5, edgecolor='none', clip_on=True, pad=0) ax.text(text_x, text_y, text_str, color=cf.white, bbox=text_settings, fontsize=title_fs // 4) edgecolor = label.color if 'regression' in box.keys(): text_x, text_y = x2, y2 id_text = "["+" ".join(["{:.1f}".format(x) for x in box["regression"]])+"]" #str(box["regression"]) #+ "|" if cf.plot_class_ids else "" if 'rg_uncertainty' in box.keys() and not np.isnan(box['rg_uncertainty']): id_text += " | {:.1f}".format(box['rg_uncertainty']) text_str = '{}'.format(id_text) #, box["box_score"] * 100) text_settings = dict(facecolor=label.color, alpha=0.5, edgecolor='none', clip_on=True, pad=0) ax.text(text_x, text_y, text_str, color=cf.white, bbox=text_settings, fontsize=title_fs // 4) if 'rg_bin' in box.keys(): text_x, text_y = x1, y2 text_str = '{}'.format(box["rg_bin"]) text_settings = dict(facecolor=label.color, alpha=0.5, edgecolor='none', clip_on=True, pad=0) ax.text(text_x, text_y, text_str, color=cf.white, bbox=text_settings, fontsize=title_fs // 4) else: label = cf.box_type2label[box["box_type"]] legend_items.add(label) edgecolor = label.color bbox = mpatches.Rectangle((x1, y1), width, height, linewidth=0.6, edgecolor=edgecolor, facecolor='none') ax.add_patch(bbox) # ------------ pred segs -------- if seg_preds is not None: # and seg_preds.shape[1] == 1: if cf.class_specific_seg: ax.imshow(to_rgba(seg_preds[s_ix][0][...,z_ix], cf.cmap), alpha=0.8) else: ax.imshow(bin_seg_to_rgba(seg_preds[s_ix][0][...,z_ix], cf.orange), alpha=0.8) row += 1 # -----actions for all batch entries---------- if legend and len(legend_items) > 0: patches = [] for label in legend_items: if cf.plot_class_ids and type(label) != type(cf.box_labels[0]): id_text = str(label.id) + ":" else: id_text = "" patches.append(mpatches.Patch(color=label.color, label="{}{:.10s}".format(id_text, label.name))) # assumes one image gives 
enough y-space for 5 legend items ncols = max(1, len(legend_items) // (5 * n_samples)) plt.figlegend(handles=patches, loc="upper center", bbox_to_anchor=(0.99, 0.86), borderaxespad=0., ncol=ncols, bbox_transform=fig.transFigure, fontsize=int(2/3*title_fs)) # fig.set_size_inches(mods+3+ncols-1,1.5+1.2*n_samples) if show_info: plt.figtext(0, 0, "Batch content is of type\n{}\nand has shapes\n".format(btype) + \ "{} for 'data' and {} for 'seg'".format(data_init_shp, seg_init_shp)) if out_file is not None: if cf.server_env: IO_safe(plt.savefig, fname=out_file, dpi=dpi, pad_inches=0.0, bbox_inches='tight', _raise=False) else: plt.savefig(out_file, dpi=dpi, pad_inches=0.0, bbox_inches='tight') if return_fig: return plt.gcf() plt.clf() plt.close() def view_batch_paper(cf, batch, res_dict=None, out_file=None, legend=True, show_info=True, has_colorchannels=False, isRGB=True, show_seg_ids="all", show_seg_pred=True, show_gt_boxes=True, show_gt_labels=False, roi_items="all", split_ens_ics=False, server_env=True, sample_picks=None, vol_slice_picks=None, patient_items=False, box_score_thres=None, plot_mods=True, dpi=400, vmin=None, return_fig=False): r"""view data and target entries of a batch. batch expected as dic with entries 'data' and 'seg' holding tensors or nparrays of size :math:`batch\_size \times modalities \times h \times w` for data and :math:`batch\_size \times classes \times h \times w` or :math:`batch\_size \times 1 \times h \times w` for segs. Classes, even if just dummy, are always needed for plotting since they determine colors. :param cf: :param batch: :param res_dict: :param out_file: :param legend: :param show_info: :param has_colorchannels: :param isRGB: :param show_seg_ids: :param show_seg_pred: :param show_gt_boxes: :param show_gt_labels: :param roi_items: strings "all" or "targets" --> all roi items in cf.roi_items or only those which are targets, or list holding keys/names of entries in cf.roi_items to plot additionally on roi boxes. empty iterator to show none. :param split_ens_ics: :param server_env: :param sample_picks: which indices of the batch to display. None for all. :param vol_slice_picks: when batch elements are 3D: which slices to display. None for all, or tuples ("random", int: amt) / (float€[0,1]: fg_prob, int: amt) for random pick / fg_slices pick w probability fg_prob of amt slices. fg pick requires gt seg. :param patient_items: set to true if patient-wise batch items should be displayed (need to be contained in batch and marked via 'patient_' prefix. :param box_score_thres: plot only boxes with pred_score > box_score_thres. None or 0. for no thres. :param plot_mods: :param dpi: graphics resolution :param vmin: min value for gs cmap in imshow, set to fix inter-batch, or None for intra-batch. pyplot expects dimensions in order y,x,chans (height, width, chans) for imshow. show_seg_ids: "all" or None or list with seg classes to show (seg_ids) """ # pfix = prefix, ptfix = postfix pfix = 'patient_' if patient_items else '' ptfix = '_2d' if (patient_items and cf.dim==2) else '' # -------------- get data, set flags ----------------- btype = type(batch[pfix + 'data']) data = batch[pfix + 'data'].astype("float32") seg = batch[pfix + 'seg'] # seg = np.array(seg).mean(axis=0, keepdims=True) # seg[seg>0] = 1. 
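    # Note: unlike view_batch, no single rater is selected here; `seg` keeps the rater layout provided by the batch,
    # while the commented lines above would instead merge all raters into one binary mask.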
print("Showing multirater GT") data_init_shp, seg_init_shp = data.shape, seg.shape fg_slices = np.where(np.sum(np.sum(np.squeeze(seg), axis=0), axis=0)>0)[0] if len(fg_slices)==0: print("skipping empty patient") return if vol_slice_picks is None: vol_slice_picks = fg_slices print("data shp, seg shp", data_init_shp, seg_init_shp) plot_bg = batch['plot_bg'] if 'plot_bg' in batch.keys() and not isinstance(batch['plot_bg'], (int, float)) else None plot_bg_chan = batch['plot_bg'] if 'plot_bg' in batch.keys() and isinstance(batch['plot_bg'], (int, float)) else 0 gt_boxes = batch[pfix+'bb_target'+ptfix] if pfix+'bb_target'+ptfix in batch.keys() and show_gt_boxes else None class_targets = batch[pfix+'class_targets'+ptfix] if pfix+'class_targets'+ptfix in batch.keys() else None cf_roi_items = [pfix+it+ptfix for it in cf.roi_items] if roi_items == "all": roi_items = [it for it in cf_roi_items] elif roi_items == "targets": roi_items = [it for it in cf_roi_items if 'targets' in it] else: roi_items = [it for it in cf_roi_items if it in roi_items] if res_dict is not None: seg_preds = res_dict["seg_preds"] if (show_seg_pred is not None and 'seg_preds' in res_dict.keys() and show_seg_ids) else None if '2D_boxes' in res_dict.keys(): assert cf.dim==2 pr_boxes = res_dict["2D_boxes"] elif 'boxes' in res_dict.keys(): pr_boxes = res_dict["boxes"] else: pr_boxes = None else: seg_preds = None pr_boxes = None # -------------- get shapes, apply sample selection ----------------- (n_samples, mods, h, w), d = data.shape[:4], 0 z_ics = [slice(None)] if has_colorchannels: #has to be 2D data = np.transpose(data, axes=(0, 2, 3, 1)) # now b,y,x,c mods = 1 else: if len(data.shape) == 5: # 3dim case d = data.shape[4] if vol_slice_picks is None: z_ics = np.arange(0, d) # elif hasattr(vol_slice_picks, "__iter__") and vol_slice_picks[0]=="random": # z_ics = np.random.choice(np.arange(0, d), size=min(vol_slice_picks[1], d), replace=False) else: z_ics = vol_slice_picks sample_ics = range(n_samples) # 8000 approx value of pixels that are displayable in one figure dim (pyplot has a render limit), depends on dpi however if data.shape[0]*data.shape[2]*len(z_ics)>8000: n_picks = max(1, int(8000/(data.shape[2]*len(z_ics)))) if len(z_ics)>1: if vol_slice_picks is None: z_ics = np.random.choice(np.arange(0, data.shape[4]), size=min(data.shape[4], max(1,int(8000/(n_picks*data.shape[2])))), replace=False) else: z_ics = np.random.choice(vol_slice_picks, size=min(len(vol_slice_picks), max(1,int(8000/(n_picks*data.shape[2])))), replace=False) if sample_picks is None: sample_picks = np.random.choice(data.shape[0], n_picks, replace=False) if sample_picks is not None: sample_ics = [s for s in sample_picks if s in sample_ics] n_samples = len(sample_ics) if not plot_mods: mods = 0 if show_seg_ids=="all": show_seg_ids = np.unique(seg) legend_items = set() # -------------- setup figure ----------------- if isRGB: data = RGB_to_rgb(data) if plot_bg is not None: plot_bg = RGB_to_rgb(plot_bg) n_cols = mods if seg is not None or gt_boxes is not None: n_cols += 1 if seg_preds is not None or pr_boxes is not None: n_cols += 1 n_rows = n_samples*len(z_ics) grid = gridspec.GridSpec(n_rows, n_cols, wspace=0.01, hspace=0.0) fig = plt.figure(figsize=((n_cols + 1)*2, n_rows*2), tight_layout=True) title_fs = 12 # fontsize sample_ics, z_ics = sorted(sample_ics), sorted(z_ics) row = 0 # current row for s_count, s_ix in enumerate(sample_ics): for z_ix in z_ics: col = 0 # current col # ----visualise input data ------------- if has_colorchannels: if plot_mods: ax = 
fig.add_subplot(grid[row, col]) ax.imshow(data[s_ix][...,z_ix]) ax.axis("off") if row == 0: plt.title("Input", fontsize=title_fs) if col == 0: # key = "spec" if "spec" in batch.keys() else "pid" specs = batch.get('spec', batch['pid']) intra_patient_ix = s_ix if type(z_ix) == slice else z_ix ylabel = str(specs[s_ix])[-5:] + "/" + str(intra_patient_ix) if show_info else str(specs[s_ix])[-5:] ax.set_ylabel("{:s}".format(ylabel), fontsize=title_fs) # show id-number col += 1 bg_img = plot_bg[s_ix][...,z_ix] if plot_bg is not None else data[s_ix][...,z_ix] else: for mod in range(mods): ax = fig.add_subplot(grid[row, col]) ax.imshow(data[s_ix, mod][...,z_ix], cmap="gray", vmin=vmin) suppress_axes_lines(ax) if row == 0: plt.title("Mod. " + str(mod), fontsize=title_fs) if col == 0: # key = "spec" if "spec" in batch.keys() else "pid" specs = batch.get('spec', batch['pid']) intra_patient_ix = s_ix if type(z_ix)==slice else z_ix ylabel = str(specs[s_ix])[-5:]+"/"+str(intra_patient_ix) if show_info else str(specs[s_ix])[-5:] ax.set_ylabel("{:s}".format(ylabel), fontsize=title_fs) # show id-number col += 1 bg_img = plot_bg[s_ix][...,z_ix] if plot_bg is not None else data[s_ix, plot_bg_chan][...,z_ix] # ---evtly visualise groundtruths------------------- if seg is not None or gt_boxes is not None: # img as bg for gt ax = fig.add_subplot(grid[row, col]) ax.imshow(bg_img, cmap="gray", vmin=vmin) if row == 0: plt.title("Ground Truth+ Pred", fontsize=title_fs) if col == 0: specs = batch.get('spec', batch['pid']) intra_patient_ix = s_ix if type(z_ix) == slice else z_ix ylabel = str(specs[s_ix])[-5:] + "/" + str(intra_patient_ix) if show_info else str(specs[s_ix])[-5:] ax.set_ylabel("{:s}".format(ylabel), fontsize=title_fs) # show id-number suppress_axes_lines(ax) else: plt.axis('off') col += 1 if seg is not None and seg.shape[1] == 1: cmap = {1: cf.orange} ax.imshow(to_rgba(seg[s_ix][0][...,z_ix], cmap), alpha=0.8) # gt bounding boxes if gt_boxes is not None and len(gt_boxes[s_ix]) > 0: for j, box in enumerate(gt_boxes[s_ix]): if d > 0: [z1, z2] = box[4:] if not (z1<=z_ix and z_ix<=z2): box = [] if len(box) > 0: [y1, x1, y2, x2] = box[:4] # [x1,y1,x2,y2] = box[:4]#:return: coords (x1, y1, x2, y2) width, height = x2 - x1, y2 - y1 if class_targets is not None: label = cf.class_id2label[class_targets[s_ix][j]] legend_items.add(label) if show_gt_labels and cf.plot_class_ids: text_poss, p = [(x1, y1), (x1, (y1+y2)//2)], 0 text_fs = title_fs // 3 if roi_items is not None: for name in roi_items: if name in cf_roi_items and batch[name][s_ix][j] is not None: if 'class_targets' in name: text_x = x2 #- 2 * text_fs * (len(str(class_targets[s_ix][j]))) # avoid overlap of scores text_y = y1 #+ 2 * text_fs text_str = '{}'.format(class_targets[s_ix][j]) elif 'regression_targets' in name: text_x, text_y = (x2, y2) text_str = "[" + " ".join( ["{:.1f}".format(x) for x in batch[name][s_ix][j]]) + "]" elif 'rg_bin_targets' in name: text_x, text_y = (x1, y2) text_str = '{}'.format(batch[name][s_ix][j]) else: text_pos = text_poss.pop(0) text_x = text_pos[0] #- 2 * text_fs * len(str(batch[name][s_ix][j])) text_y = text_pos[1] #+ 2 * text_fs text_str = '{}'.format(batch[name][s_ix][j]) ax.text(text_x, text_y, text_str, color=cf.black if label.color==cf.yellow else cf.white, fontsize=text_fs, bbox=dict(facecolor=label.color, alpha=0.7, edgecolor='none', clip_on=True, pad=0)) p+=1 bbox = mpatches.Rectangle((x1, y1), width, height, linewidth=0.6, edgecolor=label.color, facecolor='none') ax.add_patch(bbox) # # -----evtly visualise 
predictions ------------- # if pr_boxes is not None or seg_preds is not None: # ax = fig.add_subplot(grid[row, col]) # ax.imshow(bg_img, cmap="gray") # ax.axis("off") # col += 1 # if row == 0: # plt.title("Prediction", fontsize=title_fs) # ---------- pred boxes ------------------------- if pr_boxes is not None and len(pr_boxes[s_ix]) > 0: box_score_thres = cf.min_det_thresh if box_score_thres is None else box_score_thres for j, box in enumerate(pr_boxes[s_ix]): plot_box = box["box_type"] in ["det", "prop"] # , "pos_anchor", "neg_anchor"] if box["box_type"] == "det" and (float(box["box_score"]) <= box_score_thres or box["box_pred_class_id"] == 0): plot_box = False if plot_box: if d > 0: [z1, z2] = box["box_coords"][4:] if not (z1<=z_ix and z_ix<=z2): box = [] if len(box) > 0: [y1, x1, y2, x2] = box["box_coords"][:4] width, height = x2 - x1, y2 - y1 if box["box_type"] == "det": label = cf.bin_id2label[box["rg_bin"]] color = cf.aubergine legend_items.add(label) text_x, text_y = x2, y1 #id_text = str(box["box_pred_class_id"]) + "|" if cf.plot_class_ids else "" id_text = "fg: " text_str = '{}{:.0f}'.format(id_text, box["box_score"] * 100) text_settings = dict(facecolor=color, alpha=0.5, edgecolor='none', clip_on=True, pad=0.2) ax.text(text_x, text_y, text_str, color=cf.black if label.color==cf.yellow else cf.white, bbox=text_settings, fontsize=title_fs // 2) edgecolor = color #label.color if 'regression' in box.keys(): text_x, text_y = x2, y2 id_text = "ms: "+" ".join(["{:.1f}".format(x) for x in box["regression"]])+"" text_str = '{}'.format(id_text) #, box["box_score"] * 100) text_settings = dict(facecolor=color, alpha=0.5, edgecolor='none', clip_on=True, pad=0.2) ax.text(text_x, text_y, text_str, color=cf.black if label.color==cf.yellow else cf.white, bbox=text_settings, fontsize=title_fs // 2) if 'rg_bin' in box.keys(): text_x, text_y = x1, y2 text_str = '{}'.format(box["rg_bin"]) text_settings = dict(facecolor=label.color, alpha=0.5, edgecolor='none', clip_on=True, pad=0) # ax.text(text_x, text_y, text_str, color=cf.white, # bbox=text_settings, fontsize=title_fs // 4) if split_ens_ics and "ens_ix" in box.keys(): n_aug = box["ens_ix"].split("_")[1] edgecolor = [c for c in cf.color_palette if not c == cf.green][ int(n_aug) % (len(cf.color_palette) - 1)] text_x, text_y = x1, y2 text_str = "{}".format(box["ens_ix"][2:]) ax.text(text_x, text_y, text_str, color=cf.white, bbox=text_settings, fontsize=title_fs // 6) else: label = cf.box_type2label[box["box_type"]] legend_items.add(label) edgecolor = label.color bbox = mpatches.Rectangle((x1, y1), width, height, linewidth=0.6, edgecolor=edgecolor, facecolor='none') ax.add_patch(bbox) row += 1 # -----actions for all batch entries---------- if legend and len(legend_items) > 0: patches = [] for label in legend_items: if cf.plot_class_ids and type(label) != type(cf.box_labels[0]): id_text = str(label.id) + ":" else: id_text = "" patches.append(mpatches.Patch(color=label.color, label="{}{:.10s}".format(id_text, label.name))) # assumes one image gives enough y-space for 5 legend items ncols = max(1, len(legend_items) // (5 * n_samples)) plt.figlegend(handles=patches, loc="upper center", bbox_to_anchor=(0.99, 0.86), borderaxespad=0., ncol=ncols, bbox_transform=fig.transFigure, fontsize=int(2/3*title_fs)) # fig.set_size_inches(mods+3+ncols-1,1.5+1.2*n_samples) if show_info: plt.figtext(0, 0, "Batch content is of type\n{}\nand has shapes\n".format(btype) + \ "{} for 'data' and {} for 'seg'".format(data_init_shp, seg_init_shp)) if out_file is not None: 
plt.savefig(out_file, dpi=dpi, pad_inches=0.0, bbox_inches='tight', tight_layout=True) if return_fig: return plt.gcf() if not (server_env or cf.server_env): plt.show() plt.clf() plt.close() def view_batch_thesis(cf, batch, res_dict=None, out_file=None, legend=True, has_colorchannels=False, - isRGB=True, show_seg_ids="all", show_seg_pred=True, show_gt_boxes=True, show_gt_labels=False, show_cl_ids=True, + isRGB=True, show_seg_ids="all", show_seg_pred=True, show_gt_boxes=True, show_gt_labels=False, roi_items="all", server_env=True, sample_picks=None, vol_slice_picks=None, fontsize=12, seg_cmap="class", patient_items=False, box_score_thres=None, plot_mods=True, dpi=400, vmin=None, return_fig=False, axes=None): r"""view data and target entries of a batch. batch expected as dic with entries 'data' and 'seg' holding tensors or nparrays of size :math:`batch\_size \times modalities \times h \times w` for data and :math:`batch\_size \times classes \times h \times w` or :math:`batch\_size \times 1 \times h \times w` for segs. Classes, even if just dummy, are always needed for plotting since they determine colors. :param cf: :param batch: :param res_dict: :param out_file: :param legend: :param show_info: :param has_colorchannels: :param isRGB: :param show_seg_ids: :param show_seg_pred: :param show_gt_boxes: :param show_gt_labels: :param roi_items: strings "all" or "targets" --> all roi items in cf.roi_items or only those which are targets, or list holding keys/names of entries in cf.roi_items to plot additionally on roi boxes. empty iterator to show none. :param split_ens_ics: :param server_env: :param sample_picks: which indices of the batch to display. None for all. :param vol_slice_picks: when batch elements are 3D: which slices to display. None for all, or tuples ("random", int: amt) / (float€[0,1]: fg_prob, int: amt) for random pick / fg_slices pick w probability fg_prob of amt slices. fg pick requires gt seg. :param patient_items: set to true if patient-wise batch items should be displayed (need to be contained in batch and marked via 'patient_' prefix. :param box_score_thres: plot only boxes with pred_score > box_score_thres. None or 0. for no thres. :param plot_mods: :param dpi: graphics resolution :param vmin: min value for gs cmap in imshow, set to fix inter-batch, or None for intra-batch. pyplot expects dimensions in order y,x,chans (height, width, chans) for imshow. 
show_seg_ids: "all" or None or list with seg classes to show (seg_ids) """ # pfix = prefix, ptfix = postfix pfix = 'patient_' if patient_items else '' ptfix = '_2d' if (patient_items and cf.dim==2) else '' # -------------- get data, set flags ----------------- btype = type(batch[pfix + 'data']) data = batch[pfix + 'data'].astype("float32") seg = batch[pfix + 'seg'] data_init_shp, seg_init_shp = data.shape, seg.shape fg_slices = np.where(np.sum(np.sum(np.squeeze(seg), axis=0), axis=0)>0)[0] if len(fg_slices)==0: print("skipping empty patient") return if vol_slice_picks is None: vol_slice_picks = fg_slices #print("data shp, seg shp", data_init_shp, seg_init_shp) plot_bg = batch['plot_bg'] if 'plot_bg' in batch.keys() and not isinstance(batch['plot_bg'], (int, float)) else None plot_bg_chan = batch['plot_bg'] if 'plot_bg' in batch.keys() and isinstance(batch['plot_bg'], (int, float)) else 0 gt_boxes = batch[pfix+'bb_target'+ptfix] if pfix+'bb_target'+ptfix in batch.keys() and show_gt_boxes else None class_targets = batch[pfix+'class_targets'+ptfix] if pfix+'class_targets'+ptfix in batch.keys() else None cl_targets_sa = batch[pfix+'class_targets_sa'+ptfix] if pfix+'class_targets_sa'+ptfix in batch.keys() else None cf_roi_items = [pfix+it+ptfix for it in cf.roi_items] if roi_items == "all": roi_items = [it for it in cf_roi_items] elif roi_items == "targets": roi_items = [it for it in cf_roi_items if 'targets' in it] else: roi_items = [it for it in cf_roi_items if it in roi_items] if res_dict is not None: seg_preds = res_dict["seg_preds"] if (show_seg_pred is not None and 'seg_preds' in res_dict.keys() and show_seg_ids) else None if '2D_boxes' in res_dict.keys(): assert cf.dim==2 pr_boxes = res_dict["2D_boxes"] elif 'boxes' in res_dict.keys(): pr_boxes = res_dict["boxes"] else: pr_boxes = None else: seg_preds = None pr_boxes = None # -------------- get shapes, apply sample selection ----------------- (n_samples, mods, h, w), d = data.shape[:4], 0 z_ics = [slice(None)] if has_colorchannels: #has to be 2D data = np.transpose(data, axes=(0, 2, 3, 1)) # now b,y,x,c mods = 1 else: if len(data.shape) == 5: # 3dim case d = data.shape[4] if vol_slice_picks is None: z_ics = np.arange(0, d) else: z_ics = vol_slice_picks sample_ics = range(n_samples) # 8000 approx value of pixels that are displayable in one figure dim (pyplot has a render limit), depends on dpi however if data.shape[0]*data.shape[2]*len(z_ics)>8000: n_picks = max(1, int(8000/(data.shape[2]*len(z_ics)))) if len(z_ics)>1 and vol_slice_picks is None: z_ics = np.random.choice(np.arange(0, data.shape[4]), size=min(data.shape[4], max(1,int(8000/(n_picks*data.shape[2])))), replace=False) if sample_picks is None: sample_picks = np.random.choice(data.shape[0], n_picks, replace=False) if sample_picks is not None: sample_ics = [s for s in sample_picks if s in sample_ics] n_samples = len(sample_ics) if not plot_mods: mods = 0 if show_seg_ids=="all": show_seg_ids = np.unique(seg) legend_items = set() # -------------- setup figure ----------------- if isRGB: data = RGB_to_rgb(data) if plot_bg is not None: plot_bg = RGB_to_rgb(plot_bg) n_cols = mods if seg is not None or gt_boxes is not None: n_cols += 1 if seg_preds is not None or pr_boxes is not None: n_cols += 1 n_rows = n_samples*len(z_ics) grid = gridspec.GridSpec(n_rows, n_cols, wspace=0.01, hspace=0.0) fig = plt.figure(figsize=((n_cols + 1)*2, n_rows*2), tight_layout=True) title_fs = fontsize # fontsize text_fs = title_fs * 2 / 3 sample_ics, z_ics = sorted(sample_ics), sorted(z_ics) row = 0 # 
current row for s_count, s_ix in enumerate(sample_ics): for z_ix in z_ics: col = 0 # current col # ----visualise input data ------------- if has_colorchannels: if plot_mods: ax = fig.add_subplot(grid[row, col]) ax.imshow(data[s_ix][...,z_ix]) ax.axis("off") if row == 0: plt.title("Input", fontsize=title_fs) if col == 0: # key = "spec" if "spec" in batch.keys() else "pid" specs = batch.get('spec', batch['pid']) intra_patient_ix = s_ix if type(z_ix) == slice else z_ix ylabel = str(specs[s_ix])[-5:] + "/" + str(intra_patient_ix) if show_info else str(specs[s_ix])[-5:] ax.set_ylabel("{:s}".format(ylabel), fontsize=title_fs) # show id-number col += 1 bg_img = plot_bg[s_ix][...,z_ix] if plot_bg is not None else data[s_ix][...,z_ix] else: for mod in range(mods): ax = fig.add_subplot(grid[row, col]) ax.imshow(data[s_ix, mod][...,z_ix], cmap="gray", vmin=vmin) suppress_axes_lines(ax) if row == 0: plt.title("Mod. " + str(mod), fontsize=title_fs) if col == 0: # key = "spec" if "spec" in batch.keys() else "pid" specs = batch.get('spec', batch['pid']) intra_patient_ix = s_ix if type(z_ix)==slice else z_ix ylabel = str(specs[s_ix])[-5:]+"/"+str(intra_patient_ix) ax.set_ylabel("{:s}".format(ylabel), fontsize=title_fs) # show id-number col += 1 bg_img = plot_bg[s_ix][...,z_ix] if plot_bg is not None else data[s_ix, plot_bg_chan][...,z_ix] # ---evtly visualise groundtruths------------------- if seg is not None or gt_boxes is not None: # img as bg for gt if axes is not None and 'gt' in axes.keys(): ax = axes['gt'] else: ax = fig.add_subplot(grid[row, col]) ax.imshow(bg_img, cmap="gray", vmin=vmin) if row == 0: ax.set_title("Ground Truth", fontsize=title_fs) if col == 0: # key = "spec" if "spec" in batch.keys() else "pid" specs = batch.get('spec', batch['pid']) intra_patient_ix = s_ix if type(z_ix) == slice else z_ix ylabel = str(specs[s_ix])[-5:] + "/" + str(intra_patient_ix) # str(specs[s_ix])[-5:] ax.set_ylabel("{:s}".format(ylabel), fontsize=text_fs*1.3) # show id-number suppress_axes_lines(ax) else: ax.axis('off') col += 1 # gt bounding boxes if gt_boxes is not None and len(gt_boxes[s_ix]) > 0: for j, box in enumerate(gt_boxes[s_ix]): if d > 0: [z1, z2] = box[4:] if not (z1<=z_ix and z_ix<=z2): box = [] if len(box) > 0: [y1, x1, y2, x2] = box[:4] # [x1,y1,x2,y2] = box[:4]#:return: coords (x1, y1, x2, y2) width, height = x2 - x1, y2 - y1 if class_targets is not None: try: label = cf.bin_id2label[cf.rg_val_to_bin_id(batch['patient_regression_targets'][s_ix][j])] except AttributeError: label = cf.class_id2label[class_targets[s_ix][j]] legend_items.add(label) if show_gt_labels and cf.plot_class_ids: bbox = mpatches.Rectangle((x1, y1), width, height, linewidth=0.6, edgecolor=label.color, facecolor='none') if height<=text_fs*6: y1 -= text_fs*1.5 y2 += text_fs*2 text_poss, p = [(x1, y1), (x1, (y1+y2)//2)], 0 if roi_items is not None: for name in roi_items: if name in cf_roi_items and batch[name][s_ix][j] is not None: if 'class_targets' in name: text_str = '{}'.format(class_targets[s_ix][j]) text_x, text_y = (x2 + 0 * len(text_str) // 4, y2) elif 'regression_targets' in name: text_str = 'agg. MS: {:.2f}'.format(batch[name][s_ix][j][0]) text_x, text_y = (x2 + 0 * len(text_str) // 4, y2) elif 'rg_bin_targets_sa' in name: text_str = 'sa. MS: {}'.format(batch[name][s_ix][j]) text_x, text_y = (x2-0*len(text_str)*text_fs//4, y1) # elif 'rg_bin_targets' in name: # text_str = 'agg. 
ms:{}'.format(batch[name][s_ix][j]) # text_x, text_y = (x2+0*len(text_str)//4, y1) ax.text(text_x, text_y, text_str, color=cf.black if (label.color[:3]==cf.yellow or label.color[:3]==cf.green) else cf.white, fontsize=text_fs, bbox=dict(facecolor=label.color, alpha=0.7, edgecolor='none', clip_on=True, pad=0)) p+=1 ax.add_patch(bbox) if seg is not None and seg.shape[1] == 1: #cmap = {1: cf.orange} # cmap = {label_id: label.color for label_id, label in cf.bin_id2label.items()} # this whole function is totally only hacked together for a quick very specific case if seg_cmap == "rg" or seg_cmap=="regression": cmap = {1: cf.bin_id2label[cf.rg_val_to_bin_id(batch['patient_regression_targets'][s_ix][0])].color} else: cmap = cf.class_cmap ax.imshow(to_rgba(seg[s_ix][0][...,z_ix], cmap), alpha=0.8) # # -----evtly visualise predictions ------------- if pr_boxes is not None or seg_preds is not None: if axes is not None and 'pred' in axes.keys(): ax = axes['pred'] else: ax = fig.add_subplot(grid[row, col]) ax.imshow(bg_img, cmap="gray") ax.axis("off") col += 1 if row == 0: ax.set_title("Prediction", fontsize=title_fs) # ---------- pred boxes ------------------------- if pr_boxes is not None and len(pr_boxes[s_ix]) > 0: alpha = 0.7 box_score_thres = cf.min_det_thresh if box_score_thres is None else box_score_thres for j, box in enumerate(pr_boxes[s_ix]): plot_box = box["box_type"] in ["det", "prop"] # , "pos_anchor", "neg_anchor"] if box["box_type"] == "det" and (float(box["box_score"]) <= box_score_thres or box["box_pred_class_id"] == 0): plot_box = False if plot_box: if d > 0: [z1, z2] = box["box_coords"][4:] if not (z1<=z_ix and z_ix<=z2): box = [] if len(box) > 0: [y1, x1, y2, x2] = box["box_coords"][:4] width, height = x2 - x1, y2 - y1 if box["box_type"] == "det": try: label = cf.bin_id2label[cf.rg_val_to_bin_id(box['regression'])] except AttributeError: label = cf.class_id2label[box['box_pred_class_id']] # assert box["rg_bin"] == cf.rg_val_to_bin_id(box['regression']), \ # "box bin: {}, rg-bin {}".format(box["rg_bin"], cf.rg_val_to_bin_id(box['regression'])) color = label.color#cf.aubergine edgecolor = color # label.color text_color = cf.black if (color[:3]==cf.yellow or color[:3]==cf.green) else cf.white legend_items.add(label) bbox = mpatches.Rectangle((x1, y1), width, height, linewidth=0.6, edgecolor=edgecolor, facecolor='none') if height<=text_fs*6: y1 -= text_fs*1.5 y2 += text_fs*2 text_x, text_y = x2, y1 #id_text = str(box["box_pred_class_id"]) + "|" if cf.plot_class_ids else "" id_text = "FG: " text_str = r'{}{:.0f}%'.format(id_text, box["box_score"] * 100) text_settings = dict(facecolor=color, alpha=alpha, edgecolor='none', clip_on=True, pad=0.2) ax.text(text_x, text_y, text_str, color=text_color, bbox=text_settings, fontsize=text_fs ) if 'regression' in box.keys(): text_x, text_y = x2, y2 id_text = "MS: "+" ".join(["{:.2f}".format(x) for x in box["regression"]])+"" text_str = '{}'.format(id_text) text_settings = dict(facecolor=color, alpha=alpha, edgecolor='none', clip_on=True, pad=0.2) ax.text(text_x, text_y, text_str, color=text_color, bbox=text_settings, fontsize=text_fs) if 'rg_bin' in box.keys(): text_x, text_y = x1, y2 text_str = '{}'.format(box["rg_bin"]) text_settings = dict(facecolor=color, alpha=alpha, edgecolor='none', clip_on=True, pad=0) # ax.text(text_x, text_y, text_str, color=cf.white, # bbox=text_settings, fontsize=title_fs // 4) - if 'box_pred_class_id' in box.keys() and show_cl_ids: + if 'box_pred_class_id' in box.keys(): text_x, text_y = x2, y2 id_text = 
box["box_pred_class_id"] text_str = '{}'.format(id_text) text_settings = dict(facecolor=color, alpha=alpha, edgecolor='none', clip_on=True, pad=0.2) ax.text(text_x, text_y, text_str, color=text_color, bbox=text_settings, fontsize=text_fs) else: label = cf.box_type2label[box["box_type"]] legend_items.add(label) edgecolor = label.color ax.add_patch(bbox) row += 1 # -----actions for all batch entries---------- if legend and len(legend_items) > 0: patches = [] for label in legend_items: if cf.plot_class_ids and type(label) != type(cf.box_labels[0]): id_text = str(label.id) + ":" else: id_text = "" patches.append(mpatches.Patch(color=label.color, label="{}{:.10s}".format(id_text, label.name))) # assumes one image gives enough y-space for 5 legend items ncols = max(1, len(legend_items) // (5 * n_samples)) plt.figlegend(handles=patches, loc="upper center", bbox_to_anchor=(0.99, 0.86), borderaxespad=0., ncol=ncols, bbox_transform=fig.transFigure, fontsize=int(2/3*title_fs)) # fig.set_size_inches(mods+3+ncols-1,1.5+1.2*n_samples) if out_file is not None: plt.savefig(out_file, dpi=dpi, pad_inches=0.0, bbox_inches='tight', tight_layout=True) if return_fig: return plt.gcf() if not (server_env or cf.server_env): plt.show() plt.clf() plt.close() def view_slices(cf, img, seg=None, ids=None, title="", out_dir=None, legend=True, cmap=None, label_remap=None, instance_labels=False): """View slices of a 3D image overlayed with corresponding segmentations. :params img, seg: expected as 3D-arrays """ if isinstance(img, sitk.SimpleITK.Image): img = sitk.GetArrayViewFromImage(img) elif isinstance(img, np.ndarray): #assume channels dim is smallest and in either first or last place if np.argmin(img.shape)==2: img = np.moveaxis(img, 2,0) else: raise Exception("view_slices got unexpected img type.") if seg is not None: if isinstance(seg, sitk.SimpleITK.Image): seg = sitk.GetArrayViewFromImage(seg) elif isinstance(img, np.ndarray): if np.argmin(seg.shape)==2: seg = np.moveaxis(seg, 2,0) else: raise Exception("view_slices got unexpected seg type.") if label_remap is not None: for (key, val) in label_remap.items(): seg[seg==key] = val if instance_labels: class Label(): def __init__(self, id, name, color): self.id = id self.name = name self.color = color legend_items = {Label(seg_id, "instance_{}".format(seg_id), cf.color_palette[seg_id%len(cf.color_palette)]) for seg_id in np.unique(seg)} if cmap is None: cmap = {label.id : label.color for label in legend_items} else: legend_items = {cf.seg_id2label[seg_id] for seg_id in np.unique(seg)} if cmap is None: cmap = {label.id : label.color for label in legend_items} slices = img.shape[0] if seg is not None: assert slices==seg.shape[0], "Img and seg have different amt of slices." 
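    # Lay the slices out on a four-column grid, one subplot per slice (roughly 2.5 inches of figure height per row).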
grid = gridspec.GridSpec(int(np.ceil(slices/4)),4) fig = plt.figure(figsize=(10, slices/4*2.5)) rng = np.arange(slices, dtype='uint8') if not ids is None: rng = rng[ids] for s in rng: ax = fig.add_subplot(grid[int(s/4),int(s%4)]) ax.imshow(img[s], cmap="gray") if not seg is None: ax.imshow(to_rgba(seg[s], cmap), alpha=0.9) if legend and int(s/4)==0 and int(s%4)==3: patches = [mpatches.Patch(color=label.color, label="{}".format(label.name)) for label in legend_items] ncols = 1 plt.legend(handles=patches,bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0., ncol=ncols) plt.title("slice {}, {}".format(s, img[s].shape)) plt.axis('off') plt.suptitle(title) if out_dir is not None: plt.savefig(out_dir, dpi=300, pad_inches=0.0, bbox_inches='tight') if not cf.server_env: plt.show() plt.close() def plot_txt(cf, txts, labels=None, title="", x_label="", y_labels=["",""], y_ranges=(None,None), twin_axes=(), smooth=None, out_dir=None): """Read and plot txt data, either from file (txts is paths) or directly (txts is arrays). :param twin_axes: plot two y-axis over same x-axis. twin_axes expected as tuple defining which txt files (determined via indices) share the second y-axis. """ if isinstance(txts, str) or not hasattr(txts, '__iter__'): txts = [txts] fig = plt.figure() ax1 = fig.add_subplot(1,1,1) if len(twin_axes)>0: ax2 = ax1.twinx() for i, txt in enumerate(txts): if isinstance(txt, str): arr = np.genfromtxt(txt, delimiter=',',skip_header=1, usecols=(1,2)) else: arr = txt if i in twin_axes: ax = ax2 else: ax = ax1 if smooth is not None: spline_graph = interpol.UnivariateSpline(arr[:,0], arr[:,1], k=5, s=float(smooth)) ax.plot(arr[:, 0], spline_graph(arr[:,0]), color=cf.color_palette[i % len(cf.color_palette)], marker='', markersize=2, linestyle='solid') ax.plot(arr[:,0], arr[:,1], color=cf.color_palette[i%len(cf.color_palette)], marker='', markersize=2, linestyle='solid', label=labels[i], alpha=0.5 if smooth else 1.) plt.title(title) ax1.set_xlabel(x_label) ax1.set_ylabel(y_labels[0]) if y_ranges[0] is not None: ax1.set_ylim(y_ranges[0]) if len(twin_axes)>0: ax2.set_ylabel(y_labels[1]) if y_ranges[1] is not None: ax2.set_ylim(y_ranges[1]) plt.grid() if labels is not None: ax1.legend(loc="upper center") if len(twin_axes)>0: ax2.legend(loc=4) if out_dir is not None: plt.savefig(out_dir, dpi=200) return fig def plot_tboard_logs(cf, log_dir, tag_filters=[""], inclusive_filters=True, out_dir=None, x_label="", y_labels=["",""], y_ranges=(None,None), twin_axes=(), smooth=None): """Plot (only) tboard scalar logs from given log_dir for multiple runs sorted by tag. 
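    A minimal usage sketch (assumes `cf` from an experiment's configs and an existing TensorBoard log directory;
    paths and filter strings are illustrative only):

        plot_tboard_logs(cf, log_dir="experiments/my_exp/logs", tag_filters=["val"],
                         out_dir="experiments/my_exp/plots", x_label="epoch", smooth=2.2)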
""" print("log dir", log_dir) mpl = EventMultiplexer().AddRunsFromDirectory(log_dir) #EventAccumulator(log_dir) mpl.Reload() # Print tags of contained entities, use these names to retrieve entities as below #print(mpl.Runs()) scalars = {runName : data['scalars'] for (runName, data) in mpl.Runs().items() if len(data['scalars'])>0} print("scalars", scalars) tags = {} tag_filters = [tag_filter.lower() for tag_filter in tag_filters] for (runName, runtags) in scalars.items(): print("rn", runName.lower()) check = np.any if inclusive_filters else np.all if np.any([tag_filter in runName.lower() for tag_filter in tag_filters]): for runtag in runtags: #if tag_filter in runtag.lower(): if runtag not in tags: tags[runtag] = [runName] else: tags[runtag].append(runName) print("tags ", tags) for (tag, runNames) in tags.items(): print("runnames ", runNames) print("tag", tag) tag_scalars = [] labels = [] for run in runNames: #mpl.Scalars returns ScalarEvents array holding wall_time, step, value per time step (shape series_length x 3) #print(mpl.Scalars(runName, tag)[0]) run_scalars = [(s.step, s.value) for s in mpl.Scalars(run, tag)] print(np.array(run_scalars).shape) tag_scalars.append(np.array(run_scalars)) print("run", run) labels.append("/".join(run.split("/")[-2:])) #print("tag scalars ", tag_scalars) if out_dir is not None: out_path = os.path.join(out_dir,tag.replace("/","_")) else: out_path = None plot_txt(txts=tag_scalars, labels=labels, title=tag, out_dir=out_path, cf=cf, x_label=x_label, y_labels=y_labels, y_ranges=y_ranges, twin_axes=twin_axes, smooth=smooth) def plot_box_legend(cf, box_coords=None, class_id=None, out_dir=None): """plot a blank box explaining box annotations. :param cf: :return: """ if class_id is None: class_id = 1 img = np.ones(cf.patch_size[:2]) dim_max = max(cf.patch_size[:2]) width, height = cf.patch_size[0] // 2, cf.patch_size[1] // 2 if box_coords is None: # lower left corner x1, y1 = width // 2, height // 2 x2, y2 = x1 + width, y1 + height else: y1, x1, y2, x2 = box_coords fig = plt.figure(tight_layout=True, dpi=300) ax = fig.add_subplot(111) title_fs = 36 label = cf.class_id2label[class_id] # legend_items.add(label) ax.set_facecolor(cf.beige) ax.imshow(img, cmap='gray', vmin=0., vmax=1., alpha=0) # ax.axis('off') # suppress_axes_lines(ax) ax.set_xticks([]) ax.set_yticks([]) text_x, text_y = x2 * 0.85, y1 id_text = "class id" + " | " if cf.plot_class_ids else "" text_str = '{}{}'.format(id_text, "confidence") text_settings = dict(facecolor=label.color, alpha=0.5, edgecolor='none', clip_on=True, pad=0) ax.text(text_x, text_y, text_str, color=cf.white, bbox=text_settings, fontsize=title_fs // 4) edgecolor = label.color if any(['regression' in task for task in cf.prediction_tasks]): text_x, text_y = x2 * 0.85, y2 id_text = "regression" if any(['ken_gal' in task or 'feindt' in task for task in cf.prediction_tasks]): id_text += " | uncertainty" text_str = '{}'.format(id_text) ax.text(text_x, text_y, text_str, color=cf.white, bbox=text_settings, fontsize=title_fs // 4) if 'regression_bin' in cf.prediction_tasks or hasattr(cf, "rg_val_to_bin_id"): text_x, text_y = x1, y2 text_str = 'Rg. 
Bin' ax.text(text_x, text_y, text_str, color=cf.white, bbox=text_settings, fontsize=title_fs // 4) if 'lesion_gleasons' in cf.observables_rois: text_x, text_y = x1, y1 text_str = 'Gleason Score' ax.text(text_x, text_y, text_str, color=cf.white, bbox=text_settings, fontsize=title_fs // 4) bbox = mpatches.Rectangle((x1, y1), width, height, linewidth=1., edgecolor=edgecolor, facecolor='none') ax.add_patch(bbox) if out_dir is not None: plt.savefig(os.path.join(out_dir, "box_legend.png")) def plot_boxes(cf, box_coords, patch_size=None, scores=None, class_ids=None, out_file=None, ax=None): if patch_size is None: patch_size = cf.patch_size[:2] if class_ids is None: class_ids = np.ones((len(box_coords),), dtype='uint8') if scores is None: scores = np.ones((len(box_coords),), dtype='uint8') img = np.ones(patch_size) y1, x1, y2, x2 = box_coords[:,0], box_coords[:,1], box_coords[:,2], box_coords[:,3] width, height = x2-x1, y2-y1 close = False if ax is None: fig = plt.figure(tight_layout=True, dpi=300) ax = fig.add_subplot(111) close = True title_fs = 56 ax.set_facecolor((*cf.gray,0.15)) ax.imshow(img, cmap='gray', vmin=0., vmax=1., alpha=0) #ax.axis('off') #suppress_axes_lines(ax) ax.set_xticks([]) ax.set_yticks([]) for bix, cl_id in enumerate(class_ids): label = cf.class_id2label[cl_id] text_x, text_y = x2[bix] -20, y1[bix] +5 id_text = class_ids[bix] if cf.plot_class_ids else "" text_str = '{}{}{:.0f}'.format(id_text, " | ", scores[bix] * 100) text_settings = dict(facecolor=label.color, alpha=0.5, edgecolor='none', clip_on=True, pad=0) ax.text(text_x, text_y, text_str, color=cf.white, bbox=text_settings, fontsize=title_fs // 4) edgecolor = label.color bbox = mpatches.Rectangle((x1[bix], y1[bix]), width[bix], height[bix], linewidth=1., edgecolor=edgecolor, facecolor='none') ax.add_patch(bbox) if out_file is not None: plt.savefig(out_file) if close: plt.close() if __name__=="__main__": cluster_exp_root = "/mnt/E132-Cluster-Projects" #dataset="prostate/" dataset = "lidc/" exp_name = "ms13_mrcnnal3d_rg_bs8_480k" #exp_dir = os.path.join("datasets", dataset, "experiments", exp_name) # exp_dir = os.path.join(cluster_exp_root, dataset, "experiments", exp_name) # log_dir = os.path.join(exp_dir, "logs") # sys.path.append(exp_dir) # from configs import Configs # cf = configs() # # #print("logdir", log_dir) # #out_dir = os.path.join(cf.source_dir, log_dir.replace("/", "_")) # #print("outdir", out_dir) # log_dir = os.path.join(cf.source_dir, log_dir) # plot_tboard_logs(cf, log_dir, tag_filters=["train/lesion_avp", "val/lesion_ap", "val/lesion_avp", "val/patient_lesion_avp"], smooth=2.2, out_dir=log_dir, # y_ranges=([0,900], [0,0.8]), # twin_axes=[1], y_labels=["counts",""], x_label="epoch") #plot_box_legend(cf, out_dir=exp_dir) diff --git a/shell_scripts/ana_starter.sh b/shell_scripts/ana_starter.sh deleted file mode 100644 index 1eeb63d..0000000 --- a/shell_scripts/ana_starter.sh +++ /dev/null @@ -1,11 +0,0 @@ -mode=${1} -dataset_name=${2} - -source_dir=/home/gregor/Documents/medicaldetectiontoolkit - -exps_dir=/home/gregor/networkdrives/E132-Cluster-Projects/${dataset_name}/experiments_float_data -exps_dirs=$(ls -d ${exps_dir}/*) -for dir in ${exps_dirs}; do - echo "starting ${mode} in ${dir}" - (python ${source_dir}/exec.py --use_stored_settings --mode ${mode} --dataset_name ${dataset_name} --exp_dir ${dir}) || (echo "FAILED!") -done diff --git a/understanding_metrics.py b/understanding_metrics.py deleted file mode 100644 index 6e1532f..0000000 --- a/understanding_metrics.py +++ /dev/null @@ -1,66 +0,0 @@ - 
-""" -Created at 06/12/18 13:34 -@author: gregor -""" -import sys -import os -import numpy as np -import pandas as pd -from sklearn.metrics import roc_auc_score, average_precision_score -from sklearn.metrics import roc_curve, precision_recall_curve - -import plotting as plg -import evaluator - -sys.path.append("datasets/prostate/") -from configs import Configs - -""" This is just a supplementary file which you may use to demonstrate or understand detection metrics. -""" - - -def get_det_types(df): - det_types = [] - for ix, score in enumerate(df["pred_score"]): - if score > 0 and df["class_label"][ix] == 1: - det_types.append("det_tp") - elif score > 0 and df["class_label"][ix] == 0: - det_types.append("det_fp") - elif score == 0 and df["class_label"][ix] == 1: - det_types.append("det_fn") - elif score == 0 and df["class_label"][ix] == 0: - det_types.append("det_tn") - return det_types - - -if __name__=="__main__": - cf = Configs() - - working_dir = "/home/gregor/Documents/ramien/Thesis/UnderstandingMetrics" - - df = pd.DataFrame(columns=['pred_score', 'class_label', 'pred_class', 'det_type', 'match_iou']) - - df["pred_score"] = [0.3, 0.] - df["class_label"] = [0, 1] - #df["pred_class"] = [1]*len(df) - det_types = get_det_types(df) - - df["det_type"] = det_types - df["match_iou"] = [0.1]*len(df) - - prc_own = evaluator.compute_prc(df) - all_stats = [{"prc":prc_own, 'roc':np.nan, 'name': "demon"}] - plg.plot_stat_curves(cf, all_stats, os.path.join(working_dir, "understanding_ap_own"), fill=True) - - prc_sk = precision_recall_curve(df.class_label.tolist(), df.pred_score.tolist()) - all_stats = [{"prc":prc_sk, 'roc':np.nan, 'name': "demon"}] - plg.plot_stat_curves(cf, all_stats, os.path.join(working_dir, "understanding_ap"), fill=True) - - ap = evaluator.get_roi_ap_from_df((df, 0.02, False)) - ap_sk = average_precision_score(df.class_label.tolist(), df.pred_score.tolist()) - print("roi_ap_from_df (own implement):",ap) - print("aver_prec_sc (sklearn):",ap_sk) - - plg.plot_prediction_hist(cf, df, os.path.join(working_dir, "understanding_ap.png"), title="AP_own {:.2f}, AP_sklearn {:.2f}".format(ap, ap_sk)) -