diff --git a/.gitignore b/.gitignore
index e59b5fb..a15e261 100644
--- a/.gitignore
+++ b/.gitignore
@@ -107,6 +107,9 @@ venv.bak/
 # results
 results/
 configs/*/grid_search
+configs_dev/
+bash_scripts/
+logs/
 
 # Pycharm
 .idea
\ No newline at end of file
diff --git a/cellbox/cellbox/__init__.py b/cellbox/cellbox/__init__.py
index c9da2f2..bbc566b 100644
--- a/cellbox/cellbox/__init__.py
+++ b/cellbox/cellbox/__init__.py
@@ -5,6 +5,7 @@
 from cellbox.model import *
 from cellbox.kernel import *
 from cellbox.dataset import *
+from cellbox.dataset_torch import *
 from cellbox.train import *
 from cellbox.utils import *
 from cellbox.version import __version__, VERSION, get_msg
diff --git a/cellbox/cellbox/dataset.py b/cellbox/cellbox/dataset.py
index 15854f7..5c8c663 100644
--- a/cellbox/cellbox/dataset.py
+++ b/cellbox/cellbox/dataset.py
@@ -138,13 +138,13 @@ def s2c(cfg):
 def loo(cfg, singles):
     """data parition for leave-one-drug-out experiments"""
     drug_index = int(cfg.drug_index)
-    double_idx = cfg.loo.all(axis=1)
+    double_idx = cfg.loo.all(axis=1)  # Pick out rows where all entries are non-zero (drug combinations)
 
-    testidx = (cfg.loo == drug_index).any(axis=1)
+    testidx = (cfg.loo == drug_index).any(axis=1)  # Pick out rows that contain the drug index
 
     if singles:
         testidx = pd.concat([testidx, double_idx], axis=1)
-        testidx = testidx.all(axis=1)
+        testidx = testidx.all(axis=1)  # Pick out rows that are both combinations and contain the drug index
 
     nexp, _ = cfg.pert.shape
     nvalid = nexp - sum(testidx)
diff --git a/cellbox/cellbox/dataset_torch.py b/cellbox/cellbox/dataset_torch.py
new file mode 100644
index 0000000..6fc494a
--- /dev/null
+++ b/cellbox/cellbox/dataset_torch.py
@@ -0,0 +1,308 @@
+"""
+This module defines the data partitioning for different training schemes,
+including single-to-combo (s2c), leave-one-drug-out cross-validations (loo),
+and random partition of the dataset.
+"""
+
+import os
+import numpy as np
+import pandas as pd
+import torch
+import torch.nn as nn
+
+from torch.utils.data import TensorDataset
+from torch.utils.data import DataLoader
+
+from scipy import sparse
+
+
+def factory(cfg):
+    """formulate training dataset"""
+    # Prepare data
+    # To replace cfg.pert_in and cfg.expr_out, should it take the full pert and expr datasets?
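+    # For reference, the cfg fields this module relies on (presumably set by the
+    # config loader): root_dir, pert_file, expr_file, sparse_data, n_activity_nodes,
+    # add_noise_level, seed, experiment_type, node_index, batchsize, plus
+    # drug_index for loo and trainset_ratio/validset_ratio for the splits.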
+    if cfg.sparse_data:
+        #cfg.pert_in = tf.compat.v1.sparse.placeholder(tf.float32, [None, cfg.n_x], name='pert_in')
+        #cfg.expr_out = tf.compat.v1.sparse.placeholder(tf.float32, [None, cfg.n_x], name='expr_out')
+        cfg.pert = sparse.load_npz(os.path.join(cfg.root_dir, cfg.pert_file))
+        cfg.expr = sparse.load_npz(os.path.join(cfg.root_dir, cfg.expr_file))
+    else:
+        #cfg.pert_in = tf.compat.v1.placeholder(tf.float32, [None, cfg.n_x], name='pert_in')
+        #cfg.expr_out = tf.compat.v1.placeholder(tf.float32, [None, cfg.n_x], name='expr_out')
+        cfg.pert = pd.read_csv(os.path.join(cfg.root_dir, cfg.pert_file), header=None, dtype=np.float32)
+        cfg.expr = pd.read_csv(os.path.join(cfg.root_dir, cfg.expr_file), header=None, dtype=np.float32)
+
+    group_df = pd.DataFrame(np.where(cfg.pert != 0), index=['row_id', 'pert_idx']).T.groupby('row_id')
+    max_combo_degree = group_df.pert_idx.count().max()
+    cfg.loo = pd.DataFrame(group_df.pert_idx.apply(
+        lambda x: pad_and_realign(x, max_combo_degree, cfg.n_activity_nodes - 1)
+    ).tolist())
+
+    # add noise
+    if cfg.add_noise_level > 0:
+        np.random.seed(cfg.seed)
+        assert not cfg.sparse_data, "Adding noise to sparse data format is yet to be supported"
+        cfg.expr.iloc[:] = cfg.expr.values + np.random.normal(loc=0, scale=cfg.add_noise_level, size=cfg.expr.shape)
+
+    # Data partition
+    if cfg.experiment_type == 'random partition' or cfg.experiment_type == 'full data':
+        cfg.dataset = random_partition(cfg)
+
+    elif cfg.experiment_type == 'leave one out (w/o single)':
+        cfg.dataset = loo(cfg, singles=False)
+
+    elif cfg.experiment_type == 'leave one out (w/ single)':
+        cfg.dataset = loo(cfg, singles=True)
+
+    elif cfg.experiment_type == 'single to combo':
+        cfg.dataset = s2c(cfg)
+
+    elif cfg.experiment_type == 'random partition with replicates':
+        cfg.dataset = random_partition_with_replicates(cfg)
+
+    # Prepare feed_dicts
+    #cfg.feed_dicts = {
+    #    'train_set': {
+    #        cfg.pert_in: cfg.dataset['pert_train'],
+    #        cfg.expr_out: cfg.dataset['expr_train'],
+    #    },
+    #    'valid_set': {
+    #        cfg.pert_in: cfg.dataset['pert_valid'],
+    #        cfg.expr_out: cfg.dataset['expr_valid'],
+    #    },
+    #    'test_set': {
+    #        cfg.pert_in: cfg.dataset['pert_test'],
+    #        cfg.expr_out: cfg.dataset['expr_test']
+    #    }
+    #}
+    cfg = get_tensors(cfg)
+
+    return cfg
+
+
+def pad_and_realign(x, length, idx_shift=0):
+    x -= idx_shift
+    padded = np.pad(x, (0, length - len(x)), 'constant')
+    return padded
+
+
+def get_tensors(cfg):
+    # prepare training placeholders
+    #cfg.l1_lambda_placeholder = tf.compat.v1.placeholder(tf.float32, name='l1_lambda')
+    #cfg.l2_lambda_placeholder = tf.compat.v1.placeholder(tf.float32, name='l2_lambda')
+    #cfg.lr = tf.compat.v1.placeholder(tf.float32, name='lr')
+    cfg.l1_lambda_placeholder = 0.0
+    cfg.l2_lambda_placeholder = 0.0
+    cfg.lr = 0.0
+
+    # Prepare dataset iterators (these can be replaced with DataLoader)
+    #dataset = tf.data.Dataset.from_tensor_slices((cfg.pert_in, cfg.expr_out))
+    #cfg.iter_train = tf.compat.v1.data.make_initializable_iterator(
+    #    dataset.shuffle(buffer_size=1024, reshuffle_each_iteration=True).batch(cfg.batchsize))
+    #cfg.iter_monitor = tf.compat.v1.data.make_initializable_iterator(
+    #    dataset.repeat().shuffle(buffer_size=1024, reshuffle_each_iteration=True).batch(cfg.batchsize))
+    #cfg.iter_eval = tf.compat.v1.data.make_initializable_iterator(dataset.batch(cfg.batchsize))
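+    # NOTE: TensorDataset below expects dense numpy arrays; when cfg.sparse_data is
+    # set, the partition functions return (indices, values, dense_shape) triplets
+    # from sparse_to_feedable_arrays, which torch.from_numpy cannot consume directly,
+    # so those would need densifying (or torch sparse handling) first.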
+    train_dataset = TensorDataset(
+        torch.from_numpy(cfg.dataset["pert_train"]), torch.from_numpy(cfg.dataset["expr_train"])
+    )
+    val_dataset = TensorDataset(
+        torch.from_numpy(cfg.dataset["pert_valid"]), torch.from_numpy(cfg.dataset["expr_valid"])
+    )
+    test_dataset = TensorDataset(
+        torch.from_numpy(cfg.dataset["pert_test"]), torch.from_numpy(cfg.dataset["expr_test"])
+    )
+
+    cfg.iter_train = DataLoader(
+        train_dataset, batch_size=cfg.batchsize, shuffle=True
+    )
+    cfg.iter_monitor = DataLoader(
+        val_dataset, batch_size=cfg.batchsize, shuffle=True
+    )
+    cfg.iter_eval = DataLoader(
+        test_dataset, batch_size=cfg.batchsize, shuffle=True
+    )
+
+    return cfg
+
+
+def s2c(cfg):
+    """data partition for single-to-combo experiments"""
+    double_idx = cfg.loo.all(axis=1)
+    testidx = double_idx
+
+    nexp, _ = cfg.pert.shape
+    nvalid = nexp - sum(testidx)
+    ntrain = int(nvalid * cfg.validset_ratio)
+
+    valid_pos = np.random.choice(range(nvalid), nvalid, replace=False)
+    dataset = {
+        "node_index": cfg.node_index,
+        "pert_full": cfg.pert,
+        "train_pos": valid_pos[:ntrain],
+        "valid_pos": valid_pos[ntrain:],
+        "test_pos": testidx
+    }
+
+    if cfg.sparse_data:
+        dataset.update({
+            "pert_train": sparse_to_feedable_arrays(cfg.pert[~testidx][valid_pos[:ntrain]]),
+            "pert_valid": sparse_to_feedable_arrays(cfg.pert[~testidx][valid_pos[ntrain:]]),
+            "pert_test": sparse_to_feedable_arrays(cfg.pert[testidx]),
+            "expr_train": sparse_to_feedable_arrays(cfg.expr[~testidx][valid_pos[:ntrain]]),
+            "expr_valid": sparse_to_feedable_arrays(cfg.expr[~testidx][valid_pos[ntrain:]]),
+            "expr_test": sparse_to_feedable_arrays(cfg.expr[testidx])
+        })
+    else:
+        dataset.update({
+            "pert_train": cfg.pert[~testidx].iloc[valid_pos[:ntrain], :].values,
+            "pert_valid": cfg.pert[~testidx].iloc[valid_pos[ntrain:], :].values,
+            "pert_test": cfg.pert[testidx].values,
+            "expr_train": cfg.expr[~testidx].iloc[valid_pos[:ntrain], :].values,
+            "expr_valid": cfg.expr[~testidx].iloc[valid_pos[ntrain:], :].values,
+            "expr_test": cfg.expr[testidx].values
+        })
+
+    return dataset
+
+
+def loo(cfg, singles):
+    """data partition for leave-one-drug-out experiments"""
+    drug_index = int(cfg.drug_index)
+    double_idx = cfg.loo.all(axis=1)
+
+    testidx = (cfg.loo == drug_index).any(axis=1)
+
+    if singles:
+        testidx = pd.concat([testidx, double_idx], axis=1)
+        testidx = testidx.all(axis=1)
+
+    nexp, _ = cfg.pert.shape
+    nvalid = nexp - sum(testidx)
+    ntrain = int(nvalid * cfg.validset_ratio)
+
+    valid_pos = np.random.choice(range(nvalid), nvalid, replace=False)
+    dataset = {
+        "node_index": cfg.node_index,
+        "pert_full": cfg.pert,
+        "train_pos": valid_pos[:ntrain],
+        "valid_pos": valid_pos[ntrain:],
+        "test_pos": testidx
+    }
+
+    if cfg.sparse_data:
+        dataset.update({
+            "pert_train": sparse_to_feedable_arrays(cfg.pert[~testidx][valid_pos[:ntrain]]),
+            "pert_valid": sparse_to_feedable_arrays(cfg.pert[~testidx][valid_pos[ntrain:]]),
+            "pert_test": sparse_to_feedable_arrays(cfg.pert[testidx]),
+            "expr_train": sparse_to_feedable_arrays(cfg.expr[~testidx][valid_pos[:ntrain]]),
+            "expr_valid": sparse_to_feedable_arrays(cfg.expr[~testidx][valid_pos[ntrain:]]),
+            "expr_test": sparse_to_feedable_arrays(cfg.expr[testidx])
+        })
+    else:
+        dataset.update({
+            "pert_train": cfg.pert[~testidx].iloc[valid_pos[:ntrain], :].values,
+            "pert_valid": cfg.pert[~testidx].iloc[valid_pos[ntrain:], :].values,
+            "pert_test": cfg.pert[testidx].values,
+            "expr_train": cfg.expr[~testidx].iloc[valid_pos[:ntrain], :].values,
+            "expr_valid": cfg.expr[~testidx].iloc[valid_pos[ntrain:], :].values,
+            "expr_test": cfg.expr[testidx].values
+        })
+
+    return dataset
+
+
+def random_partition(cfg):
+    """random dataset partition"""
+    nexp, _ = cfg.pert.shape
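+    # Sizing example (illustrative numbers, not from the repo): with nexp=100,
+    # trainset_ratio=0.7 and validset_ratio=0.8, the first 70 shuffled rows
+    # (`nvalid`) form the train+valid pool, of which the first 56 (`ntrain`)
+    # train and the remaining 14 validate; the last 30 rows are the test set.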
+    nvalid = int(nexp * cfg.trainset_ratio)
+    ntrain = int(nvalid * cfg.validset_ratio)
+    try:
+        random_pos = np.genfromtxt('random_pos.csv', dtype=int)
+    except Exception:
+        random_pos = np.random.choice(range(nexp), nexp, replace=False)
+        np.savetxt('random_pos.csv', random_pos, fmt='%d')
+
+    dataset = {
+        "node_index": cfg.node_index,
+        "pert_full": cfg.pert,
+        "train_pos": random_pos[:ntrain],
+        "valid_pos": random_pos[ntrain:nvalid],
+        "test_pos": random_pos[nvalid:]
+    }
+
+    if cfg.sparse_data:
+        dataset.update({
+            "pert_train": sparse_to_feedable_arrays(cfg.pert[random_pos[:ntrain], :]),
+            "pert_valid": sparse_to_feedable_arrays(cfg.pert[random_pos[ntrain:nvalid], :]),
+            "pert_test": sparse_to_feedable_arrays(cfg.pert[random_pos[nvalid:], :]),
+            "expr_train": sparse_to_feedable_arrays(cfg.expr[random_pos[:ntrain], :]),
+            "expr_valid": sparse_to_feedable_arrays(cfg.expr[random_pos[ntrain:nvalid], :]),
+            "expr_test": sparse_to_feedable_arrays(cfg.expr[random_pos[nvalid:], :])
+        })
+    else:
+        dataset.update({
+            "pert_train": cfg.pert.iloc[random_pos[:ntrain], :].values,
+            "pert_valid": cfg.pert.iloc[random_pos[ntrain:nvalid], :].values,
+            "pert_test": cfg.pert.iloc[random_pos[nvalid:], :].values,
+            "expr_train": cfg.expr.iloc[random_pos[:ntrain], :].values,
+            "expr_valid": cfg.expr.iloc[random_pos[ntrain:nvalid], :].values,
+            "expr_test": cfg.expr.iloc[random_pos[nvalid:], :].values
+        })
+
+    return dataset
+
+
+def random_partition_with_replicates(cfg):
+    """random dataset partition by unique perturbation conditions"""
+    nexp = len(np.unique(cfg.loo, axis=0))
+    nvalid = int(nexp * cfg.trainset_ratio)
+    ntrain = int(nvalid * cfg.validset_ratio)
+    conds_train_idx = np.random.choice(range(nexp), nexp, replace=False)
+    pos_train = [idx for idx in range(nexp) if idx in conds_train_idx[:ntrain]]
+    pos_valid = [idx for idx in range(nexp) if idx in conds_train_idx[ntrain:nvalid]]
+    pos_test = [idx for idx in range(nexp) if idx in conds_train_idx[nvalid:]]
+
+    try:
+        random_pos = np.genfromtxt('random_pos.csv', dtype=int)
+    except Exception:
+        random_pos = np.concatenate([pos_train, pos_valid, pos_test])
+        np.savetxt('random_pos.csv', random_pos, fmt='%d')
+
+    dataset = {
+        "node_index": cfg.node_index,
+        "pert_full": cfg.pert,
+        "train_pos": random_pos[:ntrain],
+        "valid_pos": random_pos[ntrain:nvalid],
+        "test_pos": random_pos[nvalid:]
+    }
+
+    if cfg.sparse_data:
+        dataset.update({
+            "pert_train": sparse_to_feedable_arrays(cfg.pert[random_pos[:ntrain], :]),
+            "pert_valid": sparse_to_feedable_arrays(cfg.pert[random_pos[ntrain:nvalid], :]),
+            "pert_test": sparse_to_feedable_arrays(cfg.pert[random_pos[nvalid:], :]),
+            "expr_train": sparse_to_feedable_arrays(cfg.expr[random_pos[:ntrain], :]),
+            "expr_valid": sparse_to_feedable_arrays(cfg.expr[random_pos[ntrain:nvalid], :]),
+            "expr_test": sparse_to_feedable_arrays(cfg.expr[random_pos[nvalid:], :])
+        })
+    else:
+        dataset.update({
+            "pert_train": cfg.pert.iloc[random_pos[:ntrain], :].values,
+            "pert_valid": cfg.pert.iloc[random_pos[ntrain:nvalid], :].values,
+            "pert_test": cfg.pert.iloc[random_pos[nvalid:], :].values,
+            "expr_train": cfg.expr.iloc[random_pos[:ntrain], :].values,
+            "expr_valid": cfg.expr.iloc[random_pos[ntrain:nvalid], :].values,
+            "expr_test": cfg.expr.iloc[random_pos[nvalid:], :].values
+        })
+
+    return dataset
+
+
+def sparse_to_feedable_arrays(npz):
+    """convert sparse matrix to arrays"""
+    coo = npz.tocoo()
+    indices = [[i, j] for i, j in zip(coo.row, coo.col)]
+    values = coo.data
+    dense_shape = coo.shape
+    return indices, values, dense_shape
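+
+# A possible PyTorch counterpart to the TF-style triplet above (a sketch under the
+# assumption that sparse inputs should become torch tensors; not wired up yet):
+#   indices, values, dense_shape = sparse_to_feedable_arrays(npz)
+#   st = torch.sparse_coo_tensor(torch.tensor(indices).T,  # (2, nnz) indices
+#                                torch.tensor(values), size=dense_shape)
+#   st.to_dense()  # TensorDataset consumes dense tensors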
diff --git a/cellbox/cellbox/model_torch.py b/cellbox/cellbox/model_torch.py
new file mode 100644
index 0000000..8863fca
--- /dev/null
+++ b/cellbox/cellbox/model_torch.py
@@ -0,0 +1,95 @@
+import numpy as np
+import torch
+import torch.nn as nn
+import cellbox.kernel
+from cellbox.utils import loss, optimize
+
+
+def factory(args):
+    """define model type based on configuration input"""
+    #if args.model == 'CellBox':
+    #    return CellBox(args).build()
+    # Deprecated for now, use scikit-learn instead
+    # TODO: update the co-expression models
+    # if args.model == 'CoExp':
+    #     return CoExp(args).build()
+    # if args.model == 'CoExp_nonlinear':
+    #     return CoExpNonlinear(args).build()
+    if args.model == 'LinReg':
+        return LinReg(args)
+    #if args.model == 'NN':
+    #    return NN(args).build()
+    # TODO: Bayesian model
+    # if args.model == 'Bayesian':
+    #     return BN(args).build()
+
+
+class PertBio(nn.Module):
+    """define abstract perturbation model"""
+    def __init__(self, args):
+        super().__init__()
+        self.args = args
+        self.n_x = args.n_x
+        self.pert_in, self.expr_out = args.pert_in, args.expr_out
+        self.iter_train, self.iter_monitor, self.iter_eval = args.iter_train, args.iter_monitor, args.iter_eval
+        #self.train_x, self.train_y = self.iter_train.get_next()
+        #self.monitor_x, self.monitor_y = self.iter_monitor.get_next()
+        #self.eval_x, self.eval_y = self.iter_eval.get_next()
+        self.l1_lambda, self.l2_lambda = self.args.l1_lambda_placeholder, self.args.l2_lambda_placeholder
+        self.lr = self.args.lr
+
+    def get_ops(self):
+        """get operators for tensorflow"""
+        # Do we need this at all for Pytorch?
+        pass
+        #if self.args.weight_loss == 'expr':
+        #    self.train_loss, self.train_mse_loss = loss(self.train_y, self.train_yhat, self.params['W'],
+        #                                                self.l1_lambda, self.l2_lambda, weight=self.train_y)
+        #    self.monitor_loss, self.monitor_mse_loss = loss(self.monitor_y, self.monitor_yhat, self.params['W'],
+        #                                                    self.l1_lambda, self.l2_lambda, weight=self.monitor_y)
+        #    self.eval_loss, self.eval_mse_loss = loss(self.eval_y, self.eval_yhat, self.params['W'],
+        #                                              self.l1_lambda, self.l2_lambda, weight=self.eval_y)
+        #elif self.args.weight_loss == 'None':
+        #    self.train_loss, self.train_mse_loss = loss(self.train_y, self.train_yhat, self.params['W'],
+        #                                                self.l1_lambda, self.l2_lambda)
+        #    self.monitor_loss, self.monitor_mse_loss = loss(self.monitor_y, self.monitor_yhat, self.params['W'],
+        #                                                    self.l1_lambda, self.l2_lambda)
+        #    self.eval_loss, self.eval_mse_loss = loss(self.eval_y, self.eval_yhat, self.params['W'],
+        #                                              self.l1_lambda, self.l2_lambda)
+        #self.op_optimize = optimize(self.train_loss, self.lr)
+
+    def get_variables(self):
+        """get model parameters (overwritten by model configuration)"""
+        raise NotImplementedError
+
+    def forward(self, x, mu):
+        """forward propagation (overwritten by model configuration)"""
+        raise NotImplementedError
+
+    #def build(self):
+    #    """build model"""
+    #    # Do we need this at all for Pytorch?
+    #    self.params = {}
+    #    self.get_variables()
+    #    self.train_yhat = self.forward(self.train_y0, self.train_x)
+    #    self.monitor_yhat = self.forward(self.monitor_y0, self.monitor_x)
+    #    self.eval_yhat = self.forward(self.eval_y0, self.train_x)
+    #    self.get_ops()
+    #    return self
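+
+# Usage sketch (assumes cfg was prepared by cellbox.dataset_torch.factory and, as
+# PertBio expects, also carries pert_in/expr_out attributes):
+#   model = factory(cfg)               # with cfg.model == 'LinReg'
+#   for pert, expr in cfg.iter_train:
+#       expr_hat = model(None, pert)   # LinReg ignores x and regresses expr on pert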
+
+
+class LinReg(PertBio):
+    """linear regression model"""
+    def __init__(self, args):
+        super().__init__(args)
+        self.get_variables()
+
+    def get_variables(self):
+        self.W = nn.Linear(
+            in_features=self.n_x,
+            out_features=self.n_x,
+            bias=True
+        )
+
+    def forward(self, x, mu):
+        return self.W(mu)
diff --git a/cellbox/cellbox/utils_torch.py b/cellbox/cellbox/utils_torch.py
new file mode 100644
index 0000000..f12f9ee
--- /dev/null
+++ b/cellbox/cellbox/utils_torch.py
@@ -0,0 +1,74 @@
+"""
+This module defines utility functions during training, including the loss function,
+the optimizer, the timer, and the md5 key for each configuration
+"""
+
+import time
+import hashlib
+import torch
+import torch.nn as nn
+import json
+
+
+def loss(x_gold, x_hat, W, l1=0, l2=0, weight=1.):
+    """
+    Evaluate loss
+
+    Args:
+        - x_gold, x_hat, W (torch.tensor)
+        - l1, l2, weight (float)
+    Returns:
+        - (loss_full, loss_mse): scalar tensors, the regularized loss and the
+          (weighted) mean squared error
+    """
+    #if isinstance(x_gold, tf.SparseTensor):
+    #    x_gold = tf.sparse.to_dense(x_gold)
+    loss_mse = torch.mean(torch.square(x_gold - x_hat) * torch.abs(weight))
+    l1_loss = l1 * torch.sum(torch.abs(W))
+    l2_loss = l2 * torch.sum(torch.square(torch.abs(W)))
+    loss_full = loss_mse + l1_loss + l2_loss
+    return loss_full, loss_mse
+
+
+def optimize(loss_in, lr, optimizer=torch.optim.Adam, var_list=None):
+    """
+    Optimize the training loss using Adam
+    Do we need this at all for Pytorch?
+    Args:
+        loss_in (float): training loss, mean squared error + L1 regularization term
+        lr (float): learning rate
+        optimizer: default torch.optim.Adam
+        var_list: list of vars to be optimized
+    Returns:
+        opt_op (optimizer): op to optimize the training loss
+        loss (loss): training loss, including regularization if applicable
+    """
+    #if var_list is None:
+    #    var_list = tf.compat.v1.get_collection(tf.compat.v1.GraphKeys.GLOBAL_VARIABLES)
+    #with tf.compat.v1.variable_scope("optimization", reuse=tf.compat.v1.AUTO_REUSE):
+    #    opt = optimizer(lr)
+    #    opt_op = opt.minimize(loss_in, var_list=var_list)
+    #return opt_op
+    pass
+
+
+class TimeLogger:
+    """calculate training time"""
+    def __init__(self, time_logger_step=1, hierachy=1):
+        self.time_logger_step = time_logger_step
+        self.step_count = 0
+        self.hierachy = hierachy
+        self.time = time.time()
+
+    def log(self, s):
+        """time log"""
+        if self.step_count % self.time_logger_step == 0:
+            print("#" * 4 * self.hierachy, " ", s, " --time elapsed: %.2f" % (time.time() - self.time))
+            self.time = time.time()
+            self.step_count += 1
+
+
+def md5(obj):
+    """
+    returns an md5 hash string of the object's configuration key
+    """
+    key = json.dumps(vars(obj), sort_keys=True)
+    return hashlib.md5(key.encode()).hexdigest()
diff --git a/data/expr_ind_last_col.csv b/data/expr_ind_last_col.csv
new file mode 100644
index 0000000..42b74f9
--- /dev/null
+++ b/data/expr_ind_last_col.csv
@@ -0,0 +1,89 @@
+-0.435,-0.608,-0.855,-0.712,-0.113,-0.089,0.096,0.291,0.428,0.375,0.238,-0.338,-0.559,-0.614,-0.514,0.048,-0.298,0.277,-0.626,-0.306,0.131,-0.283,-0.089,0.056,0.044,-0.333,-0.006,-0.024,0.049,0.022,-0.045,0.315,-0.111,0.032,-0.024,0.116,-0.066,-0.367,0.297,-0.022,-0.553,-0.134,-0.704,-0.481,-0.543,0.066,-0.118,0.162,-0.283,0.153,0.125,-0.028,0.011,-0.109,0.009,-0.154,0.008,0.124,0.167,0.281,0.135,0.04,-0.159,-0.042,0.141,0.092,0.154,0.177,0.103,0.09,0.277,-0.121,-0.076,0.045,0.305,0.201,0.052,0.047,-0.004,0.132,0.024,0.182,0.0,0.563,0.0,0.0,0.242,-0.698,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0 +-0.308,-0.322,-0.999,-0.594,0.022,-0.061,0.093,0.222,0.255,0.645,-0.42,-0.114,-0.564,-0.52,-0.438,0.391,-0.202,-0.172,-0.585,-0.154,0.0,-0.274,-0.058,-0.083,0.159,-0.273,-0.002,0.173,0.055,0.027,0.166,0.436,-0.312,-0.118,0.405,-0.173,0.001,0.743,0.195,-0.007,-0.621,-0.099,-0.551,-0.316,-0.778,-0.061,-0.002,0.102,-0.093,0.18,-0.021,0.099,0.037,-0.143,0.112,-0.177,-0.072,-0.054,0.02,0.21,-0.042,0.104,-0.116,-0.038,0.09,0.088,0.041,0.056,0.3,0.053,0.147,-0.223,0.121,0.15,0.21,0.081,-0.051,-0.074,-0.015,0.239,-0.024,0.049,0.0,0.0,1.748,0.0,0.431,-0.698,0.0,-0.89,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1 +-0.402,-0.549,-0.837,-0.675,-0.011,-0.103,0.089,0.418,0.276,0.439,0.084,-0.265,-0.569,-0.5,-0.505,0.092,-0.176,0.343,-0.653,-0.281,0.102,-0.265,0.123,0.1,0.087,-0.147,-0.025,0.002,0.091,0.134,0.017,0.445,-0.255,-0.023,-0.054,0.065,0.009,-0.854,0.173,-0.071,-0.589,-0.078,-0.727,-0.498,-0.63,0.038,-0.026,0.097,-0.056,0.145,0.005,-0.027,0.023,-0.135,0.066,-0.13,0.104,0.021,0.072,0.195,-0.01,0.09,-0.192,-0.091,0.118,0.046,0.028,0.025,0.124,0.066,0.218,-0.198,0.073,0.112,0.307,0.095,0.006,-0.059,0.023,0.109,0.021,0.23,0.0,0.464,0.0,0.0,0.143,-0.698,0.0,0.0,-0.393,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2 +-0.475,-0.652,-0.935,-0.751,-0.085,-0.019,0.064,0.207,0.147,0.416,0.17,-0.39,-0.541,-0.74,-0.624,0.174,-0.151,0.237,-0.631,-0.289,-0.023,-0.342,-0.049,0.026,-0.08,-0.266,-0.049,0.065,0.081,0.149,0.028,0.41,-0.267,-0.066,-0.08,-0.004,-0.028,-0.509,0.198,-0.148,-0.602,-0.068,-0.757,-0.584,-0.928,0.071,-0.079,0.086,0.059,0.215,0.056,0.035,-0.018,-0.067,0.076,-0.093,0.031,0.007,0.186,0.175,0.035,-0.013,-0.147,-0.063,0.12,0.063,0.021,0.188,0.098,-0.032,0.183,-0.176,0.115,0.017,0.186,0.12,-0.042,-0.228,-0.068,0.072,-0.024,0.221,0.0,0.532,0.0,0.0,-0.07,-0.698,0.0,0.0,0.0,-0.474,0.0,0.0,0.0,0.0,0.0,0.0,0.0,3 +-0.611,-1.097,-1.234,-0.928,-0.114,-0.113,-0.002,-0.042,0.277,0.732,0.205,-0.799,-0.829,-0.758,-0.66,0.187,-0.214,0.316,-0.825,-0.236,0.154,-0.337,0.021,0.02,0.01,-0.212,-0.027,0.096,0.154,0.2,-0.127,0.714,-0.409,-0.013,-0.064,0.005,0.02,-0.789,0.256,-0.299,-0.747,-0.007,-1.241,-0.826,-1.02,0.061,-0.109,0.108,0.016,0.214,0.059,0.071,0.017,-0.191,0.142,-0.165,-0.013,0.012,0.138,0.238,0.016,0.052,-0.21,-0.093,0.116,0.109,0.114,0.12,0.128,0.032,0.236,-0.301,0.052,-0.083,0.174,0.189,0.025,-0.156,-0.048,0.286,0.037,0.282,0.0,0.701,0.0,0.0,0.286,-0.698,0.0,0.0,0.0,0.0,-0.552,0.0,0.0,0.0,0.0,0.0,0.0,4 
+-0.649,-0.49,-0.294,-0.491,-0.251,-0.361,-0.018,-0.144,1.191,0.098,-0.364,-0.133,-0.379,-0.053,-0.797,-0.178,-0.113,0.269,-0.911,-0.238,-0.404,0.146,0.016,0.224,0.028,-0.283,0.055,-0.041,0.412,-0.279,0.922,0.1,-0.377,-0.418,-0.036,0.088,0.479,1.345,0.087,0.05,-0.846,-0.028,-2.788,-1.255,-0.737,0.041,0.098,0.07,-0.199,0.187,0.075,0.043,0.054,-0.124,-0.028,-0.103,0.027,0.333,0.066,0.274,0.086,-0.105,-0.309,-0.177,0.23,0.084,0.101,0.047,0.292,0.323,0.2,-0.088,0.073,0.051,0.192,0.162,0.02,-0.157,0.188,-0.232,0.102,0.484,0.0,0.0,0.0,0.293,-0.582,-0.698,0.0,0.0,0.0,0.0,0.0,-0.737,0.0,0.0,0.0,0.0,0.0,5 +-0.484,-0.947,-1.062,-0.826,-0.18,-0.119,-0.274,-0.038,0.544,0.709,0.048,-0.656,-0.74,-0.801,-0.661,0.129,-0.248,0.356,-0.917,-0.291,0.178,-0.351,0.046,0.177,-0.197,-0.081,0.056,-0.042,-0.014,0.135,-0.154,0.676,-0.508,-0.04,0.059,0.11,0.055,-0.622,0.147,-0.172,-0.734,0.525,-1.474,-0.822,-0.59,0.051,0.034,0.048,0.244,0.185,0.099,0.0,0.067,-0.386,0.024,-0.146,0.003,0.194,0.158,0.139,0.175,0.029,-0.198,-0.073,0.065,0.127,0.079,0.077,0.148,0.086,0.216,-0.232,-0.293,-0.046,0.165,0.222,-0.083,-0.031,0.032,0.045,0.098,0.332,0.0,0.675,0.0,0.0,-0.219,-0.698,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.441,6 +-0.575,-0.539,-0.87,-0.695,-0.215,-0.043,-0.051,0.059,0.434,0.511,0.199,-0.315,-0.59,-0.713,-0.608,0.142,-0.174,0.343,-0.529,-0.26,0.126,-0.378,-0.126,-0.001,0.11,-0.295,-0.055,-0.069,0.152,0.045,-0.091,0.344,-0.115,-0.094,-0.067,0.045,-0.026,-0.602,0.152,0.022,-0.501,-0.146,-0.537,-0.445,-0.697,0.047,-0.09,0.07,-0.246,0.138,0.091,0.094,-0.026,0.019,0.053,-0.115,0.02,0.194,0.184,0.197,0.161,0.027,-0.077,-0.016,0.086,0.114,0.032,0.034,0.06,0.025,0.224,-0.061,-0.004,-0.072,0.157,0.177,0.033,-0.025,-0.013,0.142,-0.024,0.225,0.0,0.474,0.0,0.0,-0.129,-0.698,0.0,0.0,0.0,0.0,0.0,0.0,-0.355,0.0,0.0,0.0,0.0,7 +-0.654,-0.927,-1.079,-0.6,-0.611,0.575,1.507,-0.378,0.312,0.403,0.625,-1.118,-0.382,-0.285,-0.358,0.163,-0.234,0.429,-1.517,-0.188,0.047,-0.093,-0.026,-0.306,0.873,-0.821,0.05,0.35,0.137,0.292,0.285,0.362,0.094,-0.004,-0.079,0.107,0.233,-0.191,0.412,-0.337,-0.861,0.034,-4.649,-4.13,-1.037,0.011,-0.08,0.077,-0.093,0.243,0.061,0.026,-0.008,0.061,0.168,-0.147,-0.139,0.106,0.179,0.187,0.189,-0.096,-0.126,-0.16,0.16,0.185,0.097,0.23,0.237,-0.148,0.25,-0.294,-0.015,-0.226,0.016,0.31,0.066,-0.08,-0.213,0.181,-0.161,0.366,0.0,0.469,0.0,0.0,-0.888,-0.698,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.978,0.0,0.0,0.0,8 +-1.088,-1.138,-1.09,-0.7,-0.472,0.278,-1.721,-0.498,0.356,0.447,0.566,-1.457,-0.385,-1.187,-1.024,0.22,-0.24,0.429,-1.079,-0.233,0.153,-0.403,-0.285,-0.367,-0.498,-0.407,-0.027,0.238,0.211,0.3,0.359,0.592,-0.131,0.044,-0.037,0.089,0.337,-0.972,0.466,-0.511,-0.833,0.171,-2.5,-1.76,-1.4,0.194,-0.084,0.167,-0.138,0.307,0.246,0.11,-0.14,0.106,0.184,-0.134,-0.17,0.146,0.172,0.283,0.259,-0.261,-0.138,-0.242,0.204,0.214,0.202,0.374,0.036,-0.136,0.443,-0.259,-0.113,-0.251,-0.091,0.419,-0.022,-0.326,-0.242,0.265,-0.136,0.286,0.0,0.683,0.0,0.0,-0.293,-0.698,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.975,0.0,0.0,9 
+-0.072,-0.883,-1.094,-0.728,-0.043,-0.276,0.286,0.385,0.345,0.594,-0.029,-0.813,-0.831,-0.109,-0.127,0.055,-0.026,0.301,-0.661,0.013,0.029,0.119,-0.022,0.181,0.285,-0.014,0.103,-0.004,0.092,0.007,-0.264,0.587,-0.535,0.019,0.112,0.481,0.036,-0.794,0.022,-0.306,-0.754,0.122,-0.813,-0.756,-0.239,0.008,0.032,-0.012,-0.073,0.014,-0.001,-0.087,0.124,-0.285,-0.005,-0.143,0.102,0.05,-0.002,0.149,-0.023,0.072,-0.15,-0.114,0.041,0.007,-0.051,-0.048,0.09,0.106,0.179,-0.247,-0.013,0.007,0.11,0.012,-0.093,-0.068,0.079,0.129,0.05,0.121,0.0,0.642,0.0,0.0,0.24,-0.797,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,10 +-0.544,-0.237,-0.311,-0.202,-0.255,0.581,-2.841,-0.186,-0.132,0.084,0.467,-0.683,-0.257,-0.861,-0.826,0.314,-0.134,0.316,-0.984,0.108,0.025,-0.407,0.081,0.201,-2.056,0.105,-0.034,0.594,-0.162,0.179,0.428,0.057,-0.187,-0.155,0.092,0.142,0.254,-0.662,0.345,-0.454,0.01,0.284,-1.866,-1.26,-0.254,0.053,-0.053,0.027,-0.076,0.187,0.152,0.241,-0.023,-0.269,0.084,-0.031,-0.206,0.04,0.015,0.205,0.13,-0.135,-0.172,-0.041,0.232,0.094,0.05,0.114,0.089,-0.118,0.147,-0.056,-0.106,0.013,0.155,0.094,-0.058,-0.218,-0.014,-0.288,-0.146,0.127,0.0,0.054,0.033,0.0,-1.107,0.0,-0.993,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,11 +-0.763,-0.463,-0.433,-0.535,-0.305,0.359,-2.53,-0.79,0.247,0.229,0.514,-0.391,-0.147,-1.185,-0.96,0.143,-0.123,0.156,-0.892,-0.32,0.179,-0.651,0.089,0.076,-1.999,-0.374,-0.051,0.322,-0.269,0.221,0.34,0.022,-0.035,-0.123,-0.023,-0.009,0.189,-0.351,0.354,-0.053,-0.025,-0.016,-1.458,-0.952,-0.608,-0.073,-0.21,0.044,-0.206,0.308,0.144,0.304,-0.045,-0.055,0.209,-0.105,-0.195,0.225,0.12,0.195,0.146,-0.048,-0.151,-0.114,0.277,0.077,0.005,0.159,0.113,0.153,0.011,-0.153,-0.033,0.103,0.047,0.294,-0.071,-0.138,-0.274,-0.103,-0.21,0.216,0.0,0.162,0.0,0.0,-0.262,0.0,-0.989,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,12 +-0.786,-1.015,-1.229,-0.693,-0.427,0.103,-2.158,-0.537,0.414,0.56,0.584,-0.846,-0.745,-1.289,-0.975,0.187,-0.04,0.351,-1.043,-0.344,0.204,-0.519,0.154,0.072,-1.949,-0.355,0.129,0.284,-0.062,0.206,0.23,0.44,-0.27,-0.14,0.033,0.093,0.231,-1.036,0.462,-0.387,-0.683,0.128,-2.575,-1.871,-0.654,0.022,-0.143,0.054,-0.165,0.319,0.207,0.21,0.034,-0.207,0.167,-0.147,-0.144,0.156,0.134,0.262,0.19,-0.13,-0.191,-0.211,0.304,0.156,0.054,0.381,0.209,0.133,0.178,-0.227,0.027,-0.032,0.085,0.383,-0.095,-0.154,-0.237,-0.15,-0.14,0.269,0.0,0.638,0.0,0.0,0.041,-0.698,-0.989,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,13 +-0.817,-0.308,-0.722,-0.446,-0.416,0.74,-2.701,-0.384,0.089,0.38,-0.14,-0.347,-0.096,-1.145,-0.899,0.417,-0.268,-0.172,-0.948,-0.141,0.15,-0.605,-0.27,0.069,-2.105,-0.205,0.016,0.311,-0.348,0.052,0.27,0.2,-0.299,-0.095,0.387,0.076,0.167,0.974,0.481,-0.082,0.096,0.079,-1.549,-0.988,-0.499,0.012,-0.153,0.047,-0.279,0.28,0.11,0.13,0.01,-0.249,0.139,-0.126,-0.197,0.097,-0.009,0.229,0.21,0.006,-0.144,0.043,0.223,0.143,0.066,0.23,0.338,0.172,0.246,-0.215,-0.077,0.051,0.201,0.285,0.027,-0.151,-0.098,-0.001,-0.258,0.044,0.0,0.0,1.574,0.0,-0.232,0.0,-0.989,-0.89,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,14 
+-0.647,-0.298,-0.545,-0.493,-0.318,0.372,-2.707,-0.19,0.285,0.183,0.471,-0.259,-0.181,-1.076,-0.885,0.108,-0.224,0.32,-0.974,-0.329,0.123,-0.608,0.105,0.268,-2.149,-0.055,0.039,0.274,-0.257,0.09,0.445,0.076,-0.07,-0.071,0.084,0.142,0.139,-0.461,0.403,-0.168,0.029,-0.011,-1.57,-0.967,-0.457,-0.061,-0.176,0.084,-0.391,0.217,0.127,0.15,0.054,-0.183,0.089,-0.14,-0.163,0.186,0.067,0.179,0.166,0.049,-0.172,-0.086,0.176,0.132,0.119,0.32,0.167,0.072,0.234,-0.076,-0.106,0.184,0.366,0.232,-0.037,-0.108,-0.154,-0.201,-0.16,0.188,0.0,0.045,0.261,0.0,-0.106,0.0,-0.989,0.0,-0.393,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,15 +-0.64,-0.27,-0.509,-0.522,-0.405,0.398,-2.628,-0.331,0.21,0.195,0.531,-0.337,-0.131,-1.098,-0.877,0.152,-0.127,0.166,-1.001,-0.24,-0.059,-0.617,-0.081,0.185,-2.09,-0.042,0.064,0.32,-0.34,0.124,0.306,0.084,-0.068,-0.065,0.11,0.121,0.091,-0.485,0.346,-0.153,-0.034,0.042,-1.527,-0.925,-0.156,0.046,-0.068,0.076,-0.201,0.226,0.157,0.178,-0.039,-0.094,0.069,-0.156,-0.183,0.242,0.1,0.171,0.235,-0.114,-0.182,0.043,0.179,0.129,0.046,0.237,0.115,0.066,0.16,-0.094,0.011,0.023,0.272,0.256,-0.112,-0.064,-0.114,-0.193,-0.186,0.22,0.0,0.123,0.018,0.0,-0.151,0.0,-0.989,0.0,0.0,-0.474,0.0,0.0,0.0,0.0,0.0,0.0,0.0,16 +-0.68,-0.589,-0.841,-0.795,-0.265,0.149,-2.227,-0.245,0.575,0.438,0.857,-0.491,-0.472,-1.242,-0.955,0.16,-0.231,0.402,-1.037,-0.277,0.256,-0.596,0.113,0.276,-1.964,-0.227,-0.016,0.263,-0.142,0.136,0.334,0.262,-0.201,-0.068,0.105,0.104,0.272,-0.715,0.411,-0.187,-0.468,0.052,-2.184,-1.241,-0.393,-0.077,-0.215,0.086,-0.272,0.334,0.162,0.27,-0.021,-0.172,0.163,-0.101,-0.152,0.281,0.055,0.293,0.209,0.009,-0.206,-0.065,0.265,0.175,0.143,0.121,0.192,0.174,0.182,-0.126,-0.132,0.066,0.167,0.234,-0.038,0.007,-0.237,-0.078,-0.171,0.331,0.0,0.505,0.0,0.0,-0.294,0.0,-0.989,0.0,0.0,0.0,-0.552,0.0,0.0,0.0,0.0,0.0,0.0,17 +-0.847,-0.561,-0.052,-0.245,-0.357,0.591,-2.308,0.097,1.166,-0.071,0.257,-0.334,-0.285,-0.901,-1.49,-0.026,-0.13,0.179,-0.924,-0.124,-0.495,-0.445,-0.019,0.55,-2.259,0.276,-0.075,0.174,0.322,-0.239,0.904,-0.022,-0.185,-0.58,0.134,0.106,0.486,0.752,0.301,-0.446,-0.705,0.135,-3.914,-3.054,-0.38,-0.016,0.044,0.041,-0.183,0.207,0.14,0.178,0.002,-0.164,0.093,-0.147,-0.118,0.271,-0.093,0.192,0.163,-0.065,-0.375,-0.252,0.328,0.088,0.168,0.202,0.225,0.259,0.218,-0.094,0.131,0.159,0.556,0.231,0.329,-0.289,0.113,-0.424,-0.108,0.322,0.0,0.0,0.047,0.342,-2.255,0.0,-0.989,0.0,0.0,0.0,0.0,-0.737,0.0,0.0,0.0,0.0,0.0,18 +-0.678,-0.59,-0.712,-0.759,-0.449,0.374,-2.648,-0.19,0.377,0.356,0.56,-0.555,-0.351,-1.148,-0.905,0.287,-0.238,0.286,-1.025,-0.384,0.279,-0.599,-0.048,0.16,-2.181,-0.139,0.078,0.274,-0.5,0.279,0.264,0.267,-0.199,-0.044,0.049,0.149,0.262,-0.318,0.468,-0.259,-0.243,0.54,-2.126,-1.179,-0.729,0.091,-0.182,0.102,-0.218,0.27,0.23,0.278,-0.04,-0.207,0.143,-0.139,-0.246,0.255,0.137,0.254,0.261,-0.062,-0.163,-0.023,0.161,0.187,0.092,0.277,0.129,0.024,0.188,-0.187,-0.292,0.057,0.192,0.315,-0.054,-0.167,-0.191,-0.122,-0.195,0.237,0.0,0.329,0.0,0.0,-0.247,0.0,-0.989,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.441,19 
+-0.715,-0.314,-0.454,-0.566,-0.442,0.384,-2.772,-0.316,0.432,0.252,0.607,-0.35,-0.173,-1.159,-0.95,0.21,-0.186,0.272,-0.907,-0.315,0.039,-0.666,-0.145,0.1,-2.119,-0.239,-0.042,0.262,-0.27,0.193,0.176,0.011,-0.021,-0.182,0.139,0.093,0.02,-0.464,0.346,-0.139,-0.059,-0.044,-1.425,-0.877,-0.585,0.098,-0.089,0.104,-0.185,0.179,0.22,0.229,-0.066,0.024,0.052,-0.162,-0.206,0.313,0.127,0.134,0.239,-0.162,-0.165,0.112,0.201,0.113,0.004,0.128,0.054,0.059,0.112,-0.059,-0.049,-0.021,0.164,0.245,-0.002,-0.024,-0.121,-0.182,-0.247,0.282,0.0,0.085,0.116,0.0,-1.486,0.0,-0.989,0.0,0.0,0.0,0.0,0.0,-0.355,0.0,0.0,0.0,0.0,20 +-0.825,-0.441,-0.645,-0.518,-0.637,0.667,-0.829,-0.294,0.305,0.28,0.778,-0.906,0.093,-1.002,-0.919,0.106,-0.269,0.382,-1.679,-0.281,0.013,-0.499,-0.031,-0.054,-1.994,-0.119,0.022,0.408,-0.241,0.235,0.587,-0.088,0.118,-0.061,0.081,0.267,0.192,-0.338,0.463,-0.536,-0.115,0.014,-4.261,-3.759,-0.686,0.084,-0.082,0.08,-0.29,0.305,0.179,0.247,-0.081,0.001,0.192,-0.088,-0.137,0.24,0.062,0.256,0.312,-0.128,-0.081,-0.007,0.192,0.214,0.212,0.395,0.193,-0.004,0.167,-0.1,-0.071,-0.151,0.189,0.363,0.163,-0.275,-0.209,-0.046,-0.262,0.21,0.0,0.025,0.178,0.0,-1.448,0.0,-0.989,0.0,0.0,0.0,0.0,0.0,0.0,-0.978,0.0,0.0,0.0,21 +-1.378,-0.875,-0.747,-0.75,-0.683,0.871,-3.313,-1.095,0.421,0.36,0.872,-1.161,-0.191,-1.504,-1.258,0.274,-0.141,0.367,-1.376,-0.245,0.131,-0.614,-0.272,-0.33,-2.249,-0.531,0.004,0.532,-0.126,0.438,0.414,0.106,0.073,-0.354,0.093,0.11,0.138,-0.572,0.512,-0.791,-0.19,0.109,-3.252,-2.448,-0.645,0.108,-0.223,0.105,-0.182,0.32,0.338,0.346,-0.309,0.187,0.218,-0.139,-0.392,0.293,0.199,0.319,0.328,-0.349,-0.248,-0.172,0.246,0.169,0.091,0.211,0.022,0.015,-0.052,-0.194,-0.132,-0.233,-0.303,0.424,-0.016,-0.273,-0.443,-0.268,-0.512,0.288,0.0,0.247,0.0,0.0,-1.361,0.0,-0.989,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.975,0.0,0.0,22 +-0.08,0.109,0.0,0.145,-0.126,0.396,0.052,-0.273,-0.191,0.426,-0.832,-0.456,0.121,-0.086,0.099,0.572,-0.077,-0.636,-0.219,0.309,0.055,0.056,-0.595,-0.065,0.107,-0.298,-0.006,0.343,0.0,-0.145,0.152,0.567,-0.447,-0.074,0.967,-0.065,0.128,1.901,0.286,0.046,0.072,0.155,-0.367,0.064,0.049,0.056,-0.055,0.074,-0.129,0.07,-0.071,-0.069,-0.087,-0.177,0.055,-0.124,-0.34,-0.029,-0.062,0.216,0.028,0.0,-0.074,0.329,0.101,0.027,-0.055,-0.08,0.29,0.061,0.176,-0.392,-0.072,-0.093,-0.101,0.066,0.364,-0.305,0.045,0.102,-0.14,-0.107,0.0,0.0,1.658,0.0,-0.197,0.0,0.0,-0.956,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,23 +-0.173,0.079,-0.137,0.095,-0.089,0.173,-0.07,0.217,-0.139,0.155,-0.465,-0.148,0.148,-0.029,-0.007,0.367,0.051,-0.313,-0.038,0.104,-0.14,-0.174,-0.268,-0.166,0.005,-0.057,-0.135,0.199,-0.116,-0.008,0.07,0.115,-0.131,-0.118,0.498,-0.083,-0.011,1.479,0.042,0.065,0.12,-0.024,0.033,0.042,0.347,0.029,-0.002,-0.015,0.019,0.0,0.026,-0.037,0.004,-0.093,-0.075,-0.242,-0.099,-0.132,0.043,-0.028,-0.021,-0.043,-0.004,0.12,-0.001,-0.04,-0.057,0.028,0.111,-0.1,0.036,-0.095,0.011,-0.051,0.086,0.027,-0.053,-0.126,0.091,0.12,-0.08,-0.232,0.0,0.0,1.562,0.0,0.102,0.0,0.0,-0.89,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,24 
+-0.513,0.022,-0.128,-0.143,-0.167,0.219,-0.019,0.099,-0.299,0.143,-0.555,-0.043,0.092,-0.268,-0.196,0.201,-0.152,-0.253,-0.156,0.046,-0.123,-0.242,-0.235,-0.125,0.017,-0.361,-0.002,0.147,-0.177,-0.064,-0.013,-0.014,0.006,-0.233,0.383,-0.074,-0.072,1.716,0.205,0.165,0.094,0.047,-0.152,-0.294,-0.424,0.109,-0.037,0.113,-0.026,0.039,-0.019,-0.025,-0.091,-0.038,0.05,-0.046,-0.204,0.081,0.053,0.172,0.022,-0.12,-0.028,0.08,0.057,0.036,-0.01,-0.001,0.178,0.248,0.142,-0.159,-0.125,-0.114,0.063,0.066,-0.074,-0.025,-0.032,0.048,-0.128,0.001,0.0,0.0,1.746,0.0,-0.558,0.0,0.0,-0.89,-0.393,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,25 +-0.531,-0.262,0.258,-0.003,-0.276,0.243,0.453,0.452,0.833,0.009,-0.508,-0.211,0.226,0.388,-0.675,0.219,0.014,-0.261,-0.133,0.115,-0.487,0.207,-0.39,0.236,0.166,-0.108,-0.191,-0.153,0.073,-0.174,0.87,0.064,-0.322,-0.53,0.386,-0.058,0.523,1.43,0.02,-0.149,-0.863,0.1,-1.887,-1.24,-0.3,0.111,0.079,0.016,0.01,0.032,0.052,0.106,-0.131,-0.115,-0.054,-0.155,-0.115,-0.06,-0.037,0.013,0.02,-0.161,-0.337,0.008,0.197,0.016,-0.103,0.143,0.242,0.054,0.115,-0.138,0.096,-0.041,0.008,0.167,0.445,-0.57,0.221,-0.194,-0.112,0.019,0.0,0.0,1.36,0.0,-0.673,0.0,0.0,-0.89,0.0,0.0,0.0,-0.737,0.0,0.0,0.0,0.0,0.0,26 +-0.431,-0.108,-0.346,-0.531,-0.298,0.089,0.027,-0.043,-0.227,0.283,-0.54,-0.251,-0.067,-0.341,-0.274,0.2,-0.103,-0.32,-0.319,0.016,0.057,-0.22,-0.359,-0.145,0.049,-0.428,0.016,0.031,-0.069,-0.012,-0.052,0.064,-0.246,-0.278,0.333,-0.004,0.045,1.389,0.205,0.127,-0.087,0.495,-0.392,-0.435,-0.672,0.129,-0.056,0.088,0.015,0.099,0.015,-0.051,-0.086,-0.049,0.057,-0.142,-0.181,0.029,0.016,0.093,0.039,-0.164,-0.062,0.195,0.006,0.065,-0.108,0.142,0.16,0.284,0.152,-0.201,-0.307,-0.146,0.053,0.222,-0.089,-0.076,0.008,0.054,-0.217,-0.038,0.0,0.0,1.809,0.0,-0.461,0.0,0.0,-0.89,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.441,27 +-0.494,-0.042,-0.435,-0.021,-0.509,0.407,1.349,-0.276,-0.164,0.117,-0.17,-0.625,0.256,0.232,0.153,0.238,-0.015,-0.071,-1.334,0.114,-0.195,0.214,-0.459,-0.371,0.887,-0.658,-0.132,0.477,0.055,0.106,0.326,-0.039,-0.027,-0.182,0.447,-0.007,0.1,0.991,0.203,-0.208,-0.17,0.076,-2.865,-2.328,-0.027,0.166,0.028,0.03,-0.008,0.069,0.047,-0.055,-0.088,0.081,-0.044,-0.08,-0.258,-0.07,0.018,0.043,0.173,-0.175,-0.08,0.264,0.095,0.097,0.068,0.28,0.148,-0.107,0.101,-0.16,0.035,-0.287,-0.084,0.25,0.135,-0.161,0.085,-0.111,-0.32,-0.155,0.029,0.0,1.638,0.0,-1.143,0.0,0.0,-0.89,0.0,0.0,0.0,0.0,0.0,-0.978,0.0,0.0,0.0,28 +-0.423,-0.014,0.005,-0.244,-0.129,0.02,-0.195,0.001,0.065,0.092,0.094,-0.082,0.047,-0.31,-0.278,0.077,0.003,0.199,-0.202,-0.093,-0.018,-0.216,-0.061,-0.046,-0.107,0.157,-0.109,-0.042,0.101,0.039,-0.015,-0.026,-0.025,0.04,0.109,0.033,-0.05,0.016,0.042,0.023,0.115,-0.008,-0.019,0.228,0.347,0.02,-0.017,0.038,0.064,0.006,0.082,-0.028,-0.102,-0.106,-0.118,-0.073,-0.043,0.082,0.102,-0.035,0.096,-0.069,-0.081,0.259,-0.019,0.0,-0.056,0.176,-0.055,-0.145,0.058,0.036,-0.032,-0.125,0.076,0.209,0.046,-0.044,0.104,-0.028,-0.027,-0.015,1.204,0.0,0.093,0.091,-0.121,0.0,0.0,0.0,-0.393,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,29 
+-0.758,-0.26,0.265,-0.147,-0.32,-0.015,-0.295,-0.044,0.886,-0.038,-0.307,-0.054,-0.116,0.187,-0.75,-0.151,0.065,0.226,-0.707,-0.177,-0.535,0.221,-0.178,0.194,-0.246,0.001,-0.163,-0.176,0.213,-0.296,0.667,-0.178,-0.194,-0.526,0.059,0.008,0.175,1.609,-0.007,0.095,-0.344,0.162,-1.642,-0.965,-0.617,0.157,0.14,0.025,0.161,0.017,0.035,0.059,-0.132,0.118,-0.11,-0.092,-0.075,0.239,0.037,0.104,0.091,-0.23,-0.239,-0.077,0.107,0.019,0.04,0.162,0.016,0.136,0.074,-0.045,-0.051,-0.074,0.158,0.078,0.088,-0.211,0.13,-0.373,0.013,0.255,0.876,0.0,0.204,0.769,-2.241,0.0,0.0,0.0,-0.393,0.0,0.0,-0.737,0.0,0.0,0.0,0.0,0.0,30 +-0.103,-0.251,-0.259,-0.545,-0.24,-0.071,-0.195,-0.157,0.163,0.151,-0.13,-0.228,-0.01,-0.496,-0.436,0.082,-0.091,0.336,-0.464,-0.208,0.135,-0.235,0.003,0.01,-0.142,0.001,0.012,-0.117,0.076,0.115,-0.086,0.097,-0.282,-0.034,0.13,0.134,0.015,0.136,0.047,-0.004,-0.145,0.523,-0.447,-0.08,0.056,0.049,0.032,0.105,0.078,-0.067,0.098,0.045,-0.045,-0.153,-0.059,-0.012,-0.084,0.118,0.128,0.002,0.103,-0.075,-0.076,0.157,-0.04,0.027,-0.032,0.128,-0.014,-0.023,0.101,0.059,-0.326,-0.101,0.132,0.204,0.003,-0.111,0.162,-0.032,0.019,-0.045,0.0,0.062,0.296,0.0,0.142,0.0,0.0,0.0,-0.393,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.441,31 +-0.606,-0.065,-0.316,-0.141,-0.393,0.637,1.387,-0.044,0.012,0.066,0.445,-0.704,0.286,0.119,-0.027,-0.034,-0.104,0.602,-1.376,0.022,-0.172,0.18,-0.083,-0.204,0.916,-0.662,-0.08,0.315,0.069,0.24,0.295,-0.117,0.104,0.11,0.189,0.152,0.128,0.021,0.24,-0.311,-0.219,0.125,-3.001,-2.454,-0.032,0.037,0.139,0.123,0.108,-0.022,0.066,0.024,0.019,0.163,-0.068,-0.081,-0.059,0.176,0.068,0.039,0.215,-0.203,-0.099,0.184,0.008,0.044,-0.02,0.226,0.035,-0.012,0.164,0.017,-0.135,-0.291,-0.088,0.35,0.194,-0.169,0.028,-0.068,-0.12,0.121,0.056,0.0,0.454,0.246,-1.563,0.0,0.0,0.0,-0.393,0.0,0.0,0.0,0.0,-0.978,0.0,0.0,0.0,32 +-0.273,0.014,0.064,-0.214,-0.194,0.138,-0.126,-0.191,0.101,0.066,-0.367,-0.006,-0.031,-0.248,-0.245,0.143,-0.033,0.163,-0.066,-0.022,0.016,-0.107,-0.146,0.007,-0.113,-0.286,-0.004,0.012,-0.032,-0.091,-0.113,-0.029,0.043,-0.048,-0.081,0.27,0.009,0.142,0.098,0.07,-0.061,0.155,-0.243,-0.32,-0.484,-0.03,-0.036,0.067,0.18,0.006,0.003,-0.051,-0.126,0.033,-0.019,0.076,-0.102,0.104,0.041,0.12,0.103,-0.065,-0.021,0.054,0.018,0.0,-0.039,0.142,0.048,-0.013,0.119,0.008,-0.18,-0.143,0.045,0.132,0.035,-0.179,-0.022,-0.025,-0.028,0.123,0.273,0.0,0.153,0.101,-0.105,0.0,0.0,0.0,-0.266,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,33 +-0.149,-0.205,-0.185,-0.216,0.062,0.118,-0.163,0.066,-0.129,0.143,-0.042,-0.088,-0.096,-0.525,-0.423,0.203,-0.018,0.045,-0.191,-0.152,-0.167,-0.358,-0.054,-0.153,-0.253,-0.161,-0.086,0.198,0.023,0.079,0.113,0.138,-0.036,-0.05,-0.161,-0.136,-0.13,-0.098,-0.015,-0.024,-0.036,-0.067,-0.151,-0.026,-0.729,0.036,-0.055,0.009,0.12,0.02,0.078,0.132,-0.129,0.03,0.043,-0.008,-0.106,-0.025,0.086,0.125,0.079,-0.068,-0.017,-0.075,0.035,0.005,-0.081,0.011,0.009,-0.162,0.02,-0.023,0.083,-0.056,0.158,-0.035,0.052,-0.223,-0.043,-0.072,-0.137,-0.072,0.583,0.066,0.0,0.0,0.072,0.0,0.0,0.0,0.0,-0.474,0.0,0.0,0.0,0.0,0.0,0.0,0.0,34 
+-0.003,-0.023,-0.388,-0.094,0.092,0.242,-0.017,0.028,-0.321,0.24,-0.628,-0.121,0.095,-0.387,-0.468,0.287,-0.01,-0.355,-0.252,0.017,-0.225,-0.28,-0.216,-0.117,-0.19,0.015,-0.41,0.13,-0.121,-0.05,0.008,0.138,-0.078,-0.09,0.359,-0.151,-0.131,1.11,0.105,0.121,0.175,-0.022,0.06,-0.263,-0.328,0.072,-0.078,-0.016,-0.096,0.066,-0.051,0.049,-0.003,0.001,0.016,-0.161,-0.005,-0.13,0.037,0.039,0.054,0.058,-0.05,0.066,0.055,-0.013,-0.076,0.102,0.214,-0.356,0.068,-0.119,0.084,-0.026,0.091,0.091,-0.01,-0.218,-0.041,0.203,-0.05,-0.14,0.0,0.0,1.766,0.0,-0.512,0.0,0.0,-0.89,0.0,-0.474,0.0,0.0,0.0,0.0,0.0,0.0,0.0,35 +-0.311,-0.51,-0.252,-0.239,-0.124,0.12,-0.176,-0.25,-0.273,0.142,-0.159,-0.066,-0.189,-0.524,-0.449,0.155,-0.038,0.146,-0.297,-0.169,-0.122,-0.368,-0.122,-0.201,-0.348,-0.217,-0.026,0.068,-0.016,0.062,0.064,-0.004,-0.078,-0.057,-0.108,-0.087,-0.115,-0.152,0.161,0.056,0.062,0.039,-0.003,-0.034,-0.944,0.108,-0.079,0.031,0.034,0.208,0.102,0.107,-0.293,0.106,0.114,-0.04,-0.157,0.062,0.086,0.213,0.151,-0.089,-0.006,-0.06,0.126,0.122,0.031,0.184,0.011,-0.244,-0.099,-0.104,0.066,-0.168,0.088,0.187,-0.012,-0.285,-0.157,-0.029,-0.142,0.021,1.267,0.0,0.204,0.093,0.216,0.0,0.0,0.0,-0.393,-0.474,0.0,0.0,0.0,0.0,0.0,0.0,0.0,36 +-0.648,-0.261,0.126,-0.111,-0.093,0.017,-0.243,0.029,1.04,-0.055,-0.159,0.13,-0.214,-0.07,-0.879,0.03,0.002,0.069,-0.684,-0.15,-0.593,-0.061,-0.041,0.158,-0.287,-0.093,-0.162,-0.117,0.348,-0.182,0.713,-0.022,-0.143,-0.389,0.069,-0.09,0.225,1.382,-0.005,0.046,-0.374,0.005,-1.536,-0.822,-0.733,0.054,0.027,0.012,-0.034,0.072,0.087,0.172,-0.056,0.15,0.008,-0.099,0.012,0.241,0.023,0.081,0.054,-0.13,-0.248,-0.037,0.159,0.008,0.022,0.069,0.106,-0.03,0.008,-0.091,0.064,0.016,0.152,0.076,0.04,-0.103,0.112,-0.268,-0.01,0.218,0.654,0.0,0.18,0.697,-1.875,0.0,0.0,0.0,0.0,-0.474,0.0,-0.737,0.0,0.0,0.0,0.0,0.0,37 +-0.308,-0.265,-0.38,-0.495,-0.09,0.032,-0.11,-0.085,-0.039,0.183,-0.01,-0.119,-0.238,-0.528,-0.474,0.173,-0.092,0.082,-0.48,-0.244,-0.05,-0.34,-0.041,0.076,-0.272,-0.142,-0.031,-0.013,-0.024,0.098,0.007,0.118,-0.147,-0.091,-0.037,-0.036,0.008,-0.273,0.102,0.004,-0.174,0.433,-0.34,-0.454,-0.761,0.028,-0.073,-0.093,0.02,0.107,0.052,0.062,-0.051,-0.076,0.006,-0.067,-0.039,0.098,0.128,0.144,0.045,-0.007,-0.065,0.042,0.022,0.046,-0.012,0.133,0.039,-0.297,0.072,-0.148,-0.217,-0.039,0.145,0.153,-0.03,-0.037,-0.041,-0.068,-0.039,0.073,0.941,0.229,0.0,0.0,0.072,0.0,0.0,0.0,0.0,-0.474,0.0,0.0,0.0,0.0,0.0,0.0,-0.441,38 +-0.192,-0.052,-0.223,-0.029,-0.009,0.172,-0.086,0.432,-0.157,0.11,0.029,-0.103,-0.098,-0.429,-0.421,0.102,-0.025,0.009,-0.172,-0.187,-0.311,-0.411,-0.077,-0.112,-0.231,-0.124,-0.029,0.01,0.026,0.047,-0.055,0.043,0.108,-0.125,0.015,-0.084,-0.176,-0.148,-0.023,0.019,-0.016,-0.11,-0.061,0.056,-0.811,-0.012,-0.052,0.018,0.134,0.037,0.049,0.125,-0.054,0.177,0.055,0.012,-0.06,0.081,0.124,0.071,0.023,-0.07,-0.026,0.012,-0.043,0.017,-0.146,-0.071,0.006,-0.068,0.007,-0.045,0.029,-0.127,0.074,-0.083,0.113,-0.028,-0.056,-0.068,-0.107,0.008,0.0,0.0,0.295,0.0,-0.361,0.0,0.0,0.0,0.0,-0.474,0.0,0.0,-0.355,0.0,0.0,0.0,0.0,39 
+-0.668,-0.482,-0.473,-0.326,-0.653,0.519,1.357,-0.594,0.015,0.151,0.377,-0.788,0.07,-0.196,-0.305,0.211,-0.066,0.3,-1.427,0.017,-0.247,-0.151,-0.195,-0.504,0.682,-0.95,-0.067,0.518,0.049,0.264,0.379,-0.054,0.203,-0.01,-0.044,0.027,0.117,-0.251,0.211,-0.32,-0.276,0.085,-3.211,-2.415,-0.652,0.068,-0.052,-0.05,0.048,0.093,0.038,0.18,-0.168,0.245,0.055,-0.081,-0.208,0.123,0.257,0.157,0.22,-0.311,-0.074,0.186,0.104,0.078,-0.042,0.174,0.044,-0.158,-0.013,-0.159,-0.065,-0.36,-0.087,0.239,0.157,-0.112,-0.181,-0.015,-0.28,0.063,0.295,0.0,0.356,0.152,-1.285,0.0,0.0,0.0,0.0,-0.474,0.0,0.0,0.0,-0.978,0.0,0.0,0.0,40 +0.107,0.122,0.223,-0.084,0.189,-0.076,0.033,0.531,0.233,0.02,-0.164,0.119,0.034,0.005,0.005,0.073,0.001,0.025,-0.2,0.001,-0.293,-0.089,-0.028,0.026,-0.248,0.165,0.131,0.052,0.095,-0.018,-0.005,-0.035,-0.026,-0.001,0.061,0.311,-0.121,-0.029,0.125,-0.044,0.006,0.027,0.212,-0.078,0.425,-0.109,0.027,0.05,0.015,-0.125,0.025,-0.018,0.041,-0.028,-0.09,-0.057,0.068,0.064,0.041,-0.07,0.027,0.037,-0.054,0.197,0.059,-0.088,-0.118,-0.015,0.052,0.081,-0.054,-0.037,-0.065,0.085,0.08,0.016,0.015,0.036,0.055,-0.031,0.186,-0.091,0.0,0.117,0.0,0.0,0.276,0.0,0.0,0.0,0.0,-0.474,0.0,0.0,0.0,0.0,0.0,0.0,0.0,41 +-0.206,-0.542,-0.653,-0.901,-0.075,-0.239,0.227,0.278,0.372,0.308,0.0,-0.473,-0.668,-0.167,-0.162,0.057,-0.103,0.271,-0.442,0.046,0.133,0.054,0.014,0.06,0.19,-0.04,0.093,0.024,0.209,0.017,-0.158,0.359,-0.381,-0.014,0.016,0.021,0.027,-0.749,0.092,-0.173,-0.58,0.083,-0.463,-0.249,-0.255,0.001,0.075,0.097,-0.134,0.044,-0.016,-0.016,0.106,-0.207,0.02,-0.087,0.051,0.037,0.08,0.074,0.036,0.037,-0.127,-0.147,0.032,0.023,-0.062,0.042,0.099,0.09,0.148,-0.097,-0.041,-0.062,0.143,0.065,-0.055,-0.068,-0.025,0.105,0.076,0.048,0.0,0.551,0.0,0.0,0.254,0.0,0.0,0.0,0.0,0.0,-0.568,0.0,0.0,0.0,0.0,0.0,0.0,42 +-0.334,-0.359,-0.626,-0.783,-0.08,-0.07,0.181,0.212,0.393,0.258,0.286,-0.149,-0.339,-0.531,-0.424,0.087,-0.152,0.177,-0.49,-0.225,0.15,-0.309,-0.122,0.079,0.133,-0.362,0.014,0.061,0.066,0.043,0.057,0.324,-0.079,-0.041,-0.134,0.006,-0.048,-0.451,0.168,0.054,-0.43,-0.086,-0.263,0.083,-0.182,0.102,-0.134,0.066,-0.081,0.115,0.096,-0.062,-0.029,-0.055,0.012,-0.115,0.011,0.092,0.178,0.223,0.068,-0.038,-0.074,-0.048,0.055,0.086,0.069,0.178,0.06,-0.021,0.207,-0.114,-0.15,0.037,0.197,0.124,0.001,0.097,-0.029,0.154,-0.021,0.156,0.0,0.411,0.0,0.0,0.132,0.0,0.0,0.0,0.0,0.0,-0.552,0.0,0.0,0.0,0.0,0.0,0.0,43 +-0.279,-0.22,-0.878,-0.711,-0.126,0.111,0.021,0.3,-0.12,0.41,-0.405,-0.277,-0.324,-0.32,-0.181,0.353,-0.12,-0.245,-0.51,0.0,0.023,-0.195,-0.324,-0.027,0.082,-0.268,-0.115,0.184,-0.189,-0.062,0.077,0.262,-0.294,-0.208,0.421,-0.101,-0.098,0.894,0.085,0.09,-0.527,-0.069,-0.254,-0.129,-0.34,0.106,-0.009,0.098,0.029,0.09,0.056,-0.046,-0.044,-0.161,0.083,-0.124,-0.104,-0.111,0.083,0.073,0.027,-0.069,-0.031,0.101,0.006,0.015,-0.076,0.009,0.168,-0.017,0.127,-0.178,-0.007,-0.013,0.295,0.056,-0.098,-0.103,0.075,0.092,-0.121,-0.084,0.0,0.0,1.745,0.0,0.217,0.0,0.0,-0.89,0.0,0.0,-0.552,0.0,0.0,0.0,0.0,0.0,0.0,44 
+-0.349,-0.344,-0.614,-0.846,-0.098,0.071,0.109,0.337,0.118,0.264,0.131,-0.2,-0.423,-0.501,-0.444,0.114,-0.148,0.341,-0.547,-0.199,0.132,-0.307,-0.088,0.113,0.14,-0.222,-0.041,-0.019,0.108,0.036,-0.017,0.251,-0.264,-0.017,-0.025,0.06,-0.057,-0.471,0.067,0.007,-0.505,-0.045,-0.308,-0.376,-0.839,0.102,-0.031,-0.003,0.057,0.128,0.047,0.068,-0.013,-0.114,-0.019,-0.048,0.118,0.039,0.142,0.133,0.057,-0.051,-0.095,0.089,0.061,0.032,0.037,0.13,0.006,-0.042,0.175,-0.093,-0.065,-0.105,0.25,0.148,-0.027,0.01,0.045,0.074,0.001,0.145,0.0,0.302,0.125,0.0,0.048,0.0,0.0,0.0,-0.393,0.0,-0.552,0.0,0.0,0.0,0.0,0.0,0.0,45 +-0.165,-0.309,-0.677,-0.709,-0.037,-0.141,0.186,0.355,0.303,0.22,0.271,-0.168,-0.29,-0.427,-0.315,0.064,-0.157,0.146,-0.457,-0.18,-0.055,-0.24,-0.018,0.111,-0.063,-0.249,-0.022,0.008,0.059,-0.014,0.085,0.178,-0.136,-0.081,-0.147,0.027,-0.091,-0.412,0.156,0.023,-0.483,-0.061,-0.264,-0.207,-0.157,0.065,-0.045,0.097,0.005,0.074,0.113,-0.003,0.021,-0.083,0.089,-0.095,0.068,0.066,0.111,0.129,0.049,0.005,-0.022,-0.054,0.026,0.101,0.081,0.101,0.065,-0.037,0.136,-0.06,-0.016,0.09,0.334,0.049,0.003,-0.012,0.009,0.09,-0.026,0.112,0.0,0.329,0.106,0.0,0.163,0.0,0.0,0.0,0.0,-0.474,-0.552,0.0,0.0,0.0,0.0,0.0,0.0,46 +-0.489,-0.4,-0.116,-0.705,-0.115,-0.083,0.033,0.158,0.584,0.031,-0.297,0.005,-0.399,-0.048,-0.905,0.046,0.021,0.136,-0.8,-0.173,-0.469,0.034,0.038,0.082,-0.03,-0.032,-0.251,-0.049,0.372,-0.11,0.772,0.135,-0.444,-0.525,-0.02,-0.063,0.329,1.169,-0.08,-0.091,-0.763,0.02,-1.883,-1.16,-0.808,0.027,0.099,-0.06,0.045,0.15,0.034,0.157,-0.051,-0.008,0.038,-0.075,-0.029,0.128,0.016,0.112,-0.034,-0.15,-0.316,-0.273,0.159,0.066,0.01,0.027,0.088,0.092,0.092,-0.128,0.054,0.027,0.123,0.046,0.053,-0.277,0.079,-0.196,-0.035,0.211,0.0,0.0,0.066,0.484,-0.734,0.0,0.0,0.0,0.0,0.0,-0.552,-0.737,0.0,0.0,0.0,0.0,0.0,47 +-0.447,-0.704,-0.786,-0.861,-0.239,-0.029,-0.291,0.273,0.047,0.366,0.257,-0.554,-0.609,-0.654,-0.513,0.162,-0.132,0.183,-0.785,-0.253,0.234,-0.28,-0.117,0.037,-0.169,-0.196,-0.037,-0.123,0.004,0.104,-0.133,0.416,-0.506,-0.063,-0.075,0.104,0.006,-0.417,0.135,-0.12,-0.638,0.423,-1.026,-0.628,-0.783,0.144,0.016,0.079,-0.031,0.158,0.129,0.023,0.005,-0.316,0.028,-0.092,-0.068,0.044,0.085,0.158,0.104,-0.052,-0.117,0.022,0.031,0.063,0.068,0.141,-0.008,-0.096,0.168,-0.161,-0.259,-0.074,0.341,0.209,-0.135,-0.116,0.123,0.022,-0.037,0.07,0.0,0.584,0.0,0.0,-0.084,0.0,0.0,0.0,0.0,0.0,-0.552,0.0,0.0,0.0,0.0,0.0,-0.441,48 +-0.46,-0.299,-0.616,-0.779,-0.061,0.105,0.145,0.474,0.153,0.224,0.353,-0.212,-0.382,-0.456,-0.373,0.076,-0.101,0.178,-0.444,-0.24,0.027,-0.338,-0.208,0.042,0.103,-0.216,-0.098,-0.018,-0.025,-0.018,-0.045,0.179,-0.037,-0.082,0.015,0.03,-0.102,-0.299,0.082,0.04,-0.455,-0.091,-0.148,-0.199,-0.646,0.106,-0.121,0.037,0.076,0.061,0.094,0.066,0.011,0.015,-0.012,-0.078,0.037,0.083,0.157,0.171,0.032,-0.03,0.005,0.017,0.023,0.027,-0.003,-0.01,0.0,-0.055,0.181,-0.102,-0.024,-0.052,0.276,0.088,0.031,-0.13,0.005,0.006,-0.056,0.156,0.0,0.332,0.055,0.0,-0.271,0.0,0.0,0.0,0.0,0.0,-0.552,0.0,-0.355,0.0,0.0,0.0,0.0,49 
+-0.461,-0.551,-0.855,-0.669,-0.508,0.462,1.617,-0.263,0.221,0.206,0.296,-0.992,-0.142,-0.126,-0.262,0.171,-0.133,0.424,-1.429,-0.084,-0.025,-0.002,-0.076,-0.281,1.053,-0.799,-0.09,0.408,0.236,0.366,0.4,0.148,0.038,-0.04,-0.064,0.117,0.157,-0.272,0.233,-0.455,-0.69,0.104,-3.718,-2.914,-0.911,0.017,-0.076,0.061,-0.002,0.152,0.045,0.13,-0.053,0.086,0.07,-0.072,-0.163,0.023,0.13,0.099,0.138,-0.186,-0.122,0.059,0.169,0.165,0.051,0.283,0.136,-0.284,0.208,-0.266,-0.105,-0.25,-0.075,0.228,0.152,-0.177,-0.158,0.121,-0.274,0.208,0.0,0.276,0.002,0.0,-1.044,0.0,0.0,0.0,0.0,0.0,-0.552,0.0,0.0,-0.978,0.0,0.0,0.0,50 +-0.551,-0.289,0.186,-0.202,0.049,-0.066,-0.163,0.061,1.256,-0.104,-0.417,0.268,-0.244,0.25,-0.697,-0.107,0.046,0.088,-0.64,-0.005,-0.468,0.207,0.132,0.012,-0.202,-0.174,0.014,-0.134,0.209,-0.269,0.636,-0.091,-0.216,-0.57,-0.032,-0.133,0.223,1.19,0.063,-0.05,-0.333,0.088,-1.479,-0.947,-0.772,-0.047,0.089,0.107,-0.124,0.072,-0.027,0.077,0.003,0.003,0.064,-0.061,-0.033,0.282,-0.059,0.097,0.043,-0.048,-0.233,-0.246,0.134,0.051,0.087,0.076,0.116,0.203,0.053,-0.096,0.077,0.156,0.2,0.092,0.076,-0.244,0.093,-0.208,0.03,0.099,0.0,0.0,0.291,0.493,-0.729,0.0,0.0,0.0,0.0,0.0,0.0,-0.737,0.0,0.0,0.0,0.0,0.0,51 +-0.457,-0.423,0.287,-0.14,-0.311,0.087,0.344,0.149,0.65,-0.083,-0.189,-0.231,-0.023,0.393,-0.621,-0.098,-0.003,0.218,-1.327,-0.002,-0.447,0.409,0.085,-0.04,0.27,-0.06,-0.037,-0.108,0.398,-0.102,0.748,-0.1,-0.131,-0.426,0.076,-0.039,0.336,1.595,0.064,-0.243,-0.428,0.117,-3.496,-2.901,-0.255,-0.014,0.216,0.022,0.115,0.021,0.052,0.056,0.063,0.052,0.046,-0.075,-0.105,0.201,-0.014,0.081,0.116,-0.134,-0.244,-0.12,0.178,0.045,0.004,0.174,0.173,0.128,0.152,-0.146,0.081,-0.172,0.083,0.099,0.13,-0.316,0.076,-0.237,0.048,0.057,0.0,0.0,0.0,0.816,-1.36,0.0,0.0,0.0,0.0,0.0,0.0,-0.737,0.0,-0.978,0.0,0.0,0.0,52 +-0.578,-0.188,0.476,0.041,-0.272,-0.156,0.137,0.121,1.378,0.124,-0.432,0.043,-0.217,0.666,-0.46,-0.136,0.484,0.172,-0.407,0.142,-0.638,0.534,-0.247,0.36,-0.043,-0.177,-0.109,-0.242,0.104,-0.397,0.782,-0.14,-0.45,-0.789,-0.01,-0.021,0.366,1.719,-0.074,-0.104,-0.503,0.309,-1.734,-1.097,-0.231,0.1,0.16,0.082,0.109,-0.015,0.03,0.036,-0.079,0.071,-0.02,-0.094,-0.043,0.14,-0.07,0.114,0.049,-0.211,-0.262,-0.231,0.172,0.016,0.008,0.084,0.146,0.119,0.001,-0.054,0.009,0.029,0.105,0.03,0.385,-0.279,0.133,-0.367,0.012,0.271,0.0,0.0,0.0,0.473,-3.772,0.0,0.0,0.0,0.0,0.0,0.0,-0.797,0.0,0.0,0.0,0.0,0.0,53 +-1.195,-0.559,-0.127,-0.156,-0.522,0.565,0.283,-0.002,-0.294,0.415,-0.078,-0.249,-0.511,-0.316,-0.398,0.007,-0.041,0.235,-0.49,0.473,-0.373,-0.31,-0.495,-0.591,-0.342,-1.06,0.127,-0.088,-0.277,-0.266,-0.069,-0.489,0.422,-0.253,-0.193,0.1,0.306,0.329,0.587,-0.251,1.089,0.256,-0.77,-0.507,-0.889,0.209,-0.313,0.282,-0.442,0.428,0.02,0.322,-0.254,0.51,0.237,0.334,-0.376,0.248,0.016,0.286,0.387,-0.026,0.011,-0.275,0.305,0.349,0.074,0.517,-0.153,-0.068,-0.186,-0.177,0.018,-0.125,0.438,0.593,0.018,-0.365,-0.641,-0.194,-0.549,0.13,0.0,0.0,0.0,0.76,-2.155,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.521,0.0,54 
+-1.014,-0.565,-0.639,-0.373,-0.432,0.379,0.58,0.147,-0.154,0.386,-0.052,-0.229,-0.59,-0.186,-0.327,-0.067,-0.134,0.448,-0.389,0.3,-0.115,-0.3,-0.545,-0.413,0.091,-0.945,0.013,-0.168,-0.201,-0.092,-0.062,-0.297,0.277,-0.32,-0.126,0.082,0.216,-0.053,0.476,-0.13,0.207,0.165,-0.836,-0.555,-0.959,0.173,-0.111,0.189,-0.149,0.247,0.063,0.134,-0.274,0.399,0.227,0.152,-0.163,0.172,0.013,0.247,0.254,-0.155,-0.073,-0.147,0.258,0.206,0.124,0.353,0.039,-0.047,-0.019,-0.124,0.043,-0.099,0.372,0.384,0.0,-0.326,-0.292,-0.239,-0.255,0.267,0.0,0.0,0.0,0.794,-1.733,-0.698,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.521,0.0,55 +-1.777,-0.869,-0.197,-0.257,-0.858,1.088,-2.069,0.011,-0.162,0.685,0.241,-0.649,-0.83,-0.719,-0.789,0.162,0.054,0.431,-0.986,0.655,-0.259,-0.605,-0.667,-0.591,-2.376,-0.786,0.148,0.101,-0.336,0.147,-0.046,-0.465,0.296,-0.286,-0.063,0.073,0.451,0.36,0.791,-0.49,1.055,0.4,-1.651,-1.107,-1.319,0.398,-0.5,0.391,-0.278,0.424,0.249,0.452,-0.479,0.549,0.339,0.357,-0.587,0.319,0.012,0.457,0.551,-0.251,-0.017,-0.219,0.457,0.287,0.147,0.528,-0.153,-0.135,-0.341,-0.203,-0.037,-0.498,0.374,0.729,-0.213,-0.648,-0.732,-0.46,-0.773,0.27,0.0,0.0,0.0,0.471,-3.253,0.0,-0.989,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.521,0.0,56 +-1.458,-0.478,-0.308,-0.359,-0.827,0.667,-0.089,-0.466,-0.302,0.553,-0.316,-0.331,-0.311,-0.55,-0.562,0.249,-0.102,-0.246,-0.647,0.531,0.064,-0.416,-0.856,-0.716,-0.708,-1.278,0.067,0.195,-0.349,0.095,-0.147,-0.376,0.205,-0.399,0.395,-0.296,0.436,1.007,0.696,0.102,0.655,0.135,-0.962,-0.519,-1.211,0.273,-0.476,0.242,-0.216,0.341,0.153,0.318,-0.542,0.439,0.272,0.189,-0.635,0.165,0.174,0.302,0.353,-0.236,-0.065,0.26,0.301,0.255,0.086,0.44,-0.201,-0.134,-0.274,-0.347,-0.135,-0.325,0.201,0.557,0.145,-0.344,-0.705,-0.279,-0.69,0.022,0.0,0.0,1.025,0.0,-3.368,0.0,0.0,-0.89,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.521,0.0,57 +-1.085,-0.544,-0.205,-0.24,-0.576,0.629,0.054,-0.254,-0.386,0.269,-0.077,-0.305,-0.364,-0.49,-0.572,-0.058,-0.181,0.43,-0.638,0.332,-0.223,-0.453,-0.387,-0.531,-0.303,-1.017,0.027,-0.078,-0.247,-0.05,-0.18,-0.281,0.175,-0.3,0.007,0.103,0.134,0.187,0.483,-0.165,0.735,0.128,-0.758,-0.401,-0.898,0.211,-0.195,0.169,-0.078,0.309,0.152,0.171,-0.313,0.353,0.301,0.185,-0.515,0.252,0.087,0.217,0.307,-0.242,-0.005,-0.046,0.211,0.209,0.169,0.283,-0.154,-0.056,-0.23,-0.197,-0.092,-0.146,0.328,0.544,-0.053,-0.277,-0.483,-0.412,-0.61,0.193,0.0,0.0,0.016,0.728,-2.291,0.0,0.0,0.0,-0.393,0.0,0.0,0.0,0.0,0.0,0.0,-0.521,0.0,58 +-0.996,-0.558,-0.074,-0.227,-0.634,0.565,-0.026,-0.016,-0.616,0.228,-0.354,-0.395,-0.458,-0.274,-0.302,-0.038,-0.052,0.253,-0.467,0.324,-0.496,-0.42,-0.603,-0.558,-0.412,-0.594,0.024,-0.18,-0.246,-0.151,-0.193,-0.441,0.265,-0.325,-0.029,0.041,0.155,0.011,0.342,-0.208,0.876,0.226,-0.744,-0.509,-0.809,0.119,-0.096,0.145,-0.012,0.178,0.047,0.089,-0.315,0.377,0.116,0.15,-0.362,0.16,0.034,0.145,0.289,-0.224,-0.098,-0.015,0.185,0.146,0.081,0.274,-0.088,-0.109,-0.261,-0.172,0.019,-0.28,0.359,0.416,-0.1,-0.52,-0.281,-0.555,-0.527,0.14,0.0,0.0,0.0,0.843,-2.144,0.0,0.0,0.0,0.0,-0.474,0.0,0.0,0.0,0.0,0.0,-0.521,0.0,59 
+-0.923,-0.519,-0.308,-0.396,-0.442,0.446,0.338,0.15,-0.421,0.313,-0.152,-0.231,-0.544,-0.212,-0.217,-0.04,-0.042,0.347,-0.425,0.386,-0.239,-0.325,-0.519,-0.438,0.039,-0.855,0.034,-0.129,-0.145,-0.144,-0.081,-0.319,0.259,-0.185,-0.075,0.087,0.23,-0.082,0.382,-0.145,0.51,0.143,-0.723,-0.498,-0.741,0.156,-0.085,0.125,-0.163,0.258,0.037,0.124,-0.225,0.33,0.131,0.168,-0.232,0.14,-0.059,0.165,0.289,-0.163,-0.097,-0.116,0.217,0.117,0.069,0.354,-0.032,-0.13,-0.063,-0.151,0.111,-0.222,0.45,0.335,0.003,-0.41,-0.247,-0.366,-0.35,0.204,0.0,0.0,0.0,0.798,-1.924,0.0,0.0,0.0,0.0,0.0,-0.552,0.0,0.0,0.0,0.0,-0.521,0.0,60 +-1.061,-0.629,-0.071,-0.176,-0.238,0.802,0.042,-0.148,0.658,0.029,-0.137,-0.241,-0.459,-0.229,-0.98,-0.085,-0.154,0.264,-0.444,0.347,-0.504,-0.274,-0.326,-0.067,-0.212,-0.76,-0.109,-0.34,0.43,-0.227,0.577,-0.33,0.19,-0.822,0.066,0.048,0.374,0.338,0.296,-0.373,-0.176,0.135,-0.955,-0.502,-0.685,0.011,-0.073,0.091,-0.438,0.187,0.119,0.114,-0.161,0.349,0.17,0.069,-0.26,0.365,-0.007,0.216,0.277,-0.176,-0.125,-0.161,0.183,0.096,0.037,0.179,0.135,0.019,-0.004,-0.176,0.043,-0.078,0.495,0.336,0.413,-0.242,-0.216,-0.585,-0.295,0.336,0.0,0.0,0.443,0.645,-3.817,0.0,0.0,0.0,0.0,0.0,0.0,-0.737,0.0,0.0,0.0,-0.521,0.0,61 +-1.168,-0.64,-0.389,-0.436,-0.597,0.681,0.148,-0.38,-0.235,0.381,0.023,-0.423,-0.536,-0.5,-0.545,0.014,-0.138,0.327,-0.675,0.473,-0.123,-0.355,-0.515,-0.514,-0.189,-1.119,0.083,-0.008,-0.189,-0.01,-0.05,-0.177,0.282,-0.422,-0.07,-0.053,0.215,0.136,0.465,-0.1,0.521,0.635,-0.907,-0.553,-1.041,0.098,-0.377,0.177,-0.293,0.407,0.119,0.234,-0.353,0.283,0.316,0.168,-0.531,0.395,0.11,0.286,0.386,-0.237,0.018,0.052,0.146,0.228,0.235,0.154,-0.047,-0.086,-0.309,-0.16,-0.328,-0.245,0.328,0.469,-0.026,-0.171,-0.509,-0.327,-0.609,0.314,0.391,0.0,0.0,0.788,-2.127,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.521,-0.441,62 +-1.353,-0.785,-0.146,-0.234,-0.624,0.543,0.315,-0.129,-0.384,0.357,-0.03,-0.482,-0.664,-0.378,-0.567,-0.074,-0.161,0.308,-0.763,0.452,-0.225,-0.411,-0.407,-0.589,-0.152,-1.108,0.01,-0.206,-0.188,-0.089,-0.177,-0.414,0.343,-0.486,-0.099,-0.017,0.195,0.103,0.428,-0.256,0.866,0.334,-0.989,-0.612,-1.092,0.144,-0.49,0.251,-0.216,0.369,0.087,0.297,-0.291,0.45,0.36,0.342,-0.536,0.285,0.081,0.334,0.33,-0.187,-0.065,0.028,0.203,0.322,0.317,0.329,-0.152,-0.094,-0.17,-0.17,-0.095,-0.338,0.383,0.545,-0.015,-0.292,-0.733,-0.375,-0.644,0.283,0.0,0.0,0.0,0.764,-3.771,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.355,0.0,0.0,-0.521,0.0,63 +-1.248,-0.464,-0.196,-0.23,-0.753,0.719,1.009,-0.418,-0.143,0.21,0.09,-0.676,-0.21,-0.105,-0.183,-0.081,-0.175,0.377,-1.69,0.464,-0.35,-0.226,-0.406,-0.492,0.58,-1.016,-0.118,0.205,-0.105,0.039,0.191,-0.428,0.285,-0.209,-0.106,0.183,0.384,-0.056,0.395,-0.361,0.475,0.147,-2.983,-2.557,-0.928,0.149,-0.075,0.098,-0.338,0.244,0.099,0.119,-0.324,0.256,0.225,0.125,-0.321,0.199,0.068,0.217,0.354,-0.351,-0.081,0.053,0.228,0.119,0.018,0.187,0.022,-0.154,-0.152,-0.166,-0.047,-0.465,0.286,0.481,-0.028,-0.24,-0.315,-0.399,-0.657,0.249,0.0,0.0,0.0,0.688,-2.846,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.978,0.0,-0.521,0.0,64 
+-1.798,-0.774,-0.037,-0.223,-0.871,0.742,-1.99,-0.291,-0.277,0.346,0.064,-0.82,-0.454,-0.725,-0.729,0.109,-0.061,0.379,-0.953,0.578,-0.124,-0.506,-0.656,-0.685,-1.21,-0.75,0.131,0.142,-0.47,0.099,0.06,-0.423,0.331,-0.317,-0.139,-0.009,0.484,0.219,0.499,-0.354,0.878,0.328,-1.566,-1.154,-1.433,0.304,-0.303,0.253,0.112,0.349,0.146,0.241,-0.367,0.665,0.266,0.196,-0.505,0.174,0.076,0.132,0.476,-0.44,-0.049,-0.109,0.41,0.232,0.101,0.409,-0.345,-0.099,-0.328,-0.267,0.027,-0.701,0.273,0.628,-0.179,-0.486,-0.563,-0.245,-0.636,0.271,0.0,0.048,0.0,0.106,-3.231,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.975,-0.521,0.0,65 +-0.18,-0.27,-0.254,-0.523,0.101,0.046,-0.103,-0.048,-0.027,0.252,0.077,0.043,-0.251,-0.474,-0.545,0.169,0.049,0.089,-0.466,-0.126,0.175,-0.253,-0.01,-0.035,-0.159,0.046,-0.362,0.074,-0.032,0.086,-0.102,0.141,-0.264,-0.079,0.041,-0.165,-0.039,0.059,-0.024,0.041,-0.176,0.422,-0.22,-0.473,-1.178,0.037,-0.045,0.011,0.199,0.029,0.156,0.16,-0.159,-0.186,0.024,-0.025,-0.066,0.189,0.099,0.126,0.107,-0.006,-0.046,0.005,0.005,0.05,0.013,0.054,0.033,-0.026,0.111,-0.061,-0.295,-0.006,0.125,0.138,0.023,-0.14,-0.028,0.086,-0.036,-0.051,0.0,0.236,0.0,0.0,0.154,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.441,66 +-0.673,-0.433,0.19,-0.337,-0.192,-0.022,-0.248,-0.291,0.613,0.059,-0.458,-0.051,-0.389,0.11,-0.792,0.114,0.185,0.09,-0.831,-0.13,-0.469,0.184,0.044,0.132,-0.188,0.109,-0.015,-0.095,0.238,-0.125,0.622,0.038,-0.419,-0.554,-0.009,-0.118,0.375,1.015,-0.095,-0.054,-0.558,0.568,-2.032,-1.328,-0.879,0.14,0.061,0.093,0.112,0.001,0.044,0.146,-0.144,-0.25,-0.002,-0.069,-0.194,0.209,0.077,0.113,0.01,-0.192,-0.256,-0.159,0.153,0.033,0.043,0.118,0.055,-0.201,-0.014,-0.158,-0.153,-0.078,0.159,0.059,0.116,-0.311,0.069,-0.239,-0.061,0.206,0.0,0.0,0.034,0.526,-2.078,0.0,0.0,0.0,0.0,0.0,0.0,-0.737,0.0,0.0,0.0,0.0,-0.441,67 +-0.437,-0.469,-0.491,-0.667,-0.417,0.38,1.204,-0.121,0.276,0.148,0.353,-1.153,0.027,-0.213,-0.242,0.24,-0.009,0.365,-1.408,0.012,0.031,0.008,-0.221,-0.496,0.647,-0.446,0.06,0.391,0.083,0.262,0.18,0.0,-0.027,-0.096,-0.003,0.176,0.154,0.3,0.154,-0.419,-0.406,0.659,-3.392,-2.635,-0.773,-0.02,-0.167,0.085,-0.125,0.03,0.049,0.119,-0.224,0.008,0.045,-0.083,-0.272,0.281,0.334,0.085,0.242,-0.237,-0.078,-0.103,0.094,0.102,0.05,0.247,-0.021,-0.166,0.246,-0.227,-0.258,-0.332,-0.1,0.308,0.146,-0.246,-0.17,0.009,-0.344,-0.044,0.0,0.107,0.074,0.0,-1.042,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.978,0.0,0.0,-0.441,68 +-0.341,-0.388,-0.306,-0.763,-0.174,0.003,-0.211,-0.529,0.163,0.313,-0.157,-0.449,-0.457,-0.556,-0.399,0.223,-0.044,0.219,-0.552,-0.059,0.315,-0.138,-0.129,0.069,-0.154,-0.268,0.038,0.0,0.073,0.104,-0.081,0.261,-0.377,-0.247,0.02,-0.104,0.162,-0.038,0.033,-0.151,-0.357,0.807,-0.858,-0.403,-0.191,0.027,-0.117,0.084,0.23,0.054,0.08,0.078,-0.166,-0.255,-0.025,-0.007,-0.271,0.155,0.098,0.136,0.157,-0.187,-0.027,-0.035,0.052,0.061,0.098,0.038,-0.027,-0.133,0.106,-0.159,-0.244,-0.226,-0.078,0.119,0.061,0.061,-0.005,-0.031,-0.122,0.167,0.547,0.385,0.0,0.0,0.111,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.505,69 
+-0.371,-0.082,-0.138,0.079,-0.078,0.121,-0.079,0.078,-0.161,0.047,-0.074,-0.114,-0.072,-0.233,-0.238,0.038,-0.009,0.045,-0.147,-0.073,-0.221,-0.315,-0.107,-0.08,-0.016,-0.131,0.038,-0.095,-0.005,-0.044,-0.114,-0.004,0.068,-0.091,0.043,0.039,-0.143,0.004,-0.01,0.07,0.122,-0.117,0.026,0.05,-0.172,0.031,-0.047,-0.004,0.026,0.014,0.021,0.058,-0.004,0.225,-0.042,-0.055,0.044,0.077,0.05,-0.005,0.052,0.05,-0.024,0.019,-0.026,-0.026,-0.06,-0.001,0.048,-0.092,0.082,0.009,0.045,-0.081,0.158,0.058,0.042,-0.169,0.073,-0.037,-0.049,0.021,0.0,0.0,0.099,0.0,-0.626,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.355,0.0,0.0,0.0,0.0,70 +-0.504,0.107,-0.367,0.119,0.007,0.197,0.031,0.111,-0.13,0.212,-0.512,-0.027,0.021,-0.164,-0.192,0.183,-0.094,-0.357,-0.153,0.026,-0.25,-0.224,-0.268,-0.081,-0.068,-0.152,0.03,0.093,-0.207,-0.127,-0.134,0.075,0.04,-0.289,0.488,-0.05,-0.151,1.347,0.085,0.017,0.115,-0.073,0.071,-0.057,-0.418,0.043,-0.054,0.043,-0.061,0.057,-0.106,0.051,0.032,0.022,0.148,-0.136,-0.083,0.067,-0.041,0.163,0.005,-0.014,-0.024,-0.031,0.01,-0.007,-0.056,-0.014,0.235,0.299,0.135,-0.156,-0.116,-0.013,0.172,-0.027,0.032,0.007,-0.038,0.118,-0.072,-0.12,1.21,0.0,1.738,0.0,-0.36,0.0,0.0,-0.89,0.0,0.0,0.0,0.0,-0.355,0.0,0.0,0.0,0.0,71 +-0.389,-0.079,-0.181,-0.091,-0.098,0.128,-0.064,0.038,-0.127,0.047,-0.16,-0.001,-0.075,-0.32,-0.272,-0.14,-0.078,0.171,-0.213,-0.146,-0.148,-0.298,-0.051,-0.047,-0.083,-0.188,0.105,-0.107,-0.04,-0.07,-0.13,-0.019,0.15,-0.159,0.011,0.069,-0.138,-0.132,0.156,0.04,0.026,-0.062,-0.045,-0.352,-0.688,0.07,-0.081,0.061,-0.161,0.153,0.003,0.048,0.005,0.211,0.106,-0.039,0.001,0.121,0.019,0.172,0.092,-0.022,-0.057,-0.012,0.017,0.074,-0.002,0.114,0.115,0.194,0.078,-0.044,-0.236,-0.078,0.25,0.127,0.058,-0.03,-0.026,-0.076,-0.049,0.133,0.0,0.0,0.309,0.078,-0.332,0.0,0.0,0.0,-0.393,0.0,0.0,0.0,-0.355,0.0,0.0,0.0,0.0,72 +-0.617,-0.383,0.328,0.027,-0.325,-0.035,-0.197,0.01,0.774,-0.094,-0.376,-0.08,-0.309,0.045,-0.834,-0.056,-0.002,0.086,-0.565,-0.114,-0.449,-0.028,-0.028,0.008,-0.158,-0.052,-0.204,-0.306,0.265,-0.153,0.674,-0.042,-0.184,-0.443,-0.018,0.049,0.282,1.3,0.015,-0.016,-0.458,0.063,-1.468,-0.774,-0.407,0.058,-0.06,0.017,0.023,0.07,0.099,0.173,-0.077,0.166,0.003,-0.066,-0.136,0.178,0.097,0.103,0.043,-0.144,-0.216,-0.105,0.105,0.038,0.005,0.073,0.025,-0.008,0.141,-0.096,-0.087,0.005,0.075,0.142,0.163,-0.21,0.115,-0.252,-0.12,0.224,0.0,0.0,0.0,0.838,-2.164,0.0,0.0,0.0,0.0,0.0,0.0,-0.737,-0.355,0.0,0.0,0.0,0.0,73 +-0.536,-0.391,-0.28,-0.598,-0.199,0.107,-0.199,-0.2,0.003,0.175,-0.073,-0.42,-0.311,-0.702,-0.615,0.089,-0.077,0.158,-0.426,-0.22,0.053,-0.401,-0.322,-0.072,-0.188,-0.416,-0.053,-0.085,0.04,0.196,-0.125,0.103,-0.066,-0.146,0.035,-0.002,0.041,-0.375,0.165,-0.042,-0.2,0.395,-0.558,-0.236,-0.784,0.044,-0.201,0.068,0.016,0.133,0.148,0.201,-0.162,0.093,0.055,-0.036,-0.195,0.181,0.211,0.139,0.116,-0.153,-0.038,0.166,0.014,0.066,0.047,0.141,0.006,-0.178,0.152,-0.098,-0.279,-0.211,-0.099,0.23,0.02,-0.082,-0.121,-0.042,-0.139,0.193,0.0,0.117,0.028,0.0,-0.351,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.355,0.0,0.0,0.0,-0.441,74 
+-0.728,-0.382,-0.223,-0.468,-0.555,0.559,1.396,-0.174,-0.165,0.123,0.291,-1.006,0.163,-0.105,-0.196,0.131,-0.085,0.402,-1.363,-0.029,-0.224,-0.033,-0.301,-0.388,0.85,-0.624,-0.133,0.392,0.07,0.273,0.355,-0.033,0.203,-0.102,0.037,0.152,0.11,-0.239,0.139,-0.425,-0.332,0.044,-3.112,-2.64,-0.076,0.059,-0.088,0.024,0.014,0.08,0.086,0.131,-0.165,0.148,0.015,0.024,-0.231,0.154,0.195,0.07,0.185,-0.283,-0.069,0.137,0.004,0.071,0.041,0.226,0.002,-0.29,0.09,-0.052,-0.186,-0.366,-0.174,0.156,0.34,-0.211,-0.05,-0.13,-0.324,0.045,0.721,0.0,0.434,0.219,-1.919,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.355,-0.978,0.0,0.0,0.0,75 +-0.536,-0.122,0.072,-0.286,-0.041,0.242,0.064,-0.436,0.196,0.073,-0.169,-0.174,-0.098,-0.464,-0.387,-0.114,-0.094,0.14,-0.048,0.047,-0.069,-0.231,-0.139,-0.165,-0.108,-0.267,0.095,-0.073,0.055,-0.088,-0.238,-0.118,0.295,-0.184,-0.107,-0.007,0.049,-0.422,0.2,0.045,0.06,0.108,-0.099,-0.231,-0.384,0.066,-0.206,0.133,-0.294,0.122,0.0,0.124,-0.079,0.101,0.069,0.114,-0.131,0.183,0.121,0.275,0.165,-0.091,0.04,-0.001,0.082,0.109,0.165,0.136,0.047,0.181,0.22,-0.055,-0.235,-0.203,-0.091,0.133,0.156,-0.08,-0.133,-0.001,-0.168,0.191,0.0,0.0,0.174,0.174,-2.705,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.49,0.0,0.0,0.0,0.0,76 +-0.487,-0.236,-0.26,-0.077,-0.278,0.403,1.262,0.046,-0.082,0.127,0.32,-0.838,0.117,0.102,-0.053,0.229,-0.064,0.376,-1.386,0.026,-0.17,0.203,-0.059,-0.318,0.868,-0.294,-0.059,0.471,0.07,0.251,0.398,-0.052,0.03,0.062,0.037,0.018,0.155,0.098,0.01,-0.422,-0.259,0.085,-2.957,-2.246,-0.361,0.058,0.049,-0.145,0.163,-0.098,0.034,0.069,-0.133,0.011,-0.017,-0.007,-0.161,0.147,0.157,0.033,0.165,-0.23,-0.059,0.026,0.039,-0.027,-0.033,0.163,0.039,-0.408,0.15,-0.108,-0.025,-0.273,-0.032,0.141,0.281,-0.378,-0.072,-0.021,-0.227,0.027,0.317,0.0,0.25,0.116,-1.28,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.978,0.0,0.0,0.0,77 +-0.508,-0.141,-0.15,-0.338,-0.46,0.593,1.672,-0.519,0.169,0.08,0.314,-0.938,0.264,0.222,0.113,0.011,-0.092,0.411,-1.597,0.186,-0.123,0.379,-0.197,-0.265,1.129,-0.794,-0.061,0.494,0.072,0.18,0.459,-0.08,0.077,0.052,0.055,0.172,0.219,0.282,0.176,-0.457,-0.255,0.257,-4.12,-4.323,-0.511,0.074,0.033,0.01,0.003,0.048,0.023,-0.008,-0.074,0.122,0.066,-0.027,-0.204,0.103,0.048,0.113,0.227,-0.188,-0.184,0.02,0.052,0.086,0.014,0.202,0.08,-0.163,0.13,-0.148,-0.155,-0.399,-0.207,0.304,0.3,-0.155,-0.033,0.017,-0.236,0.081,0.359,0.0,0.381,0.267,-1.351,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.998,0.0,0.0,0.0,78 +-0.621,-0.19,-0.326,-0.209,-0.242,0.18,-2.156,-0.132,1.279,0.006,0.423,-0.163,0.246,-0.902,-0.748,-0.181,-0.143,0.228,-0.821,-0.164,-0.212,-0.454,-0.034,0.031,-0.567,-0.094,0.053,0.152,-0.055,-0.074,0.287,0.079,0.237,-0.074,0.141,0.161,0.209,-0.017,0.221,-0.149,-0.257,-0.005,-1.176,-0.566,-0.401,-0.091,-0.04,0.079,-0.337,0.191,0.08,0.047,0.075,0.156,0.005,-0.16,-0.053,0.236,-0.007,0.132,0.217,-0.089,-0.229,-0.123,0.139,0.065,0.084,0.097,0.188,0.306,0.24,-0.014,-0.071,-0.076,0.082,0.278,0.012,-0.046,0.064,-0.023,0.022,0.287,0.0,0.265,0.0,0.0,-0.81,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.975,0.0,0.0,79 
+-0.776,-0.157,-0.466,-0.378,-0.448,0.443,-2.359,-0.198,0.067,0.269,-0.052,-0.641,0.111,-0.899,-0.749,0.371,-0.144,-0.142,-0.851,0.08,-0.07,-0.478,-0.419,-0.278,-0.714,0.026,-0.013,0.513,-0.093,0.082,0.312,0.094,0.039,-0.226,0.372,-0.046,0.095,1.284,0.273,-0.13,-0.117,0.102,-1.305,-0.852,-0.692,0.119,-0.058,0.047,-0.017,0.099,0.095,-0.002,-0.066,0.248,0.0,-0.217,-0.236,0.051,0.108,0.142,0.246,-0.223,-0.08,0.176,0.121,0.152,0.01,0.221,0.077,-0.057,0.177,-0.183,-0.075,-0.167,0.002,0.281,-0.05,-0.08,-0.007,0.033,-0.272,0.001,0.0,0.0,1.418,0.0,-0.363,0.0,0.0,-0.89,0.0,0.0,0.0,0.0,0.0,0.0,-0.975,0.0,0.0,80 +-0.707,-0.337,-0.3,-0.353,-0.4,0.386,-2.203,0.085,0.093,0.003,0.254,-0.891,0.085,-0.85,-0.769,0.107,-0.207,0.365,-0.938,-0.101,-0.046,-0.471,0.013,-0.183,-0.664,-0.039,-0.004,0.237,0.125,0.184,0.489,0.145,0.158,-0.067,-0.027,0.102,0.259,-0.308,0.312,-0.296,-0.191,0.139,-1.712,-1.059,-0.887,0.035,-0.077,0.028,-0.044,0.165,0.145,0.017,-0.005,0.249,0.083,-0.141,-0.17,0.142,0.081,0.197,0.172,-0.101,-0.183,-0.103,0.091,0.084,0.105,0.231,0.069,-0.153,0.261,-0.154,-0.13,-0.098,0.041,0.292,-0.04,-0.162,-0.071,-0.096,-0.178,0.182,0.0,0.197,0.0,0.0,-1.231,0.0,0.0,0.0,-0.393,0.0,0.0,0.0,0.0,0.0,-0.975,0.0,0.0,81 +-0.605,-0.293,-0.312,-0.104,-0.246,0.311,-2.116,0.196,0.035,0.026,0.305,-0.706,0.164,-0.857,-0.741,0.123,-0.234,0.216,-0.897,-0.158,-0.214,-0.442,-0.002,-0.116,-0.819,0.076,0.006,0.279,-0.011,0.171,0.492,0.152,0.143,-0.097,0.03,0.089,0.19,-0.019,0.237,-0.258,-0.153,0.074,-1.589,-1.051,-0.876,0.084,-0.05,0.03,-0.194,0.11,0.118,0.192,0.033,0.205,0.071,-0.148,-0.032,0.116,0.112,0.163,0.155,-0.065,-0.217,-0.144,0.112,0.115,0.168,0.121,0.091,-0.249,0.2,-0.069,0.132,0.055,0.109,0.211,0.003,-0.216,-0.01,-0.075,-0.011,0.168,0.0,0.218,0.0,0.0,-0.67,0.0,0.0,0.0,0.0,-0.474,0.0,0.0,0.0,0.0,-0.975,0.0,0.0,82 +-0.738,-0.731,-0.655,-0.697,-0.426,0.286,-1.861,-0.138,0.078,0.22,0.356,-1.087,-0.27,-0.916,-0.811,0.17,-0.195,0.352,-0.927,-0.178,0.069,-0.345,0.014,-0.255,-0.499,-0.059,-0.015,0.245,0.108,0.177,0.456,0.409,-0.032,-0.065,0.052,0.086,0.312,-0.275,0.292,-0.453,-0.57,0.083,-1.919,-0.996,-1.123,0.077,-0.061,0.101,-0.157,0.16,0.152,0.147,0.036,0.142,0.163,-0.162,-0.104,0.099,0.124,0.278,0.157,-0.123,-0.212,-0.198,0.158,0.146,0.054,0.25,0.125,-0.324,0.355,-0.236,-0.082,-0.096,0.062,0.338,0.006,-0.224,-0.077,0.094,-0.064,0.271,0.0,0.555,0.0,0.0,-0.443,0.0,0.0,0.0,0.0,0.0,-0.552,0.0,0.0,0.0,-0.975,0.0,0.0,83 +-1.152,-0.588,0.084,-0.38,-0.492,0.222,-2.349,-0.002,0.757,-0.025,-0.159,-0.607,-0.241,-0.593,-1.239,-0.015,-0.143,0.21,-1.2,-0.167,-0.36,-0.206,0.05,-0.115,-0.705,0.027,-0.194,-0.187,0.442,-0.105,0.954,0.049,-0.015,-0.45,0.0,0.019,0.567,0.744,0.211,-0.311,-0.488,0.147,-3.472,-2.665,-1.062,0.125,-0.039,0.071,-0.038,0.185,0.133,0.031,-0.027,0.205,0.184,-0.117,-0.158,0.199,0.017,0.23,0.164,-0.149,-0.355,-0.216,0.206,0.133,0.11,0.232,0.073,-0.023,0.107,-0.191,-0.037,-0.119,0.093,0.271,0.003,-0.363,0.043,-0.256,-0.155,0.221,0.0,0.0,0.0,0.288,-1.685,0.0,0.0,0.0,0.0,0.0,0.0,-0.737,0.0,0.0,-0.975,0.0,0.0,84 
+-0.803,-0.653,-0.504,-0.604,-0.514,0.258,-2.209,-0.517,0.179,0.14,0.197,-1.1,-0.049,-1.135,-0.935,0.143,-0.254,0.196,-0.934,-0.255,0.105,-0.425,0.014,-0.29,-0.837,-0.237,0.012,0.123,0.016,0.216,0.374,0.314,-0.04,-0.135,0.006,0.127,0.35,-0.158,0.401,-0.405,-0.348,0.699,-2.264,-1.585,-0.843,0.125,-0.14,0.073,-0.058,0.227,0.155,0.05,-0.22,0.155,0.193,-0.093,-0.22,0.173,0.112,0.247,0.233,-0.138,-0.125,-0.135,0.152,0.193,0.144,0.223,0.035,-0.066,0.133,-0.189,-0.256,-0.149,-0.035,0.373,-0.139,-0.217,-0.146,0.09,-0.13,0.214,0.0,0.491,0.0,0.0,-0.896,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.975,0.0,-0.441,85 +-0.73,-0.322,-0.253,-0.342,-0.388,0.508,-2.1,-0.006,0.234,0.103,0.403,-0.696,0.055,-1.004,-0.885,0.132,-0.123,0.226,-0.791,-0.138,-0.065,-0.477,-0.251,-0.305,-0.708,-0.167,0.075,0.155,0.022,0.19,0.301,0.03,0.245,-0.065,0.019,0.137,0.128,-0.305,0.214,-0.228,-0.258,0.02,-1.391,-0.771,-0.603,0.096,-0.188,0.03,-0.043,0.091,0.184,0.135,-0.08,0.364,0.106,-0.153,-0.104,0.255,0.204,0.163,0.227,-0.177,-0.071,-0.094,0.145,0.098,0.09,0.236,-0.009,-0.038,0.155,-0.115,-0.093,-0.165,-0.079,0.256,0.05,0.007,-0.14,-0.077,-0.106,0.176,0.306,0.175,0.0,0.0,-1.83,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.355,0.0,-0.975,0.0,0.0,86 +-1.252,-0.663,-0.436,-0.408,-0.862,0.414,0.023,-0.091,0.002,0.118,0.347,-1.537,0.113,-0.505,-0.538,0.07,-0.18,0.344,-1.779,-0.061,-0.102,-0.179,-0.379,-0.627,0.129,-0.293,0.0,0.408,0.285,0.315,0.473,0.151,0.102,-0.116,-0.028,0.15,0.251,-0.103,0.353,-0.889,-0.338,0.181,-5.082,-4.828,-0.967,0.125,-0.044,0.085,-0.088,0.242,0.138,-0.007,-0.272,0.308,0.192,-0.065,-0.209,0.14,0.048,0.231,0.297,-0.426,-0.209,-0.136,0.164,0.189,0.119,0.396,0.05,-0.202,0.124,-0.213,-0.114,-0.526,-0.098,0.465,0.133,-0.386,-0.248,0.123,-0.335,0.161,0.0,0.083,0.0,0.0,-2.417,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.978,-0.975,0.0,0.0,87 +-0.615,-0.289,-0.08,-0.332,-0.355,0.429,-2.292,0.108,-0.11,0.023,0.466,-1.194,0.271,-0.69,-0.63,0.122,0.01,0.291,-1.013,0.119,-0.108,-0.207,-0.169,-0.278,-0.456,0.291,-0.011,0.379,0.035,0.201,0.496,0.095,0.16,-0.006,0.083,0.15,0.266,-0.4,0.184,-0.519,-0.168,0.321,-1.872,-1.179,-0.319,0.077,-0.003,-0.043,0.073,0.056,0.169,-0.005,-0.037,0.348,0.04,-0.146,-0.158,-0.011,0.078,0.066,0.219,-0.234,-0.241,-0.06,0.06,0.099,0.051,0.188,0.018,-0.175,0.223,-0.113,0.007,-0.194,-0.038,0.142,-0.01,-0.195,0.073,-0.071,-0.066,0.072,0.0,0.228,0.0,0.0,-1.4,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.981,0.0,0.0,88 diff --git a/data/pert_ind_last_col.csv b/data/pert_ind_last_col.csv new file mode 100644 index 0000000..12979bc --- /dev/null +++ b/data/pert_ind_last_col.csv @@ -0,0 +1,89 @@ +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,-0.863,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0 +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,-0.863,0.0,-0.551,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1 +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,-0.863,0.0,0.0,-0.415,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2 +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,-0.863,0.0,0.0,0.0,-0.515,0.0,0.0,0.0,0.0,0.0,0.0,0.0,3 
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,-0.863,0.0,0.0,0.0,0.0,-0.622,0.0,0.0,0.0,0.0,0.0,0.0,4 +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,-0.863,0.0,0.0,0.0,0.0,0.0,-0.943,0.0,0.0,0.0,0.0,0.0,5 +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,-0.863,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.474,6 +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,-0.863,0.0,0.0,0.0,0.0,0.0,0.0,-0.371,0.0,0.0,0.0,0.0,7 +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,-0.863,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-2.252,0.0,0.0,0.0,8 +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,-0.863,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-2.185,0.0,0.0,9 +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,-1.089,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,10 +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0,-2.833,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,11 +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0,-2.59,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,12 +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,-0.863,-2.59,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,13 +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0,-2.59,-0.551,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,14 +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0,-2.59,0.0,-0.415,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,15 +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0,-2.59,0.0,0.0,-0.515,0.0,0.0,0.0,0.0,0.0,0.0,0.0,16 +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0,-2.59,0.0,0.0,0.0,-0.622,0.0,0.0,0.0,0.0,0.0,0.0,17 +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0,-2.59,0.0,0.0,0.0,0.0,-0.943,0.0,0.0,0.0,0.0,0.0,18 
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0,-2.59,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.474,19 +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0,-2.59,0.0,0.0,0.0,0.0,0.0,-0.371,0.0,0.0,0.0,0.0,20 +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0,-2.59,0.0,0.0,0.0,0.0,0.0,0.0,-2.252,0.0,0.0,0.0,21 +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0,-2.59,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-2.185,0.0,0.0,22 +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0,0.0,-0.927,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,23 +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0,0.0,-0.551,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,24 +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0,0.0,-0.551,-0.415,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,25 +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0,0.0,-0.551,0.0,0.0,0.0,-0.943,0.0,0.0,0.0,0.0,0.0,26 +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0,0.0,-0.551,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.474,27 +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0,0.0,-0.551,0.0,0.0,0.0,0.0,0.0,-2.252,0.0,0.0,0.0,28 +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0,0.0,0.0,-0.415,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,29 +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0,0.0,0.0,-0.415,0.0,0.0,-0.943,0.0,0.0,0.0,0.0,0.0,30 +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0,0.0,0.0,-0.415,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.474,31 +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0,0.0,0.0,-0.415,0.0,0.0,0.0,0.0,-2.252,0.0,0.0,0.0,32 +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0,0.0,0.0,-0.272,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,33 
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0,0.0,0.0,0.0,-0.515,0.0,0.0,0.0,0.0,0.0,0.0,0.0,34 +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0,0.0,-0.551,0.0,-0.515,0.0,0.0,0.0,0.0,0.0,0.0,0.0,35 +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0,0.0,0.0,-0.415,-0.515,0.0,0.0,0.0,0.0,0.0,0.0,0.0,36 +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0,0.0,0.0,0.0,-0.515,0.0,-0.943,0.0,0.0,0.0,0.0,0.0,37 +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0,0.0,0.0,0.0,-0.515,0.0,0.0,0.0,0.0,0.0,0.0,-0.474,38 +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0,0.0,0.0,0.0,-0.515,0.0,0.0,-0.371,0.0,0.0,0.0,0.0,39 +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0,0.0,0.0,0.0,-0.515,0.0,0.0,0.0,-2.252,0.0,0.0,0.0,40 +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0,0.0,0.0,0.0,-0.515,0.0,0.0,0.0,0.0,0.0,0.0,0.0,41 +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0,0.0,0.0,0.0,0.0,-0.644,0.0,0.0,0.0,0.0,0.0,0.0,42 +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0,0.0,0.0,0.0,0.0,-0.622,0.0,0.0,0.0,0.0,0.0,0.0,43 +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0,0.0,-0.551,0.0,0.0,-0.622,0.0,0.0,0.0,0.0,0.0,0.0,44 +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0,0.0,0.0,-0.415,0.0,-0.622,0.0,0.0,0.0,0.0,0.0,0.0,45 +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0,0.0,0.0,0.0,-0.515,-0.622,0.0,0.0,0.0,0.0,0.0,0.0,46 +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0,0.0,0.0,0.0,0.0,-0.622,-0.943,0.0,0.0,0.0,0.0,0.0,47 +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0,0.0,0.0,0.0,0.0,-0.622,0.0,0.0,0.0,0.0,0.0,-0.474,48 
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0,0.0,0.0,0.0,0.0,-0.622,0.0,-0.371,0.0,0.0,0.0,0.0,49 +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0,0.0,0.0,0.0,0.0,-0.622,0.0,0.0,-2.252,0.0,0.0,0.0,50 +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0,0.0,0.0,0.0,0.0,0.0,-0.943,0.0,0.0,0.0,0.0,0.0,51 +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0,0.0,0.0,0.0,0.0,0.0,-0.943,0.0,-2.252,0.0,0.0,0.0,52 +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0,0.0,0.0,0.0,0.0,0.0,-1.089,0.0,0.0,0.0,0.0,0.0,53 +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.578,0.0,54 +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,-0.863,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.578,0.0,55 +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0,-2.59,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.578,0.0,56 +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0,0.0,-0.551,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.578,0.0,57 +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0,0.0,0.0,-0.415,0.0,0.0,0.0,0.0,0.0,0.0,-0.578,0.0,58 +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0,0.0,0.0,0.0,-0.515,0.0,0.0,0.0,0.0,0.0,-0.578,0.0,59 +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0,0.0,0.0,0.0,0.0,-0.622,0.0,0.0,0.0,0.0,-0.578,0.0,60 +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0,0.0,0.0,0.0,0.0,0.0,-0.943,0.0,0.0,0.0,-0.578,0.0,61 +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.578,-0.474,62 +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.371,0.0,0.0,-0.578,0.0,63 
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-2.252,0.0,-0.578,0.0,64 +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-2.185,-0.578,0.0,65 +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.474,66 +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0,0.0,0.0,0.0,0.0,0.0,-0.943,0.0,0.0,0.0,0.0,-0.474,67 +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-2.252,0.0,0.0,-0.474,68 +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.556,69 +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.371,0.0,0.0,0.0,0.0,70 +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0,0.0,-0.551,0.0,0.0,0.0,0.0,-0.371,0.0,0.0,0.0,0.0,71 +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0,0.0,0.0,-0.415,0.0,0.0,0.0,-0.371,0.0,0.0,0.0,0.0,72 +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0,0.0,0.0,0.0,0.0,0.0,-0.943,-0.371,0.0,0.0,0.0,0.0,73 +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.371,0.0,0.0,0.0,-0.474,74 +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.371,-2.252,0.0,0.0,0.0,75 +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.536,0.0,0.0,0.0,0.0,76 +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-2.252,0.0,0.0,0.0,77 +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-3.479,0.0,0.0,0.0,78 
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-2.185,0.0,0.0,79 +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0,0.0,-0.551,0.0,0.0,0.0,0.0,0.0,0.0,-2.185,0.0,0.0,80 +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0,0.0,0.0,-0.415,0.0,0.0,0.0,0.0,0.0,-2.185,0.0,0.0,81 +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0,0.0,0.0,0.0,-0.515,0.0,0.0,0.0,0.0,-2.185,0.0,0.0,82 +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0,0.0,0.0,0.0,0.0,-0.622,0.0,0.0,0.0,-2.185,0.0,0.0,83 +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0,0.0,0.0,0.0,0.0,0.0,-0.943,0.0,0.0,-2.185,0.0,0.0,84 +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-2.185,0.0,-0.474,85 +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.371,0.0,-2.185,0.0,0.0,86 +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-2.252,-2.185,0.0,0.0,87 +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-2.321,0.0,0.0,88 diff --git a/notebooks/dataloader.ipynb b/notebooks/dataloader.ipynb new file mode 100644 index 0000000..57f6980 --- /dev/null +++ b/notebooks/dataloader.ipynb @@ -0,0 +1,3431 @@ +{ + "cells": [ + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "This is a notebook for testing the dataloaders" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2023-06-14 23:54:04.431174: I tensorflow/core/platform/cpu_feature_guard.cc:193] This TensorFlow binary is optimized with oneAPI Deep Neural Network Library (oneDNN) to use the following CPU instructions in performance-critical operations: AVX2 AVX512F AVX512_VNNI FMA\n", + "To enable them in other operations, rebuild TensorFlow with the appropriate compiler flags.\n", + "2023-06-14 23:54:04.628344: I tensorflow/core/util/port.cc:104] oneDNN custom operations are on. You may see slightly different numerical results due to floating-point round-off errors from different computation orders. 
To turn them off, set the environment variable `TF_ENABLE_ONEDNN_OPTS=0`.\n", + "2023-06-14 23:54:04.634777: W tensorflow/compiler/xla/stream_executor/platform/default/dso_loader.cc:64] Could not load dynamic library 'libcudart.so.11.0'; dlerror: libcudart.so.11.0: cannot open shared object file: No such file or directory; LD_LIBRARY_PATH: /cm/shared/apps/lsf10/10.1/linux3.10-glibc2.17-x86_64/lib:/data/weirauchlab/opt/lib:/data/weirauchlab/opt/lib64:/data/weirauchlab/local/lib\n", + "2023-06-14 23:54:04.634820: I tensorflow/compiler/xla/stream_executor/cuda/cudart_stub.cc:29] Ignore above cudart dlerror if you do not have a GPU set up on your machine.\n", + "2023-06-14 23:54:09.211164: W tensorflow/compiler/xla/stream_executor/platform/default/dso_loader.cc:64] Could not load dynamic library 'libnvinfer.so.7'; dlerror: libnvinfer.so.7: cannot open shared object file: No such file or directory; LD_LIBRARY_PATH: /cm/shared/apps/lsf10/10.1/linux3.10-glibc2.17-x86_64/lib:/data/weirauchlab/opt/lib:/data/weirauchlab/opt/lib64:/data/weirauchlab/local/lib\n", + "2023-06-14 23:54:09.212455: W tensorflow/compiler/xla/stream_executor/platform/default/dso_loader.cc:64] Could not load dynamic library 'libnvinfer_plugin.so.7'; dlerror: libnvinfer_plugin.so.7: cannot open shared object file: No such file or directory; LD_LIBRARY_PATH: /cm/shared/apps/lsf10/10.1/linux3.10-glibc2.17-x86_64/lib:/data/weirauchlab/opt/lib:/data/weirauchlab/opt/lib64:/data/weirauchlab/local/lib\n", + "2023-06-14 23:54:09.212486: W tensorflow/compiler/tf2tensorrt/utils/py_utils.cc:38] TF-TRT Warning: Cannot dlopen some TensorRT libraries. If you would like to use Nvidia GPU with TensorRT, please make sure the missing libraries mentioned above are installed properly.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "WARNING:tensorflow:From /users/ngun7t/anaconda3/envs/cellbox-3.6-2/lib/python3.8/site-packages/tensorflow/python/compat/v2_compat.py:107: disable_resource_variables (from tensorflow.python.ops.variable_scope) is deprecated and will be removed in a future version.\n", + "Instructions for updating:\n", + "non-resource variables are not supported in the long term\n", + "================================================================================\n", + " _____ _ _ ____ \n", + " / ____| | | | _ \\ \n", + " | | ___| | | |_) | _____ __ \n", + " | | / _ \\ | | _ < / _ \\ \\/ / \n", + " | |___| __/ | | |_) | (_) > < \n", + " \\_____\\___|_|_|____/ \\___/_/\\_\\ \n", + "Running CellBox scripts developed in Sander lab\n", + "Maintained by Bo Yuan, Judy Shen, and Augustin Luna; contributions by Daniel Ritter\n", + "\n", + " version 0.3.2\n", + " -- Feb 10, 2023 --\n", + " * Modify CellBox to support TF2 \n", + " \n", + "Tutorials and documentations are available at https://github.com/sanderlab/CellBox\n", + "If you want to discuss the usage or to report a bug, please use the 'Issues' function at GitHub.\n", + "If you find CellBox useful for your research, please consider citing the corresponding publication.\n", + "For more information, please email us at boyuan@g.harvard.edu and c_shen@g.harvard.edu, augustin_luna@hms.harvard.edu\n", + " --------------------------------------------------------------------------------\n" + ] + } + ], + "source": [ + "import cellbox\n", + "import os\n", + "import numpy as np\n", + "import pandas as pd\n", + "import tensorflow.compat.v1 as tf\n", + "import shutil\n", + "import argparse\n", + "import json\n", + "tf.disable_v2_behavior()" + ] + }, + { + 
"attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Tensorflow original code" + ] + }, + { + "cell_type": "code", + "execution_count": 30, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "{'experiment_id': 'Example_LOO', 'model_prefix': 'drug', 'ckpt_name': 'model11.ckpt', 'export_verbose': 3, 'experiment_type': 'leave one out (w/o single)', 'sparse_data': False, 'batchsize': 16, 'trainset_ratio': 0.7, 'validset_ratio': 0.8, 'n_batches_eval': None, 'add_noise_level': 0, 'dT': 0.1, 'ode_solver': 'heun', 'envelope_form': 'tanh', 'envelope': 0, 'pert_form': 'by u', 'ode_degree': 1, 'ode_last_steps': 2, 'n_iter_buffer': 50, 'n_iter_patience': 100, 'weight_loss': 'None', 'l1lambda': 0.0001, 'l2lambda': 0.0001, 'model': 'CellBox', 'pert_file': '/users/ngun7t/Documents/cellbox-jun-6/data/pert_ind_last_col.csv', 'expr_file': '/users/ngun7t/Documents/cellbox-jun-6/data/expr_ind_last_col.csv', 'node_index_file': '/users/ngun7t/Documents/cellbox-jun-6/data/node_Index.csv', 'n_protein_nodes': 82, 'n_activity_nodes': 87, 'n_x': 100, 'envelop_form': 'tanh', 'envelop': 0, 'n_epoch': 10000, 'n_iter': 10000, 'stages': [{'nT': 100, 'sub_stages': [{'lr_val': 0.1, 'l1lambda': 0.01, 'n_iter_patience': 1000}, {'lr_val': 0.01, 'l1lambda': 0.01}, {'lr_val': 0.01, 'l1lambda': 0.0001}, {'lr_val': 0.001, 'l1lambda': 1e-05}]}, {'nT': 200, 'sub_stages': [{'lr_val': 0.001, 'l1lambda': 0.0001}]}, {'nT': 400, 'sub_stages': [{'lr_val': 0.001, 'l1lambda': 0.0001}]}], 'ckpt_path_full': './model11.ckpt', 'drug_index': 4, 'seed': 1000}\n", + "Working directory is ready at results/Example_LOO_a7102a7e8a4ad3c23e9eca13cab65b6f.\n", + "Hello!\n", + "Hello!\n", + "Hello!\n" + ] + } + ], + "source": [ + "def set_seed(in_seed):\n", + " int_seed = int(in_seed)\n", + " tf.compat.v1.set_random_seed(int_seed)\n", + " np.random.seed(int_seed)\n", + "\n", + "\n", + "def prepare_workdir(in_cfg):\n", + " # Read Data\n", + " in_cfg.root_dir = os.getcwd()\n", + " in_cfg.node_index = pd.read_csv(in_cfg.node_index_file, header=None, names=None) \\\n", + " if hasattr(in_cfg, 'node_index_file') else pd.DataFrame(np.arange(in_cfg.n_x))\n", + "\n", + " # Create Output Folder\n", + " experiment_path = 'results/{}_{}'.format(in_cfg.experiment_id, md5)\n", + " try:\n", + " os.makedirs(experiment_path)\n", + " except Exception:\n", + " pass\n", + " out_cfg = vars(in_cfg)\n", + " out_cfg = {key: out_cfg[key] for key in out_cfg if type(out_cfg[key]) is not pd.DataFrame}\n", + " os.chdir(experiment_path)\n", + " json.dump(out_cfg, open('config.json', 'w'), indent=4)\n", + "\n", + " if \"leave one out\" in in_cfg.experiment_type:\n", + " try:\n", + " in_cfg.model_prefix = '{}_{}'.format(in_cfg.model_prefix, in_cfg.drug_index)\n", + " except Exception('Drug index not specified') as e:\n", + " raise e\n", + "\n", + " in_cfg.working_index = in_cfg.model_prefix + \"_\" + str(working_index).zfill(3)\n", + "\n", + " try:\n", + " shutil.rmtree(in_cfg.working_index)\n", + " except Exception:\n", + " pass\n", + " os.makedirs(in_cfg.working_index)\n", + " os.chdir(in_cfg.working_index)\n", + "\n", + " with open(\"record_eval.csv\", 'w') as f:\n", + " f.write(\"epoch,iter,train_loss,valid_loss,train_mse,valid_mse,test_mse,time_elapsed\\n\")\n", + "\n", + " print('Working directory is ready at {}.'.format(experiment_path))\n", + " return 0\n", + "\n", + "experiment_config_path = \"/users/ngun7t/Documents/cellbox-jun-6/configs_dev/Example.leave_one_drug_out.json\"\n", + "working_index = 
0\n", + "stage = {\n", + " \"nT\": 100,\n", + " \"sub_stages\":[\n", + " {\"lr_val\": 0.1,\"l1lambda\": 0.01, \"n_iter_patience\":1000},\n", + " {\"lr_val\": 0.01,\"l1lambda\": 0.01},\n", + " {\"lr_val\": 0.01,\"l1lambda\": 0.0001},\n", + " {\"lr_val\": 0.001,\"l1lambda\": 0.00001}\n", + " ]}\n", + "\n", + "cfg = cellbox.config.Config(experiment_config_path)\n", + "cfg.ckpt_path_full = os.path.join('./', cfg.ckpt_name)\n", + "md5 = cellbox.utils.md5(cfg)\n", + "cfg.drug_index = 4 # Change this for testing purposes\n", + "cfg.seed = working_index + cfg.seed if hasattr(cfg, \"seed\") else working_index + 1000\n", + "set_seed(cfg.seed)\n", + "print(vars(cfg))\n", + "\n", + "prepare_workdir(cfg)\n", + "logger = cellbox.utils.TimeLogger(time_logger_step=1, hierachy=3)\n", + "args = cfg\n", + "for i, stage in enumerate(cfg.stages):\n", + " set_seed(cfg.seed)\n", + " cfg = cellbox.dataset.factory(cfg)\n", + " #args.sub_stages = stage['sub_stages']\n", + " #args.n_T = stage['nT']\n", + " #model = cellbox.model.factory(args)\n", + " if i == 2: break" + ] + }, + { + "cell_type": "code", + "execution_count": 31, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "(60, 100)\n", + "(16, 100)\n", + "(13, 100)\n", + "\n" + ] + } + ], + "source": [ + "print(cfg.dataset[\"pert_train\"].shape)\n", + "print(cfg.dataset[\"pert_valid\"].shape)\n", + "print(cfg.dataset[\"pert_test\"].shape)\n", + "print(type(cfg.dataset[\"pert_train\"]))\n", + "\n", + "d = vars(cfg)\n", + "iter_train = d[\"iter_train\"]\n", + "iter_monitor = d[\"iter_monitor\"]\n", + "feed_dict = d[\"feed_dicts\"]" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "#### A function to identify which row will not show up in loo mode" + ] + }, + { + "cell_type": "code", + "execution_count": 32, + "metadata": {}, + "outputs": [], + "source": [ + "def populate_drug_indices_map():\n", + " drug_indices_map = []\n", + " for drug_index in range(14):\n", + " double_idx = cfg.loo.all(axis=1)\n", + " testidx = (cfg.loo == drug_index).any(axis=1)\n", + "\n", + " if False:\n", + " testidx = pd.concat([testidx, double_idx], axis=1)\n", + " testidx = testidx.all(axis=1)\n", + "\n", + " loo_label = pd.read_csv(\"/users/ngun7t/Documents/cellbox-jun-6/data/loo_label.csv\", header=None)[testidx]\n", + " for i in range(14):\n", + " if (loo_label == i).any(axis=1).all():\n", + " drug_indices_map.append(i)\n", + " break\n", + " return drug_indices_map\n", + "\n", + "def loo_rows(drug_index, drug_indices_map, singles):\n", + " true_drug_index = drug_indices_map[drug_index]\n", + " loo_label = pd.read_csv(\"/users/ngun7t/Documents/cellbox-jun-6/data/loo_label.csv\", header=None)\n", + " ind_rows = loo_label.index[(loo_label[[0, 1]] == true_drug_index).any(axis=1)].tolist()\n", + " return np.array(ind_rows)\n", + "\n", + "drug_indices_map = populate_drug_indices_map()\n", + "if cfg.experiment_type == \"leave one out (w/o single)\":\n", + " inds = loo_rows(cfg.drug_index, drug_indices_map, False)\n", + "else:\n", + " inds = loo_rows(cfg.drug_index, drug_indices_map, True)" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "[0, 1, 2, 3, 4, 5, 6, 7, 10, 11, 12, 8, 9]" + ] + }, + "execution_count": 10, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "drug_indices_map" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "#### A 
function to identify which row will not show up in single-to-combo mode" + ] + }, + { + "cell_type": "code", + "execution_count": 21, + "metadata": {}, + "outputs": [], + "source": [ + "def s2c_row_inds():\n", + " loo_label = pd.read_csv(\"/users/ngun7t/Documents/cellbox-jun-6/data/loo_label.csv\", header=None)\n", + " rows_with_single_drugs = loo_label.index[(loo_label[[0, 1]] == 0).any(axis=1)].tolist()\n", + " rows_with_multiple_drugs = list(set(list(range(loo_label.shape[0]))) - set(rows_with_single_drugs))\n", + " return rows_with_single_drugs, rows_with_multiple_drugs\n", + " \n", + "rows_with_single_drugs, rows_with_multiple_drugs = s2c_row_inds()" + ] + }, + { + "cell_type": "code", + "execution_count": 33, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Finished\n" + ] + } + ], + "source": [ + "from tensorflow.core.protobuf import rewriter_config_pb2\n", + "from tensorflow.compat.v1.errors import OutOfRangeError\n", + "\n", + "config = tf.compat.v1.ConfigProto()\n", + "off = rewriter_config_pb2.RewriterConfig.OFF\n", + "config.graph_options.rewrite_options.memory_optimization = off\n", + "\n", + "# Launching session\n", + "sess = tf.compat.v1.Session(config=config)\n", + "sess.run(tf.compat.v1.global_variables_initializer())\n", + "\n", + "items_pert, items_expr = [], []\n", + "next_element = iter_train.get_next()\n", + "\n", + "sess.run(iter_train.initializer, feed_dict=feed_dict['train_set'])\n", + "\n", + "try:\n", + " while True:\n", + " result1, result2 = sess.run(next_element, feed_dict=feed_dict['train_set'])\n", + " items_pert.append(result1)\n", + " items_expr.append(result2)\n", + "except OutOfRangeError:\n", + " print(\"Finished\")" + ] + }, + { + "cell_type": "code", + "execution_count": 34, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "4" + ] + }, + "execution_count": 34, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "len(items_pert)" + ] + }, + { + "cell_type": "code", + "execution_count": 28, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "array([ 2, 15, 25, 29, 30, 31, 32, 33, 36, 45, 58, 72, 81])" + ] + }, + "execution_count": 28, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "inds" + ] + }, + { + "cell_type": "code", + "execution_count": 35, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Ind: [48. 82. 63. 67. 7. 28. 64. 9. 20. 74. 80. 12. 83. 11. 37. 41.] shares the ind that contains the drug index: []\n", + "Ind: [ 8. 6. 78. 18. 77. 60. 66. 56. 59. 68. 65. 70. 13. 86. 44. 3.] shares the ind that contains the drug index: []\n", + "Ind: [85. 54. 23. 49. 43. 4. 57. 26. 35. 40. 17. 88. 16. 39. 75. 10.] shares the ind that contains the drug index: []\n", + "Ind: [24. 19. 14. 27. 53. 34. 47. 5. 87. 21. 42. 38.] 
shares the ind that contains the drug index: []\n" + ] + } + ], + "source": [ + "for pert in items_pert:\n", + " ind = pert[:, -1]\n", + " print(f\"Ind: {ind} shares the ind that contains the drug index: {np.intersect1d(ind, inds)}\")\n", + " \n", + "#cfg.drug_index" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Pytorch code" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "{'experiment_id': 'Example_LOO', 'model_prefix': 'drug', 'ckpt_name': 'model11.ckpt', 'export_verbose': 3, 'experiment_type': 'leave one out (w/o single)', 'sparse_data': False, 'batchsize': 16, 'trainset_ratio': 0.7, 'validset_ratio': 0.8, 'n_batches_eval': None, 'add_noise_level': 0, 'dT': 0.1, 'ode_solver': 'heun', 'envelope_form': 'tanh', 'envelope': 0, 'pert_form': 'by u', 'ode_degree': 1, 'ode_last_steps': 2, 'n_iter_buffer': 50, 'n_iter_patience': 100, 'weight_loss': 'None', 'l1lambda': 0.0001, 'l2lambda': 0.0001, 'model': 'CellBox', 'pert_file': '/users/ngun7t/Documents/cellbox-jun-6/data/pert_ind_last_col.csv', 'expr_file': '/users/ngun7t/Documents/cellbox-jun-6/data/expr_ind_last_col.csv', 'node_index_file': '/users/ngun7t/Documents/cellbox-jun-6/data/node_Index.csv', 'n_protein_nodes': 82, 'n_activity_nodes': 87, 'n_x': 100, 'envelop_form': 'tanh', 'envelop': 0, 'n_epoch': 10000, 'n_iter': 10000, 'stages': [{'nT': 100, 'sub_stages': [{'lr_val': 0.1, 'l1lambda': 0.01, 'n_iter_patience': 1000}, {'lr_val': 0.01, 'l1lambda': 0.01}, {'lr_val': 0.01, 'l1lambda': 0.0001}, {'lr_val': 0.001, 'l1lambda': 1e-05}]}, {'nT': 200, 'sub_stages': [{'lr_val': 0.001, 'l1lambda': 0.0001}]}, {'nT': 400, 'sub_stages': [{'lr_val': 0.001, 'l1lambda': 0.0001}]}], 'ckpt_path_full': './model11.ckpt', 'drug_index': 5, 'seed': 1000}\n", + "Working directory is ready at results/Example_LOO_a7102a7e8a4ad3c23e9eca13cab65b6f.\n" + ] + } + ], + "source": [ + "def set_seed(in_seed):\n", + " int_seed = int(in_seed)\n", + " tf.compat.v1.set_random_seed(int_seed)\n", + " np.random.seed(int_seed)\n", + "\n", + "\n", + "def prepare_workdir(in_cfg):\n", + " # Read Data\n", + " in_cfg.root_dir = os.getcwd()\n", + " in_cfg.node_index = pd.read_csv(in_cfg.node_index_file, header=None, names=None) \\\n", + " if hasattr(in_cfg, 'node_index_file') else pd.DataFrame(np.arange(in_cfg.n_x))\n", + "\n", + " # Create Output Folder\n", + " experiment_path = 'results/{}_{}'.format(in_cfg.experiment_id, md5)\n", + " try:\n", + " os.makedirs(experiment_path)\n", + " except Exception:\n", + " pass\n", + " out_cfg = vars(in_cfg)\n", + " out_cfg = {key: out_cfg[key] for key in out_cfg if type(out_cfg[key]) is not pd.DataFrame}\n", + " os.chdir(experiment_path)\n", + " json.dump(out_cfg, open('config.json', 'w'), indent=4)\n", + "\n", + " if \"leave one out\" in in_cfg.experiment_type:\n", + " try:\n", + " in_cfg.model_prefix = '{}_{}'.format(in_cfg.model_prefix, in_cfg.drug_index)\n", + " except Exception('Drug index not specified') as e:\n", + " raise e\n", + "\n", + " in_cfg.working_index = in_cfg.model_prefix + \"_\" + str(working_index).zfill(3)\n", + "\n", + " try:\n", + " shutil.rmtree(in_cfg.working_index)\n", + " except Exception:\n", + " pass\n", + " os.makedirs(in_cfg.working_index)\n", + " os.chdir(in_cfg.working_index)\n", + "\n", + " with open(\"record_eval.csv\", 'w') as f:\n", + " f.write(\"epoch,iter,train_loss,valid_loss,train_mse,valid_mse,test_mse,time_elapsed\\n\")\n", + "\n", + " 
print('Working directory is ready at {}.'.format(experiment_path))\n", + " return 0\n", + "\n", + "experiment_config_path = \"/users/ngun7t/Documents/cellbox-jun-6/configs_dev/Example.leave_one_drug_out.json\"\n", + "working_index = 0\n", + "stage = {\n", + " \"nT\": 100,\n", + " \"sub_stages\":[\n", + " {\"lr_val\": 0.1,\"l1lambda\": 0.01, \"n_iter_patience\":1000},\n", + " {\"lr_val\": 0.01,\"l1lambda\": 0.01},\n", + " {\"lr_val\": 0.01,\"l1lambda\": 0.0001},\n", + " {\"lr_val\": 0.001,\"l1lambda\": 0.00001}\n", + " ]}\n", + "\n", + "cfg = cellbox.config.Config(experiment_config_path)\n", + "cfg.ckpt_path_full = os.path.join('./', cfg.ckpt_name)\n", + "md5 = cellbox.utils.md5(cfg)\n", + "cfg.drug_index = 5 # Change this for testing purposes\n", + "cfg.seed = working_index + cfg.seed if hasattr(cfg, \"seed\") else working_index + 1000\n", + "set_seed(cfg.seed)\n", + "print(vars(cfg))\n", + "\n", + "prepare_workdir(cfg)\n", + "logger = cellbox.utils.TimeLogger(time_logger_step=1, hierachy=3)\n", + "args = cfg\n", + "for i, stage in enumerate(cfg.stages):\n", + " set_seed(cfg.seed)\n", + " cfg = cellbox.dataset_torch.factory(cfg)\n", + " #args.sub_stages = stage['sub_stages']\n", + " #args.n_T = stage['nT']\n", + " #model = cellbox.model.factory(args)\n", + " if i == 0: break" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [], + "source": [ + "def populate_drug_indices_map():\n", + " drug_indices_map = []\n", + " for drug_index in range(14):\n", + " double_idx = cfg.loo.all(axis=1)\n", + " testidx = (cfg.loo == drug_index).any(axis=1)\n", + "\n", + " if False:\n", + " testidx = pd.concat([testidx, double_idx], axis=1)\n", + " testidx = testidx.all(axis=1)\n", + "\n", + " loo_label = pd.read_csv(\"/users/ngun7t/Documents/cellbox-jun-6/data/loo_label.csv\", header=None)[testidx]\n", + " for i in range(14):\n", + " if (loo_label == i).any(axis=1).all():\n", + " drug_indices_map.append(i)\n", + " break\n", + " return drug_indices_map\n", + "\n", + "def loo_rows(drug_index, drug_indices_map, singles):\n", + " true_drug_index = drug_indices_map[drug_index]\n", + " loo_label = pd.read_csv(\"/users/ngun7t/Documents/cellbox-jun-6/data/loo_label.csv\", header=None)\n", + " ind_rows = loo_label.index[(loo_label[[0, 1]] == true_drug_index).any(axis=1)].tolist()\n", + " return np.array(ind_rows) - 1\n", + "\n", + "drug_indices_map = populate_drug_indices_map()\n", + "if cfg.experiment_type == \"leave one out (w/o single)\":\n", + " inds = loo_rows(cfg.drug_index, drug_indices_map, False)\n", + "else:\n", + " inds = loo_rows(cfg.drug_index, drug_indices_map, True)" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "4\n" + ] + } + ], + "source": [ + "items_pert, items_expr = [], []\n", + "for pert, expr in cfg.iter_train:\n", + " items_pert.append(pert)\n", + " items_expr.append(expr)\n", + "\n", + "print(len(items_pert))" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "(16, 100)" + ] + }, + "execution_count": 6, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "np.array(items_pert[0]).shape" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Ind: tensor([21., 42., 28., 8., 11., 66., 86., 26., 45., 7., 19., 64., 49., 4.,\n", + " 75., 44.]) 
shares the ind that contains the drug index: []\n", + "Ind: tensor([55., 48., 85., 31., 10., 50., 24., 71., 87., 68., 33., 27., 23., 83.,\n", + " 30., 67.]) shares the ind that contains the drug index: []\n", + "Ind: tensor([74., 5., 9., 18., 32., 60., 65., 15., 61., 25., 88., 57., 13., 6.,\n", + " 81., 2.]) shares the ind that contains the drug index: []\n", + "Ind: tensor([77., 20., 58., 17., 80., 78., 54., 69., 43., 12., 29., 14.]) shares the ind that contains the drug index: []\n" + ] + } + ], + "source": [ + "for pert in items_pert:\n", + " ind = pert[:, -1]\n", + " print(f\"Ind: {ind} shares the ind that contains the drug index: {np.intersect1d(ind, inds+1)}\")" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Some random code" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [], + "source": [ + "loo_label = pd.read_csv(\"/users/ngun7t/Documents/cellbox-jun-6/data/loo_label.csv\", header=None)\n", + "ind_rows = loo_label.index[(loo_label[[0, 1]] == 0).any(axis=1)].tolist()" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
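The printouts above check by eye that no training batch overlaps the left-out rows. Below is a minimal sketch that turns the eyeball check into a hard assertion, assuming as in the cells above that each `pert` batch carries its original row id in the last column and that `inds` holds the 0-based ids of the held-out rows (note that the two verification loops compare against `inds` and `inds + 1` respectively, so the 0- vs 1-based convention needs to be pinned down once and reused):

```python
import numpy as np

def assert_rows_excluded(iter_train, left_out_rows):
    """Fail loudly if a left-out row id appears in any training batch."""
    for pert, _ in iter_train:
        batch_ids = np.asarray(pert[:, -1])  # row ids ride in the last column
        overlap = np.intersect1d(batch_ids, left_out_rows)
        assert overlap.size == 0, f"left-out rows leaked into training: {overlap}"

# e.g. assert_rows_excluded(cfg.iter_train, inds)
```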
" + ], + "text/plain": [ + " 0 1\n", + "0 1 0\n", + "10 1 0\n", + "11 2 0\n", + "12 2 0\n", + "23 3 0\n", + "24 3 0\n", + "29 4 0\n", + "33 4 0\n", + "34 5 0\n", + "41 5 0\n", + "42 6 0\n", + "43 6 0\n", + "51 7 0\n", + "53 7 0\n", + "54 8 0\n", + "66 9 0\n", + "69 9 0\n", + "70 10 0\n", + "76 10 0\n", + "77 11 0\n", + "78 11 0\n", + "79 12 0\n", + "88 12 0" + ] + }, + "execution_count": 8, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "loo_label.iloc[ind_rows]" + ] + }, + { + "cell_type": "code", + "execution_count": 47, + "metadata": {}, + "outputs": [], + "source": [ + "drug_index = int(cfg.drug_index)\n", + "double_idx = cfg.loo.all(axis=1)\n", + "\n", + "testidx = (cfg.loo == drug_index).any(axis=1)\n", + "\n", + "if True:\n", + " testidx = pd.concat([testidx, double_idx], axis=1)\n", + " testidx = testidx.all(axis=1)\n", + "\n", + "nexp, _ = cfg.pert.shape\n", + "nvalid = nexp - sum(testidx)\n", + "ntrain = int(nvalid * cfg.validset_ratio)\n", + "\n", + "valid_pos = np.random.choice(range(nvalid), nvalid, replace=False)" + ] + }, + { + "cell_type": "code", + "execution_count": 52, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
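To make the two masks in the partition cell above concrete, here is a toy run with a hypothetical 4-row `loo` table and `drug_index = 5`: `double_idx` keeps only genuine combination rows (both drug slots non-zero), and ANDing it with `testidx` restricts the held-out set to combinations involving the target drug.

```python
import pandas as pd

loo = pd.DataFrame([[1, 0], [1, 5], [5, 7], [2, 3]])  # hypothetical drug-pair table
double_idx = loo.all(axis=1)            # both entries non-zero: a real combination
testidx = (loo == 5).any(axis=1)        # the row involves drug 5
testidx = pd.concat([testidx, double_idx], axis=1).all(axis=1)
print(testidx.tolist())                 # [False, True, True, False]
```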
" + ], + "text/plain": [ + " 0 1\n", + "0 1 0\n", + "1 1 3\n", + "2 1 4\n", + "3 1 5\n", + "4 1 6\n", + ".. .. ..\n", + "84 7 10\n", + "85 10 12\n", + "86 8 10\n", + "87 9 10\n", + "88 10 0\n", + "\n", + "[89 rows x 2 columns]" + ] + }, + "execution_count": 52, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "cfg.loo" + ] + }, + { + "cell_type": "code", + "execution_count": 51, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "0 False\n", + "1 False\n", + "2 False\n", + "3 False\n", + "4 False\n", + " ... \n", + "84 True\n", + "85 True\n", + "86 True\n", + "87 True\n", + "88 False\n", + "Length: 89, dtype: bool" + ] + }, + "execution_count": 51, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "testidx" + ] + }, + { + "cell_type": "code", + "execution_count": 45, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "0 False\n", + "1 False\n", + "2 False\n", + "3 False\n", + "4 False\n", + " ... \n", + "84 True\n", + "85 True\n", + "86 True\n", + "87 True\n", + "88 True\n", + "Length: 89, dtype: bool" + ] + }, + "execution_count": 45, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "(cfg.loo == 10).any(axis=1)" + ] + }, + { + "cell_type": "code", + "execution_count": 73, + "metadata": {}, + "outputs": [], + "source": [ + "def pad_and_realign(x, length, idx_shift=0):\n", + " x -= idx_shift\n", + " padded = np.pad(x, (0, length - len(x)), 'constant')\n", + " return padded\n", + "\n", + "group_df = pd.DataFrame(np.where(cfg.pert != 0), index=['row_id', 'pert_idx']).T.groupby('row_id')\n", + "max_combo_degree = group_df.pert_idx.count().max()\n", + "cfg.loo = pd.DataFrame(group_df.pert_idx.apply(\n", + " lambda x: pad_and_realign(x, max_combo_degree, cfg.n_activity_nodes - 1)\n", + ").tolist())" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Config file and csv data" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "#### Read and check shape" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "(88, 99)\n", + "(88, 99)\n" + ] + } + ], + "source": [ + "import pandas as pd\n", + "\n", + "pert_df = pd.read_csv(\"/users/ngun7t/Documents/cellbox/data/pert.csv\")\n", + "expr_df = pd.read_csv(\"/users/ngun7t/Documents/cellbox-jun-6/data/expr.csv\")\n", + "print(pert_df.shape)\n", + "print(expr_df.shape)" + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
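For concreteness, here is what `pad_and_realign` (redefined a few cells above) produces with `cfg.n_activity_nodes = 87` and a 100-column perturbation matrix: raw column indices such as 87, 89, 99 are shifted down by `n_activity_nodes - 1 = 86` into compact drug ids and zero-padded up to the maximum combo degree, which is exactly how `cfg.loo` rows like `[1, 3, 13]` and `[1, 0, 0]` arise.

```python
import numpy as np

def pad_and_realign(x, length, idx_shift=0):
    x = np.asarray(x) - idx_shift                       # copy, so the input is not mutated
    return np.pad(x, (0, length - len(x)), 'constant')  # zero-pad to the max combo degree

print(pad_and_realign([87, 89, 99], length=3, idx_shift=86))  # [ 1  3 13]
print(pad_and_realign([87], length=3, idx_shift=86))          # [1 0 0]
```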
" + ], + "text/plain": [ + " 0 0.1 0.2 0.3 0.4 0.5 0.6 0.7 0.8 0.9 ... 0.88 0.89 0.90 \\\n", + "0 0 0 0 0 0 0 0 0 0 0 ... -0.551 0.000 0.000 \n", + "1 0 0 0 0 0 0 0 0 0 0 ... 0.000 -0.415 0.000 \n", + "2 0 0 0 0 0 0 0 0 0 0 ... 0.000 0.000 -0.515 \n", + "3 0 0 0 0 0 0 0 0 0 0 ... 0.000 0.000 0.000 \n", + "4 0 0 0 0 0 0 0 0 0 0 ... 0.000 0.000 0.000 \n", + "\n", + " 0.91 0.92 0.93 0.94 0.95 0.96 0.97 \n", + "0 0.000 0.000 0.0 0.0 0.0 0.0 0.0 \n", + "1 0.000 0.000 0.0 0.0 0.0 0.0 0.0 \n", + "2 0.000 0.000 0.0 0.0 0.0 0.0 0.0 \n", + "3 -0.622 0.000 0.0 0.0 0.0 0.0 0.0 \n", + "4 0.000 -0.943 0.0 0.0 0.0 0.0 0.0 \n", + "\n", + "[5 rows x 99 columns]" + ] + }, + "execution_count": 16, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "pert_df.head()" + ] + }, + { + "cell_type": "code", + "execution_count": 17, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
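One detail worth flagging in the shape check above: `pert.csv` and `expr.csv` have no header row, but they were read without `header=None`, so pandas consumed the first experiment as column labels. That is why both shapes print as `(88, 99)` instead of 89 rows, and why the column names look like data values (`0.1`, `-0.435`, and so on). The later cells read the same files correctly:

```python
import pandas as pd

# No header row in these CSVs: header=None keeps all 89 experiment rows.
pert_df = pd.read_csv("/users/ngun7t/Documents/cellbox-jun-6/data/pert.csv", header=None)
expr_df = pd.read_csv("/users/ngun7t/Documents/cellbox-jun-6/data/expr.csv", header=None)
print(pert_df.shape, expr_df.shape)  # (89, 99) (89, 99)
```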
" + ], + "text/plain": [ + " -0.435 -0.608 -0.855 -0.712 -0.113 -0.089 0.096 0.291 0.428 0.375 \\\n", + "0 -0.308 -0.322 -0.999 -0.594 0.022 -0.061 0.093 0.222 0.255 0.645 \n", + "1 -0.402 -0.549 -0.837 -0.675 -0.011 -0.103 0.089 0.418 0.276 0.439 \n", + "2 -0.475 -0.652 -0.935 -0.751 -0.085 -0.019 0.064 0.207 0.147 0.416 \n", + "3 -0.611 -1.097 -1.234 -0.928 -0.114 -0.113 -0.002 -0.042 0.277 0.732 \n", + "4 -0.649 -0.490 -0.294 -0.491 -0.251 -0.361 -0.018 -0.144 1.191 0.098 \n", + "\n", + " ... 0.4 0.5 0.6 0.7 0.8 0.9 0.10 0.11 0.12 0.13 \n", + "0 ... -0.89 0.000 0.000 0.000 0.000 0.0 0.0 0.0 0.0 0.0 \n", + "1 ... 0.00 -0.393 0.000 0.000 0.000 0.0 0.0 0.0 0.0 0.0 \n", + "2 ... 0.00 0.000 -0.474 0.000 0.000 0.0 0.0 0.0 0.0 0.0 \n", + "3 ... 0.00 0.000 0.000 -0.552 0.000 0.0 0.0 0.0 0.0 0.0 \n", + "4 ... 0.00 0.000 0.000 0.000 -0.737 0.0 0.0 0.0 0.0 0.0 \n", + "\n", + "[5 rows x 99 columns]" + ] + }, + "execution_count": 17, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "expr_df.head()" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "#### Add index to Cellbox csv files" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [], + "source": [ + "import pandas as pd\n", + "import numpy as np" + ] + }, + { + "cell_type": "code", + "execution_count": 23, + "metadata": {}, + "outputs": [], + "source": [ + "pert_df = pd.read_csv(\"/users/ngun7t/Documents/cellbox-jun-6/data/pert.csv\", header=None)\n", + "expr_df = pd.read_csv(\"/users/ngun7t/Documents/cellbox-jun-6/data/expr.csv\", header=None)\n", + "\n", + "nrows = pert_df.shape[0]\n", + "ind = list(np.arange(0, nrows))\n", + "\n", + "pert_df[99] = ind\n", + "pert_df.to_csv(\"/users/ngun7t/Documents/cellbox-jun-6/data/pert_ind_last_col.csv\", index=False, header=False)\n", + "\n", + "expr_df[99] = ind\n", + "expr_df.to_csv(\"/users/ngun7t/Documents/cellbox-jun-6/data/expr_ind_last_col.csv\", index=False, header=False)" + ] + }, + { + "cell_type": "code", + "execution_count": 24, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
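The cell above appends each row's id as an extra final column (column 99, for 100 columns total) before writing `pert_ind_last_col.csv` and `expr_ind_last_col.csv`. The point of the bookkeeping column is provenance: after shuffling and batching, every sample can still be traced back to its source row, which is what the leave-one-out intersection checks rely on. The id column would presumably be stripped off before the data reaches the model; a minimal sketch with a hypothetical batch:

```python
import numpy as np
import torch

ids = np.arange(4, dtype=np.float32).reshape(-1, 1)   # row ids 0..3
data = np.random.rand(4, 99).astype(np.float32)       # fake measurements
batch = torch.from_numpy(np.hstack([data, ids]))
features, row_ids = batch[:, :-1], batch[:, -1]       # drop the ids before the model
print(features.shape, row_ids.tolist())               # torch.Size([4, 99]) [0.0, 1.0, 2.0, 3.0]
```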
" + ], + "text/plain": [ + " 0 1 2 3 4 5 6 7 8 9 ... 90 91 92 93 94 \\\n", + "0 0 0 0 0 0 0 0 0 0 0 ... 0.000 0.000 0.000 0.0 0.0 \n", + "1 0 0 0 0 0 0 0 0 0 0 ... 0.000 0.000 0.000 0.0 0.0 \n", + "2 0 0 0 0 0 0 0 0 0 0 ... -0.415 0.000 0.000 0.0 0.0 \n", + "3 0 0 0 0 0 0 0 0 0 0 ... 0.000 -0.515 0.000 0.0 0.0 \n", + "4 0 0 0 0 0 0 0 0 0 0 ... 0.000 0.000 -0.622 0.0 0.0 \n", + "\n", + " 95 96 97 98 99 \n", + "0 0.0 0.0 0.0 0.0 0 \n", + "1 0.0 0.0 0.0 0.0 1 \n", + "2 0.0 0.0 0.0 0.0 2 \n", + "3 0.0 0.0 0.0 0.0 3 \n", + "4 0.0 0.0 0.0 0.0 4 \n", + "\n", + "[5 rows x 100 columns]" + ] + }, + "execution_count": 24, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "pert_df.head()" + ] + }, + { + "cell_type": "code", + "execution_count": 25, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
" + ], + "text/plain": [ + " 0 1 2 3 4 5 6 7 8 9 ... 90 91 92 93 94 \\\n", + "0 0 0 0 0 0 0 0 0 0 0 ... 0.000 0.000 0.000 0.0 0.0 \n", + "1 0 0 0 0 0 0 0 0 0 0 ... 0.000 0.000 0.000 0.0 0.0 \n", + "2 0 0 0 0 0 0 0 0 0 0 ... -0.415 0.000 0.000 0.0 0.0 \n", + "3 0 0 0 0 0 0 0 0 0 0 ... 0.000 -0.515 0.000 0.0 0.0 \n", + "4 0 0 0 0 0 0 0 0 0 0 ... 0.000 0.000 -0.622 0.0 0.0 \n", + "\n", + " 95 96 97 98 99 \n", + "0 0.0 0.0 0.0 0.0 0 \n", + "1 0.0 0.0 0.0 0.0 1 \n", + "2 0.0 0.0 0.0 0.0 2 \n", + "3 0.0 0.0 0.0 0.0 3 \n", + "4 0.0 0.0 0.0 0.0 4 \n", + "\n", + "[5 rows x 100 columns]" + ] + }, + "execution_count": 25, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "pert_df = pd.read_csv(\"/users/ngun7t/Documents/cellbox-jun-6/data/pert_ind_last_col.csv\", header=None)\n", + "pert_df.head()" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Try out new CellBox Pytorch DataLoader" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Shape of pert: torch.Size([4, 99]) and shape of expr: torch.Size([4, 99])\n", + "Shape of pert: torch.Size([4, 99]) and shape of expr: torch.Size([4, 99])\n", + "Shape of pert: torch.Size([4, 99]) and shape of expr: torch.Size([4, 99])\n", + "Shape of pert: torch.Size([4, 99]) and shape of expr: torch.Size([4, 99])\n", + "Shape of pert: torch.Size([4, 99]) and shape of expr: torch.Size([4, 99])\n", + "Shape of pert: torch.Size([4, 99]) and shape of expr: torch.Size([4, 99])\n", + "Shape of pert: torch.Size([4, 99]) and shape of expr: torch.Size([4, 99])\n", + "Shape of pert: torch.Size([4, 99]) and shape of expr: torch.Size([4, 99])\n", + "Shape of pert: torch.Size([4, 99]) and shape of expr: torch.Size([4, 99])\n", + "Shape of pert: torch.Size([4, 99]) and shape of expr: torch.Size([4, 99])\n", + "Shape of pert: torch.Size([4, 99]) and shape of expr: torch.Size([4, 99])\n", + "Shape of pert: torch.Size([4, 99]) and shape of expr: torch.Size([4, 99])\n", + "Shape of pert: torch.Size([1, 99]) and shape of expr: torch.Size([1, 99])\n" + ] + } + ], + "source": [ + "iter_train = cfg.iter_train\n", + "\n", + "perts = []\n", + "exprs = []\n", + "for pert, expr in iter_train:\n", + " perts.append(pert)\n", + " exprs.append(expr)\n", + " #print(f\"Shape of pert: {pert.size()} and shape of expr: {expr.size()}\")" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [], + "source": [ + "import torch\n", + "import torch.nn as nn\n", + "from torch.utils.data import Dataset, TensorDataset\n", + "from torch.utils.data import DataLoader" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "torch.Size([89, 99])\n", + "torch.Size([89, 1])\n" + ] + } + ], + "source": [ + "full_data = torch.from_numpy(cfg.dataset[\"pert_full\"].to_numpy())\n", + "temp_y = torch.from_numpy(np.zeros((full_data.shape[0], 1)))\n", + "\n", + "print(full_data.size())\n", + "print(temp_y.size())\n", + "\n", + "dataset = TensorDataset(full_data, temp_y)\n", + "dataloader = DataLoader(\n", + " dataset, batch_size=3, shuffle=True\n", + ")" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Scratch" + ] + }, + { + "cell_type": "code", + "execution_count": 30, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
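The `TensorDataset`/`DataLoader` pair tried above is the PyTorch counterpart of the TF `iter_train`/`iter_monitor`/`iter_eval` iterators. A self-contained sketch of that wiring with dummy arrays (shapes and batch size mirror the printout above, where 89 rows at batch size 4 give 22 full batches plus a final batch of 1):

```python
import numpy as np
import torch
from torch.utils.data import TensorDataset, DataLoader

pert = torch.from_numpy(np.random.rand(89, 99).astype(np.float32))
expr = torch.from_numpy(np.random.rand(89, 99).astype(np.float32))
dataset = TensorDataset(pert, expr)

iter_train = DataLoader(dataset, batch_size=4, shuffle=True)   # reshuffled every epoch
iter_eval = DataLoader(dataset, batch_size=4, shuffle=False)   # fixed order for evaluation

for pert_batch, expr_batch in iter_train:
    print(f"Shape of pert: {pert_batch.size()} and shape of expr: {expr_batch.size()}")
```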
" + ], + "text/plain": [ + " 0 1 2\n", + "0 1 0 0\n", + "1 1 3 13\n", + "2 1 4 13\n", + "3 1 5 13\n", + "4 1 6 13\n", + ".. .. .. ..\n", + "84 7 10 13\n", + "85 10 12 13\n", + "86 8 10 13\n", + "87 9 10 13\n", + "88 10 13 0\n", + "\n", + "[89 rows x 3 columns]" + ] + }, + "execution_count": 30, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "cfg.loo" + ] + }, + { + "cell_type": "code", + "execution_count": 94, + "metadata": {}, + "outputs": [], + "source": [ + "a = np.pad([1,2,3,4,5], (0, 2), \"constant\")" + ] + }, + { + "cell_type": "code", + "execution_count": 95, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "array([1, 2, 3, 4, 5, 0, 0])" + ] + }, + "execution_count": 95, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "a" + ] + }, + { + "cell_type": "code", + "execution_count": 97, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[87]\n", + "[87, 89, 99]\n", + "[87, 90, 99]\n", + "[87, 91, 99]\n", + "[87, 92, 99]\n", + "[87, 93, 99]\n", + "[87, 98, 99]\n", + "[87, 94, 99]\n", + "[87, 95, 99]\n", + "[87, 96, 99]\n", + "[87, 99]\n", + "[88, 99]\n", + "[88, 99]\n", + "[87, 88, 99]\n", + "[88, 89, 99]\n", + "[88, 90, 99]\n", + "[88, 91, 99]\n", + "[88, 92, 99]\n", + "[88, 93, 99]\n", + "[88, 98, 99]\n", + "[88, 94, 99]\n", + "[88, 95, 99]\n", + "[88, 96, 99]\n", + "[89, 99]\n", + "[89, 99]\n", + "[89, 90, 99]\n", + "[89, 93, 99]\n", + "[89, 98, 99]\n", + "[89, 95, 99]\n", + "[90, 99]\n", + "[90, 93, 99]\n", + "[90, 98, 99]\n", + "[90, 95, 99]\n", + "[90, 99]\n", + "[91, 99]\n", + "[89, 91, 99]\n", + "[90, 91, 99]\n", + "[91, 93, 99]\n", + "[91, 98, 99]\n", + "[91, 94, 99]\n", + "[91, 95, 99]\n", + "[91, 99]\n", + "[92, 99]\n", + "[92, 99]\n", + "[89, 92, 99]\n", + "[90, 92, 99]\n", + "[91, 92, 99]\n", + "[92, 93, 99]\n", + "[92, 98, 99]\n", + "[92, 94, 99]\n", + "[92, 95, 99]\n", + "[93, 99]\n", + "[93, 95, 99]\n", + "[93, 99]\n", + "[97, 99]\n", + "[87, 97, 99]\n", + "[88, 97, 99]\n", + "[89, 97, 99]\n", + "[90, 97, 99]\n", + "[91, 97, 99]\n", + "[92, 97, 99]\n", + "[93, 97, 99]\n", + "[97, 98, 99]\n", + "[94, 97, 99]\n", + "[95, 97, 99]\n", + "[96, 97, 99]\n", + "[98, 99]\n", + "[93, 98, 99]\n", + "[95, 98, 99]\n", + "[98, 99]\n", + "[94, 99]\n", + "[89, 94, 99]\n", + "[90, 94, 99]\n", + "[93, 94, 99]\n", + "[94, 98, 99]\n", + "[94, 95, 99]\n", + "[94, 99]\n", + "[95, 99]\n", + "[95, 99]\n", + "[96, 99]\n", + "[89, 96, 99]\n", + "[90, 96, 99]\n", + "[91, 96, 99]\n", + "[92, 96, 99]\n", + "[93, 96, 99]\n", + "[96, 98, 99]\n", + "[94, 96, 99]\n", + "[95, 96, 99]\n", + "[96, 99]\n" + ] + } + ], + "source": [ + "def pad_and_realign(x, length, idx_shift=0):\n", + " print(list(x))\n", + " x -= idx_shift\n", + " padded = np.pad(x, (0, length - len(x)), 'constant')\n", + " return padded\n", + "\n", + "group_df = pd.DataFrame(np.where(cfg.pert != 0), index=['row_id', 'pert_idx']).T.groupby('row_id')\n", + "max_combo_degree = group_df.pert_idx.count().max()\n", + "cfg.loo = pd.DataFrame(group_df.pert_idx.apply(\n", + " lambda x: pad_and_realign(x, max_combo_degree, cfg.n_activity_nodes - 1)\n", + ").tolist())" + ] + }, + { + "cell_type": "code", + "execution_count": 33, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
" + ], + "text/plain": [ + " 0 1 2 3 4 5 6 7 8 9 ... 90 91 92 \\\n", + "0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 ... 0.000 0.000 0.000 \n", + "1 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 ... 0.000 0.000 0.000 \n", + "2 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 ... -0.415 0.000 0.000 \n", + "3 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 ... 0.000 -0.515 0.000 \n", + "4 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 ... 0.000 0.000 -0.622 \n", + "\n", + " 93 94 95 96 97 98 99 \n", + "0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 \n", + "1 0.0 0.0 0.0 0.0 0.0 0.0 1.0 \n", + "2 0.0 0.0 0.0 0.0 0.0 0.0 2.0 \n", + "3 0.0 0.0 0.0 0.0 0.0 0.0 3.0 \n", + "4 0.0 0.0 0.0 0.0 0.0 0.0 4.0 \n", + "\n", + "[5 rows x 100 columns]" + ] + }, + "execution_count": 33, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "cfg.pert.head()" + ] + }, + { + "cell_type": "code", + "execution_count": 99, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
" + ], + "text/plain": [ + " row_id pert_idx\n", + "0 0 87\n", + "1 1 87\n", + "2 1 89\n", + "3 1 99\n", + "4 2 87\n", + ".. ... ...\n", + "238 87 95\n", + "239 87 96\n", + "240 87 99\n", + "241 88 96\n", + "242 88 99\n", + "\n", + "[243 rows x 2 columns]" + ] + }, + "execution_count": 99, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "group_df.head(10)" + ] + }, + { + "cell_type": "code", + "execution_count": 88, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 88, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "group_df.pert_idx" + ] + }, + { + "cell_type": "code", + "execution_count": 38, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
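`group_df` above is built from a compact idiom: `np.where(cfg.pert != 0)` returns parallel `(row, column)` index arrays, and transposing them into a two-column frame grouped by `row_id` collects, for each experiment, exactly which columns were perturbed. A toy version:

```python
import numpy as np
import pandas as pd

pert = np.array([[0, 0, 0, 4, 4, 5],
                 [0, 0, 0, 4, 0, 5]])
pairs = pd.DataFrame(np.where(pert != 0), index=['row_id', 'pert_idx']).T
print(pairs.groupby('row_id').pert_idx.apply(list).tolist())  # [[3, 4, 5], [3, 5]]
```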
" + ], + "text/plain": [ + " 0 1 2\n", + "0 1 0 0\n", + "1 1 3 13\n", + "2 1 4 13\n", + "3 1 5 13\n", + "4 1 6 13\n", + ".. .. .. ..\n", + "84 7 10 13\n", + "85 10 12 13\n", + "86 8 10 13\n", + "87 9 10 13\n", + "88 10 13 0\n", + "\n", + "[89 rows x 3 columns]" + ] + }, + "execution_count": 38, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "cfg.loo" + ] + }, + { + "cell_type": "code", + "execution_count": 51, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
" + ], + "text/plain": [ + " 0 1 2\n", + "0 1 0 0\n", + "55 1 11 13\n", + "13 1 2 13\n", + "10 1 13 0\n", + "9 1 10 13\n", + ".. .. .. ..\n", + "88 10 13 0\n", + "62 11 12 13\n", + "54 11 13 0\n", + "69 12 13 0\n", + "66 12 13 0\n", + "\n", + "[89 rows x 3 columns]" + ] + }, + "execution_count": 51, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "cfg.loo.sort_values(0)" + ] + }, + { + "cell_type": "code", + "execution_count": 39, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
" + ], + "text/plain": [ + " 0 1 2 3 4 5 6 7 8 9 ... 233 234 \\\n", + "row_id 0 1 1 1 2 2 2 3 3 3 ... 85 85 \n", + "pert_idx 87 87 89 99 87 90 99 87 91 99 ... 98 99 \n", + "\n", + " 235 236 237 238 239 240 241 242 \n", + "row_id 86 86 86 87 87 87 88 88 \n", + "pert_idx 94 96 99 95 96 99 96 99 \n", + "\n", + "[2 rows x 243 columns]" + ] + }, + "execution_count": 39, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "pd.DataFrame(np.where(cfg.pert != 0), index=['row_id', 'pert_idx'])" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 45, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "[array([1, 0, 0]),\n", + " array([ 1, 3, 13]),\n", + " array([ 1, 4, 13]),\n", + " array([ 1, 5, 13]),\n", + " array([ 1, 6, 13]),\n", + " array([ 1, 7, 13]),\n", + " array([ 1, 12, 13]),\n", + " array([ 1, 8, 13]),\n", + " array([ 1, 9, 13]),\n", + " array([ 1, 10, 13]),\n", + " array([ 1, 13, 0]),\n", + " array([ 2, 13, 0]),\n", + " array([ 2, 13, 0]),\n", + " array([ 1, 2, 13]),\n", + " array([ 2, 3, 13]),\n", + " array([ 2, 4, 13]),\n", + " array([ 2, 5, 13]),\n", + " array([ 2, 6, 13]),\n", + " array([ 2, 7, 13]),\n", + " array([ 2, 12, 13]),\n", + " array([ 2, 8, 13]),\n", + " array([ 2, 9, 13]),\n", + " array([ 2, 10, 13]),\n", + " array([ 3, 13, 0]),\n", + " array([ 3, 13, 0]),\n", + " array([ 3, 4, 13]),\n", + " array([ 3, 7, 13]),\n", + " array([ 3, 12, 13]),\n", + " array([ 3, 9, 13]),\n", + " array([ 4, 13, 0]),\n", + " array([ 4, 7, 13]),\n", + " array([ 4, 12, 13]),\n", + " array([ 4, 9, 13]),\n", + " array([ 4, 13, 0]),\n", + " array([ 5, 13, 0]),\n", + " array([ 3, 5, 13]),\n", + " array([ 4, 5, 13]),\n", + " array([ 5, 7, 13]),\n", + " array([ 5, 12, 13]),\n", + " array([ 5, 8, 13]),\n", + " array([ 5, 9, 13]),\n", + " array([ 5, 13, 0]),\n", + " array([ 6, 13, 0]),\n", + " array([ 6, 13, 0]),\n", + " array([ 3, 6, 13]),\n", + " array([ 4, 6, 13]),\n", + " array([ 5, 6, 13]),\n", + " array([ 6, 7, 13]),\n", + " array([ 6, 12, 13]),\n", + " array([ 6, 8, 13]),\n", + " array([ 6, 9, 13]),\n", + " array([ 7, 13, 0]),\n", + " array([ 7, 9, 13]),\n", + " array([ 7, 13, 0]),\n", + " array([11, 13, 0]),\n", + " array([ 1, 11, 13]),\n", + " array([ 2, 11, 13]),\n", + " array([ 3, 11, 13]),\n", + " array([ 4, 11, 13]),\n", + " array([ 5, 11, 13]),\n", + " array([ 6, 11, 13]),\n", + " array([ 7, 11, 13]),\n", + " array([11, 12, 13]),\n", + " array([ 8, 11, 13]),\n", + " array([ 9, 11, 13]),\n", + " array([10, 11, 13]),\n", + " array([12, 13, 0]),\n", + " array([ 7, 12, 13]),\n", + " array([ 9, 12, 13]),\n", + " array([12, 13, 0]),\n", + " array([ 8, 13, 0]),\n", + " array([ 3, 8, 13]),\n", + " array([ 4, 8, 13]),\n", + " array([ 7, 8, 13]),\n", + " array([ 8, 12, 13]),\n", + " array([ 8, 9, 13]),\n", + " array([ 8, 13, 0]),\n", + " array([ 9, 13, 0]),\n", + " array([ 9, 13, 0]),\n", + " array([10, 13, 0]),\n", + " array([ 3, 10, 13]),\n", + " array([ 4, 10, 13]),\n", + " array([ 5, 10, 13]),\n", + " array([ 6, 10, 13]),\n", + " array([ 7, 10, 13]),\n", + " array([10, 12, 13]),\n", + " array([ 8, 10, 13]),\n", + " array([ 9, 10, 13]),\n", + " array([10, 13, 0])]" + ] + }, + "execution_count": 45, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "group_df.pert_idx.apply(\n", + " lambda x: pad_and_realign(x, max_combo_degree, cfg.n_activity_nodes - 1)\n", + ").tolist()" + ] + }, + { + "cell_type": "code", + 
"execution_count": 44, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "(array([0, 0, 0, 1, 1, 1]), array([3, 4, 5, 3, 4, 5]))" + ] + }, + "execution_count": 44, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "np.where(np.array([[0,0,0,4,4,5], [0,0,0,4,4,5]]) > 0)" + ] + }, + { + "cell_type": "code", + "execution_count": 100, + "metadata": {}, + "outputs": [], + "source": [ + "drug_indices_map = []\n", + "for drug_index in range(14):\n", + " double_idx = cfg.loo.all(axis=1)\n", + " testidx = (cfg.loo == drug_index).any(axis=1)\n", + "\n", + " if False:\n", + " testidx = pd.concat([testidx, double_idx], axis=1)\n", + " testidx = testidx.all(axis=1)\n", + "\n", + " loo_label = pd.read_csv(\"/users/ngun7t/Documents/cellbox-jun-6/data/loo_label.csv\", header=None)[testidx]\n", + " for i in range(14):\n", + " if (loo_label == i).any(axis=1).all():\n", + " drug_indices_map.append(i)\n", + " break" + ] + }, + { + "cell_type": "code", + "execution_count": 115, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "[9, 22, 65, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88]" + ] + }, + "execution_count": 115, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "ind" + ] + }, + { + "cell_type": "code", + "execution_count": 117, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
" + ], + "text/plain": [ + " 0 1\n", + "9 1 12\n", + "22 2 12\n", + "65 8 12\n", + "79 12 0\n", + "80 12 3\n", + "81 12 4\n", + "82 12 5\n", + "83 12 6\n", + "84 12 7\n", + "85 12 9\n", + "86 12 10\n", + "87 12 11\n", + "88 12 0" + ] + }, + "execution_count": 117, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "ind = loo_label.index[(loo_label[[0, 1]] == 12).any(axis=1)].tolist()\n", + "loo_label.iloc[np.array(ind) - 1]" + ] + }, + { + "cell_type": "code", + "execution_count": 114, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
" + ], + "text/plain": [ + " 0 1\n", + "1 1 3\n", + "2 1 4\n", + "3 1 5\n", + "4 1 6\n", + "5 1 7\n", + ".. .. ..\n", + "84 12 7\n", + "85 12 9\n", + "86 12 10\n", + "87 12 11\n", + "88 12 0\n", + "\n", + "[88 rows x 2 columns]" + ] + }, + "execution_count": 114, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "loo_label" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "cellbox-3.6", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.8.0" + }, + "orig_nbformat": 4 + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/notebooks/model_training.ipynb b/notebooks/model_training.ipynb new file mode 100644 index 0000000..acc7021 --- /dev/null +++ b/notebooks/model_training.ipynb @@ -0,0 +1,201 @@ +{ + "cells": [ + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "This is a notebook for testing the model training process between Tensorflow and Pytorch models" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Tensorflow model" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2023-06-13 19:01:23.637396: I tensorflow/core/platform/cpu_feature_guard.cc:193] This TensorFlow binary is optimized with oneAPI Deep Neural Network Library (oneDNN) to use the following CPU instructions in performance-critical operations: AVX2 AVX512F AVX512_VNNI FMA\n", + "To enable them in other operations, rebuild TensorFlow with the appropriate compiler flags.\n", + "2023-06-13 19:01:25.469002: I tensorflow/core/util/port.cc:104] oneDNN custom operations are on. You may see slightly different numerical results due to floating-point round-off errors from different computation orders. 
To turn them off, set the environment variable `TF_ENABLE_ONEDNN_OPTS=0`.\n", + "2023-06-13 19:01:25.952049: W tensorflow/compiler/xla/stream_executor/platform/default/dso_loader.cc:64] Could not load dynamic library 'libcudart.so.11.0'; dlerror: libcudart.so.11.0: cannot open shared object file: No such file or directory; LD_LIBRARY_PATH: /cm/shared/apps/lsf10/10.1/linux3.10-glibc2.17-x86_64/lib:/data/weirauchlab/opt/lib:/data/weirauchlab/opt/lib64:/data/weirauchlab/local/lib\n", + "2023-06-13 19:01:25.952113: I tensorflow/compiler/xla/stream_executor/cuda/cudart_stub.cc:29] Ignore above cudart dlerror if you do not have a GPU set up on your machine.\n", + "2023-06-13 19:01:45.525129: W tensorflow/compiler/xla/stream_executor/platform/default/dso_loader.cc:64] Could not load dynamic library 'libnvinfer.so.7'; dlerror: libnvinfer.so.7: cannot open shared object file: No such file or directory; LD_LIBRARY_PATH: /cm/shared/apps/lsf10/10.1/linux3.10-glibc2.17-x86_64/lib:/data/weirauchlab/opt/lib:/data/weirauchlab/opt/lib64:/data/weirauchlab/local/lib\n", + "2023-06-13 19:01:45.526670: W tensorflow/compiler/xla/stream_executor/platform/default/dso_loader.cc:64] Could not load dynamic library 'libnvinfer_plugin.so.7'; dlerror: libnvinfer_plugin.so.7: cannot open shared object file: No such file or directory; LD_LIBRARY_PATH: /cm/shared/apps/lsf10/10.1/linux3.10-glibc2.17-x86_64/lib:/data/weirauchlab/opt/lib:/data/weirauchlab/opt/lib64:/data/weirauchlab/local/lib\n", + "2023-06-13 19:01:45.526699: W tensorflow/compiler/tf2tensorrt/utils/py_utils.cc:38] TF-TRT Warning: Cannot dlopen some TensorRT libraries. If you would like to use Nvidia GPU with TensorRT, please make sure the missing libraries mentioned above are installed properly.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "WARNING:tensorflow:From /users/ngun7t/anaconda3/envs/cellbox-3.6-2/lib/python3.8/site-packages/tensorflow/python/compat/v2_compat.py:107: disable_resource_variables (from tensorflow.python.ops.variable_scope) is deprecated and will be removed in a future version.\n", + "Instructions for updating:\n", + "non-resource variables are not supported in the long term\n", + "================================================================================\n", + " _____ _ _ ____ \n", + " / ____| | | | _ \\ \n", + " | | ___| | | |_) | _____ __ \n", + " | | / _ \\ | | _ < / _ \\ \\/ / \n", + " | |___| __/ | | |_) | (_) > < \n", + " \\_____\\___|_|_|____/ \\___/_/\\_\\ \n", + "Running CellBox scripts developed in Sander lab\n", + "Maintained by Bo Yuan, Judy Shen, and Augustin Luna; contributions by Daniel Ritter\n", + "\n", + " version 0.3.2\n", + " -- Feb 10, 2023 --\n", + " * Modify CellBox to support TF2 \n", + " \n", + "Tutorials and documentations are available at https://github.com/sanderlab/CellBox\n", + "If you want to discuss the usage or to report a bug, please use the 'Issues' function at GitHub.\n", + "If you find CellBox useful for your research, please consider citing the corresponding publication.\n", + "For more information, please email us at boyuan@g.harvard.edu and c_shen@g.harvard.edu, augustin_luna@hms.harvard.edu\n", + " --------------------------------------------------------------------------------\n" + ] + } + ], + "source": [ + "import cellbox\n", + "import os\n", + "import numpy as np\n", + "import pandas as pd\n", + "import tensorflow.compat.v1 as tf\n", + "import shutil\n", + "import argparse\n", + "import json\n", + "tf.disable_v2_behavior()" + ] + }, + { + 
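One wart worth noting before the setup cell below: `prepare_workdir` (here and in the notebook above) guards the leave-one-out branch with `except Exception('Drug index not specified') as e:`. An `except` clause needs an exception class, not an instance, so if the `try` body ever failed, this line would itself raise `TypeError` rather than report the missing index. A working form of the guard, sketched as a standalone helper (name is illustrative):

```python
def apply_drug_suffix(cfg):
    # Check the attribute up front instead of catching an exception instance.
    if "leave one out" in cfg.experiment_type:
        if not hasattr(cfg, "drug_index"):
            raise ValueError("Drug index not specified")
        cfg.model_prefix = "{}_{}".format(cfg.model_prefix, cfg.drug_index)
    return cfg
```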
"attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Setting up the model and cfgs" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "{'experiment_id': 'Example_RP', 'model_prefix': 'seed', 'ckpt_name': 'model11.ckpt', 'export_verbose': 3, 'experiment_type': 'random partition', 'sparse_data': False, 'batchsize': 4, 'trainset_ratio': 0.7, 'validset_ratio': 0.8, 'n_batches_eval': None, 'add_noise_level': 0, 'dT': 0.1, 'ode_solver': 'heun', 'envelope_form': 'tanh', 'envelope': 0, 'pert_form': 'by u', 'ode_degree': 1, 'ode_last_steps': 2, 'n_iter_buffer': 50, 'n_iter_patience': 100, 'weight_loss': 'None', 'l1lambda': 0.0001, 'l2lambda': 0.0001, 'model': 'CellBox', 'pert_file': '/users/ngun7t/Documents/cellbox-jun-6/data/pert.csv', 'expr_file': '/users/ngun7t/Documents/cellbox-jun-6/data/expr.csv', 'node_index_file': '/users/ngun7t/Documents/cellbox-jun-6/data/node_Index.csv', 'n_protein_nodes': 82, 'n_activity_nodes': 87, 'n_x': 99, 'envelop_form': 'tanh', 'envelop': 0, 'n_epoch': 10000, 'n_iter': 10000, 'stages': [{'nT': 200, 'sub_stages': [{'lr_val': 0.001, 'l1lambda': 0.0001}]}], 'ckpt_path_full': './model11.ckpt', 'drug_index': 5, 'seed': 1000}\n", + "Working directory is ready at results/Example_RP_a9ad774ed7261db29240e2faac13c097.\n", + "Hello!\n" + ] + } + ], + "source": [ + "def set_seed(in_seed):\n", + " int_seed = int(in_seed)\n", + " tf.compat.v1.set_random_seed(int_seed)\n", + " np.random.seed(int_seed)\n", + "\n", + "\n", + "def prepare_workdir(in_cfg):\n", + " # Read Data\n", + " in_cfg.root_dir = os.getcwd()\n", + " in_cfg.node_index = pd.read_csv(in_cfg.node_index_file, header=None, names=None) \\\n", + " if hasattr(in_cfg, 'node_index_file') else pd.DataFrame(np.arange(in_cfg.n_x))\n", + "\n", + " # Create Output Folder\n", + " experiment_path = 'results/{}_{}'.format(in_cfg.experiment_id, md5)\n", + " try:\n", + " os.makedirs(experiment_path)\n", + " except Exception:\n", + " pass\n", + " out_cfg = vars(in_cfg)\n", + " out_cfg = {key: out_cfg[key] for key in out_cfg if type(out_cfg[key]) is not pd.DataFrame}\n", + " os.chdir(experiment_path)\n", + " json.dump(out_cfg, open('config.json', 'w'), indent=4)\n", + "\n", + " if \"leave one out\" in in_cfg.experiment_type:\n", + " try:\n", + " in_cfg.model_prefix = '{}_{}'.format(in_cfg.model_prefix, in_cfg.drug_index)\n", + " except Exception('Drug index not specified') as e:\n", + " raise e\n", + "\n", + " in_cfg.working_index = in_cfg.model_prefix + \"_\" + str(working_index).zfill(3)\n", + "\n", + " try:\n", + " shutil.rmtree(in_cfg.working_index)\n", + " except Exception:\n", + " pass\n", + " os.makedirs(in_cfg.working_index)\n", + " os.chdir(in_cfg.working_index)\n", + "\n", + " with open(\"record_eval.csv\", 'w') as f:\n", + " f.write(\"epoch,iter,train_loss,valid_loss,train_mse,valid_mse,test_mse,time_elapsed\\n\")\n", + "\n", + " print('Working directory is ready at {}.'.format(experiment_path))\n", + " return 0\n", + "\n", + "experiment_config_path = \"/users/ngun7t/Documents/cellbox-jun-6/configs_dev/Example.random_partition.json\"\n", + "working_index = 0\n", + "\n", + "cfg = cellbox.config.Config(experiment_config_path)\n", + "cfg.ckpt_path_full = os.path.join('./', cfg.ckpt_name)\n", + "md5 = cellbox.utils.md5(cfg)\n", + "cfg.drug_index = 5 # Change this for testing purposes\n", + "cfg.seed = working_index + cfg.seed if hasattr(cfg, \"seed\") else working_index + 1000\n", + 
"set_seed(cfg.seed)\n", + "print(vars(cfg))\n", + "\n", + "prepare_workdir(cfg)\n", + "logger = cellbox.utils.TimeLogger(time_logger_step=1, hierachy=3)\n", + "args = cfg\n", + "for i, stage in enumerate(cfg.stages):\n", + " set_seed(cfg.seed)\n", + " cfg = cellbox.dataset.factory(cfg)\n", + " args.sub_stages = stage['sub_stages']\n", + " args.n_T = stage['nT']\n", + " model = cellbox.model.factory(args)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "cellbox.train.train_model(model, args)" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "cellbox-3.6-2", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.8.0" + }, + "orig_nbformat": 4 + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/notebooks/scratch.ipynb b/notebooks/scratch.ipynb new file mode 100644 index 0000000..a8b7b1a --- /dev/null +++ b/notebooks/scratch.ipynb @@ -0,0 +1,60 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [], + "source": [ + "import numpy as np\n", + "\n", + "a = [np.random.randint(0, 10, size=(3,2)) for _ in range(10)]\n", + "b = [np.random.randint(0, 10, size=(3,2)) for _ in range(10)]" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "1" + ] + }, + "execution_count": 8, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "a = np.array([1,2,3,4])\n", + "b = np.array([4,6,7,8])\n", + "np.intersect1d(a, b).size" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "cellbox-3.6-2", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.8.0" + }, + "orig_nbformat": 4 + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/test.py b/test.py index b7c6010..10d6fd6 100644 --- a/test.py +++ b/test.py @@ -1,13 +1,129 @@ import pytest import os import glob +import numpy as np +from test_utils.dataloader import get_dataloader, yield_data_from_tensorflow_dataloader, yield_data_from_pytorch_dataloader, \ + s2c_row_inds, loo_row_inds -def test_model(): - os.system('python scripts/main.py -config=configs/Example.minimal.json') - files = glob.glob('results/Debugging_*/seed_000/3_best.W*') - assert len(files)==1 +#def test_model(): +# os.system('python scripts/main.py -config=configs/Example.minimal.json') +# files = glob.glob('results/Debugging_*/seed_000/3_best.W*') +# assert False + +#################################################### Tests for DataLoaders #################################################### + +# Test for correct shape +def test_correct_shape(): + """ + A function to test if the batch yielded by both Tensorflow and Pytorch has the same shape + """ + experiment_config_path = "/users/ngun7t/Documents/cellbox-jun-6/configs_dev/Example.random_partition.json" + tensorflow_dataloader_list, cfg = get_dataloader(experiment_config_path, tensorflow_code=True) + pytorch_dataloader_list, cfg = get_dataloader(experiment_config_path, tensorflow_code=False) + + # Code to extract the 
shape of each yield + for tf_dict, torch_dict in zip(tensorflow_dataloader_list, pytorch_dataloader_list): + tf_train_pert, tf_train_expr = yield_data_from_tensorflow_dataloader( + dataloader=tf_dict["iter_train"], + feed_dict=tf_dict["feed_dict"] + ) + torch_train_pert, torch_train_expr = yield_data_from_pytorch_dataloader( + dataloader=torch_dict["iter_train"] + ) + + # Assert that the count of batches obtained is equal + assert len(tf_train_pert) == len(torch_train_pert), "Length of number of arrays yield for train pert not equal" + assert len(tf_train_expr) == len(torch_train_expr), "Length of number of arrays yield for train expr not equal" + + # Assert that the shape of each batch is equal + for tf_arr, torch_arr in zip(tf_train_pert, torch_train_pert): + assert tf_arr.shape == np.array(torch_arr).shape, f"For pert batches, shape of tf batch = {tf_arr.shape} is not equal to shape of torch batch = {np.array(torch_arr).shape}" + + # Assert that the shape of each batch is equal + for tf_arr, torch_arr in zip(tf_train_expr, torch_train_expr): + assert tf_arr.shape == np.array(torch_arr).shape, f"For expr batches, shape of tf batch = {tf_arr.shape} is not equal to shape of torch batch = {np.array(torch_arr).shape}" + + +# Test for correct input rows for single-to-combo +def test_single_to_combo(): + """ + A function to test if pytorch and tensorflow dataloaders yield the correct rows in the dataset for s2c experiment + """ + experiment_config_path = "/users/ngun7t/Documents/cellbox-jun-6/configs_dev/Example.single_to_combo.json" + loo_label_dir = "/users/ngun7t/Documents/cellbox-jun-6/data/loo_label.csv" + tensorflow_dataloader_list, cfg = get_dataloader(experiment_config_path, tensorflow_code=True) + pytorch_dataloader_list, cfg = get_dataloader(experiment_config_path, tensorflow_code=False) + + # Get the row index that contains single drugs + rows_with_single_drugs, rows_with_multiple_drugs = s2c_row_inds(loo_label_dir) + + # Code to extract the shape of each yield + for tf_dict, torch_dict in zip(tensorflow_dataloader_list, pytorch_dataloader_list): + tf_train_pert, tf_train_expr = yield_data_from_tensorflow_dataloader( + dataloader=tf_dict["iter_train"], + feed_dict=tf_dict["feed_dict"] + ) + torch_train_pert, torch_train_expr = yield_data_from_pytorch_dataloader( + dataloader=torch_dict["iter_train"] + ) + # Assert that the count of batches obtained is equal + assert len(tf_train_pert) == len(torch_train_pert), "Length of number of arrays yield for train pert not equal" + assert len(tf_train_expr) == len(torch_train_expr), "Length of number of arrays yield for train expr not equal" + + # Assert that the shape of each batch is equal, and also it contains the correct row index + for tf_arr, torch_arr in zip(tf_train_pert, torch_train_pert): + assert tf_arr.shape == np.array(torch_arr).shape, f"For pert batches, shape of tf batch = {tf_arr.shape} is not equal to shape of torch batch = {np.array(torch_arr).shape}" + assert np.intersect1d(tf_arr[:, -1], rows_with_multiple_drugs).size == 0, f"batches for tf train set contains data rows that has multiple drugs in s2c mode" + assert np.intersect1d(torch_arr[:, -1], rows_with_multiple_drugs).size == 0, f"batches for torch train set contains data rows that has multiple drugs in s2c mode" + + # Assert that the shape of each batch is equal + for tf_arr, torch_arr in zip(tf_train_expr, torch_train_expr): + assert tf_arr.shape == np.array(torch_arr).shape, f"For expr batches, shape of tf batch = {tf_arr.shape} is not equal to shape of torch batch 
+
+
+# Test for correct input rows for leave-one-out
+@pytest.mark.parametrize("drug_index", list(range(0, 13)))
+def test_leave_one_out(drug_index):
+    """
+    Test that the PyTorch and TensorFlow dataloaders yield the correct dataset rows for the leave-one-out experiment
+    """
+    experiment_config_path = "/users/ngun7t/Documents/cellbox-jun-6/configs_dev/Example.leave_one_drug_out.json"
+    loo_label_dir = "/users/ngun7t/Documents/cellbox-jun-6/data/loo_label.csv"
+    tensorflow_dataloader_list, cfg = get_dataloader(experiment_config_path, tensorflow_code=True, drug_index=drug_index)
+    pytorch_dataloader_list, cfg = get_dataloader(experiment_config_path, tensorflow_code=False, drug_index=drug_index)
+
+    # Get the row indices that contain the left-out drug
+    rows_with_left_out_drugs = loo_row_inds(loo_label_dir, cfg)
+
+    # Extract the shape of each yield
+    for tf_dict, torch_dict in zip(tensorflow_dataloader_list, pytorch_dataloader_list):
+        tf_train_pert, tf_train_expr = yield_data_from_tensorflow_dataloader(
+            dataloader=tf_dict["iter_train"],
+            feed_dict=tf_dict["feed_dict"]
+        )
+        torch_train_pert, torch_train_expr = yield_data_from_pytorch_dataloader(
+            dataloader=torch_dict["iter_train"]
+        )
+        # Assert that the counts of batches obtained are equal
+        assert len(tf_train_pert) == len(torch_train_pert), "Numbers of batches yielded for train pert are not equal"
+        assert len(tf_train_expr) == len(torch_train_expr), "Numbers of batches yielded for train expr are not equal"
+
+        # Assert that each pert batch has an equal shape and excludes rows with the left-out drug
+        for tf_arr, torch_arr in zip(tf_train_pert, torch_train_pert):
+            assert tf_arr.shape == np.array(torch_arr).shape, \
+                f"For pert batches, shape of tf batch = {tf_arr.shape} is not equal to shape of torch batch = {np.array(torch_arr).shape}"
+            assert np.intersect1d(tf_arr[:, -1], rows_with_left_out_drugs).size == 0, \
+                "Batches for the tf train set contain data rows that have left-out drugs in loo mode"
+            assert np.intersect1d(torch_arr[:, -1], rows_with_left_out_drugs).size == 0, \
+                "Batches for the torch train set contain data rows that have left-out drugs in loo mode"
+
+        # Assert that each expr batch has an equal shape and excludes rows with the left-out drug
+        for tf_arr, torch_arr in zip(tf_train_expr, torch_train_expr):
+            assert tf_arr.shape == np.array(torch_arr).shape, \
+                f"For expr batches, shape of tf batch = {tf_arr.shape} is not equal to shape of torch batch = {np.array(torch_arr).shape}"
+            assert np.intersect1d(tf_arr[:, -1], rows_with_left_out_drugs).size == 0, \
+                "Batches for the tf train set contain data rows that have left-out drugs in loo mode"
+            assert np.intersect1d(torch_arr[:, -1], rows_with_left_out_drugs).size == 0, \
+                "Batches for the torch train set contain data rows that have left-out drugs in loo mode"
+
+
+if __name__ == '__main__':
+    pytest.main([__file__])
diff --git a/test_utils/__init__.py b/test_utils/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/test_utils/dataloader.py b/test_utils/dataloader.py
new file mode 100644
index 0000000..5c7ac32
--- /dev/null
+++ b/test_utils/dataloader.py
@@ -0,0 +1,181 @@
+import os
+import shutil
+import argparse
+import json
+import numpy as np
+import pandas as pd
+import tensorflow.compat.v1 as tf
+from tensorflow.core.protobuf import rewriter_config_pb2
+from tensorflow.compat.v1.errors import OutOfRangeError
+
+import cellbox
+
+tf.disable_v2_behavior()
+
+
+def set_seed(in_seed):
+    """Seed both TensorFlow and NumPy for reproducibility"""
+    int_seed = int(in_seed)
+    tf.compat.v1.set_random_seed(int_seed)
+    np.random.seed(int_seed)
+
+
+def prepare_workdir(in_cfg, working_index, md5):
+    # Read data
+    in_cfg.root_dir = os.getcwd()
+    in_cfg.node_index = pd.read_csv(in_cfg.node_index_file, header=None, names=None) \
+        if hasattr(in_cfg, 'node_index_file') else pd.DataFrame(np.arange(in_cfg.n_x))
+
+    # Create output folder
+    experiment_path = 'results/{}_{}'.format(in_cfg.experiment_id, md5)
+    os.makedirs(experiment_path, exist_ok=True)
+    out_cfg = vars(in_cfg)
+    out_cfg = {key: out_cfg[key] for key in out_cfg if type(out_cfg[key]) is not pd.DataFrame}
+    os.chdir(experiment_path)
+    json.dump(out_cfg, open('config.json', 'w'), indent=4)
+
+    if "leave one out" in in_cfg.experiment_type:
+        if getattr(in_cfg, 'drug_index', None) is None:
+            raise ValueError('Drug index not specified for leave-one-out experiment')
+        in_cfg.model_prefix = '{}_{}'.format(in_cfg.model_prefix, in_cfg.drug_index)
+
+    in_cfg.working_index = in_cfg.model_prefix + "_" + str(working_index).zfill(3)
+
+    shutil.rmtree(in_cfg.working_index, ignore_errors=True)
+    os.makedirs(in_cfg.working_index)
+    os.chdir(in_cfg.working_index)
+
+    with open("record_eval.csv", 'w') as f:
+        f.write("epoch,iter,train_loss,valid_loss,train_mse,valid_mse,test_mse,time_elapsed\n")
+
+    print('Working directory is ready at {}.'.format(experiment_path))
+    return 0
+
+
+def get_dataloader(experiment_config_path, tensorflow_code=True, drug_index=None):
+    """
+    Build either the TensorFlow or the PyTorch dataloaders for every stage in the config
+    """
+    working_index = 0
+    cfg = cellbox.config.Config(experiment_config_path)
+    cfg.drug_index = drug_index
+    cfg.ckpt_path_full = os.path.join('./', cfg.ckpt_name)
+    md5 = cellbox.utils.md5(cfg)
+    cfg.seed = working_index + cfg.seed if hasattr(cfg, "seed") else working_index + 1000
+    set_seed(cfg.seed)
+
+    prepare_workdir(cfg, working_index, md5)
+    logger = cellbox.utils.TimeLogger(time_logger_step=1, hierachy=3)
+    args = cfg
+    dataloader_list = []
+    for i, stage in enumerate(cfg.stages):
+        set_seed(cfg.seed)
+        if tensorflow_code:
+            cfg = cellbox.dataset.factory(cfg)
+            data_dict = {
+                "iter_train": cfg.iter_train,
+                "iter_monitor": cfg.iter_monitor,
+                "feed_dict": cfg.feed_dicts
+            }
+        else:
+            cfg = cellbox.dataset_torch.factory(cfg)
+            data_dict = {
+                "iter_train": cfg.iter_train,
+                "iter_monitor": cfg.iter_monitor,
+            }
+        #args.sub_stages = stage['sub_stages']
+        #args.n_T = stage['nT']
+        #model = cellbox.model.factory(args)
+        dataloader_list.append(data_dict)
+
+    return dataloader_list, cfg
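+
+# A minimal usage sketch of get_dataloader; the config path below is hypothetical, and any
+# experiment config shipped with the repo should work in its place:
+#
+#     dataloaders, cfg = get_dataloader("configs_dev/Example.random_partition.json",
+#                                       tensorflow_code=False)
+#     pert_batch, expr_batch = next(iter(dataloaders[0]["iter_train"]))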
+
+
+def yield_data_from_tensorflow_dataloader(dataloader, feed_dict):
+    """
+    Yield data from the TensorFlow dataloader until it is exhausted
+    """
+    config = tf.compat.v1.ConfigProto()
+    off = rewriter_config_pb2.RewriterConfig.OFF
+    config.graph_options.rewrite_options.memory_optimization = off
+
+    # Launch session
+    sess = tf.compat.v1.Session(config=config)
+    sess.run(tf.compat.v1.global_variables_initializer())
+
+    items_pert, items_expr = [], []
+    next_element = dataloader.get_next()
+
+    sess.run(dataloader.initializer, feed_dict=feed_dict['train_set'])
+
+    # Drain the iterator batch by batch until the dataset raises OutOfRangeError
+    try:
+        while True:
+            result1, result2 = sess.run(next_element, feed_dict=feed_dict['train_set'])
+            items_pert.append(result1)
+            items_expr.append(result2)
+    except OutOfRangeError:
+        return items_pert, items_expr
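+
+# Sketch of how the two yield helpers pair up in the tests; tf_dict and torch_dict are
+# illustrative names for the per-stage dicts returned by get_dataloader:
+#
+#     tf_pert, tf_expr = yield_data_from_tensorflow_dataloader(
+#         dataloader=tf_dict["iter_train"], feed_dict=tf_dict["feed_dict"])
+#     pt_pert, pt_expr = yield_data_from_pytorch_dataloader(torch_dict["iter_train"])
+#     assert len(tf_pert) == len(pt_pert)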
+
+
+def yield_data_from_pytorch_dataloader(dataloader):
+    """
+    Yield data from the PyTorch dataloader until it is exhausted
+    """
+    items_pert, items_expr = [], []
+    for pert, expr in dataloader:
+        items_pert.append(pert)
+        items_expr.append(expr)
+    return items_pert, items_expr
+
+
+def s2c_row_inds(loo_label_dir):
+    """
+    Identify the rows of the dataset that have only one drug.
+    The information is stored in the loo_label file: a row is a single-drug
+    condition when at least one of its two drug-index columns is zero.
+    """
+    loo_label = pd.read_csv(loo_label_dir, header=None)
+    rows_with_single_drugs = loo_label.index[(loo_label[[0, 1]] == 0).any(axis=1)].tolist()
+    rows_with_multiple_drugs = list(set(range(loo_label.shape[0])) - set(rows_with_single_drugs))
+    return rows_with_single_drugs, rows_with_multiple_drugs
+
+
+def loo_row_inds(loo_label_dir, cfg):
+    """
+    Identify the rows of the dataset that contain the left-out drug.
+    There is a complication here: cfg.drug_index does not map directly onto the drug
+    indices in loo_label.csv, so the map is rebuilt by checking, for each candidate
+    index, which drug appears in every retained test row. For example, if every test
+    row selected for cfg.drug_index contains drug 5, then cfg.drug_index maps to true
+    drug index 5. See https://github.com/sanderlab/CellBox/issues/48
+    """
+    if cfg.experiment_type == 'leave one out (w/o single)':
+        singles = False
+    elif cfg.experiment_type == 'leave one out (w/ single)':
+        singles = True
+    else:
+        raise ValueError(f"Unexpected experiment type for loo: {cfg.experiment_type}")
+
+    drug_indices_map = []
+    for drug_index in range(14):
+        double_idx = cfg.loo.all(axis=1)
+        testidx = (cfg.loo == drug_index).any(axis=1)
+
+        if singles:
+            testidx = pd.concat([testidx, double_idx], axis=1)
+            testidx = testidx.all(axis=1)
+
+        loo_label = pd.read_csv(loo_label_dir, header=None)[testidx]
+        for i in range(14):
+            if (loo_label == i).any(axis=1).all():
+                drug_indices_map.append(i)
+                break
+
+    print(f"Drug indices map: {drug_indices_map}")
+    true_drug_index = drug_indices_map[cfg.drug_index]
+    loo_label = pd.read_csv(loo_label_dir, header=None)
+    ind_rows = loo_label.index[(loo_label[[0, 1]] == true_drug_index).any(axis=1)].tolist()
+    return np.array(ind_rows)
\ No newline at end of file