Skip to content

Commit

Permalink
feat(Stacking): added automated standardize and resize parameter estimation for DataGenerator
Browse files Browse the repository at this point in the history
• Loading branch information
muellerdo committed Jul 13, 2022
1 parent bdd4581 commit cbc2510
Showing 1 changed file with 54 additions and 54 deletions.
108 changes: 54 additions & 54 deletions aucmedi/ensemble/stacking.py
Original file line number Diff line number Diff line change
Expand Up @@ -190,24 +190,6 @@ def train(self, training_generator, epochs=20, iterations=None,
data_train = (*ps_sampling[0], None)
data_val = (*ps_sampling[1], None)

# Gather DataGenerator parameters
datagen_paras = {"path_imagedir": temp_dg.path_imagedir,
"batch_size": temp_dg.batch_size,
"data_aug": temp_dg.data_aug,
"seed": temp_dg.seed,
"subfunctions": temp_dg.subfunctions,
"shuffle": temp_dg.shuffle,
"standardize_mode": temp_dg.standardize_mode,
"resize": temp_dg.resize,
"grayscale": temp_dg.grayscale,
"prepare_images": temp_dg.prepare_images,
"sample_weights": temp_dg.sample_weights,
"image_format": temp_dg.image_format,
"loader": temp_dg.sample_loader,
"workers": temp_dg.workers,
"kwargs": temp_dg.kwargs
}

# Gather training parameters
parameters_training = {"epochs": epochs,
"iterations": iterations,
Expand Down Expand Up @@ -248,6 +230,24 @@ def train(self, training_generator, epochs=20, iterations=None,
"multiprocessing": self.model_list[i].multiprocessing,
}

# Gather DataGenerator parameters
datagen_paras = {"path_imagedir": temp_dg.path_imagedir,
"batch_size": temp_dg.batch_size,
"data_aug": temp_dg.data_aug,
"seed": temp_dg.seed,
"subfunctions": temp_dg.subfunctions,
"shuffle": temp_dg.shuffle,
"standardize_mode": self.model_list[i].meta_standardize,
"resize": self.model_list[i].meta_input,
"grayscale": temp_dg.grayscale,
"prepare_images": temp_dg.prepare_images,
"sample_weights": temp_dg.sample_weights,
"image_format": temp_dg.image_format,
"loader": temp_dg.sample_loader,
"workers": temp_dg.workers,
"kwargs": temp_dg.kwargs
}

# Start training process
process_queue = mp.Queue()
process_train = mp.Process(target=__training_process__,
Expand Down Expand Up @@ -304,24 +304,6 @@ def train_metalearner(self, training_generator):
if len(ps_sampling[0]) == 3 : data_ensemble = ps_sampling[2]
else : data_ensemble = (*ps_sampling[2], None)

# Gather DataGenerator parameters
datagen_paras = {"path_imagedir": temp_dg.path_imagedir,
"batch_size": temp_dg.batch_size,
"data_aug": temp_dg.data_aug,
"seed": temp_dg.seed,
"subfunctions": temp_dg.subfunctions,
"shuffle": temp_dg.shuffle,
"standardize_mode": temp_dg.standardize_mode,
"resize": temp_dg.resize,
"grayscale": temp_dg.grayscale,
"prepare_images": temp_dg.prepare_images,
"sample_weights": temp_dg.sample_weights,
"image_format": temp_dg.image_format,
"loader": temp_dg.sample_loader,
"workers": temp_dg.workers,
"kwargs": temp_dg.kwargs
}

# Identify path to model directory
if isinstance(self.cache_dir, tempfile.TemporaryDirectory):
path_model_dir = self.cache_dir.name
Expand Down Expand Up @@ -351,6 +333,24 @@ def train_metalearner(self, training_generator):
"multiprocessing": self.model_list[i].multiprocessing,
}

# Gather DataGenerator parameters
datagen_paras = {"path_imagedir": temp_dg.path_imagedir,
"batch_size": temp_dg.batch_size,
"data_aug": temp_dg.data_aug,
"seed": temp_dg.seed,
"subfunctions": temp_dg.subfunctions,
"shuffle": temp_dg.shuffle,
"standardize_mode": self.model_list[i].meta_standardize,
"resize": self.model_list[i].meta_input,
"grayscale": temp_dg.grayscale,
"prepare_images": temp_dg.prepare_images,
"sample_weights": temp_dg.sample_weights,
"image_format": temp_dg.image_format,
"loader": temp_dg.sample_loader,
"workers": temp_dg.workers,
"kwargs": temp_dg.kwargs
}

# Start inference process for model i
process_queue = mp.Queue()
process_pred = mp.Process(target=__prediction_process__,
Expand Down Expand Up @@ -418,24 +418,6 @@ def predict(self, prediction_generator, return_ensemble=False):
# Extract data
data_test = (temp_dg.samples, temp_dg.labels, temp_dg.metadata)

# Gather DataGenerator parameters
datagen_paras = {"path_imagedir": temp_dg.path_imagedir,
"batch_size": temp_dg.batch_size,
"data_aug": temp_dg.data_aug,
"seed": temp_dg.seed,
"subfunctions": temp_dg.subfunctions,
"shuffle": temp_dg.shuffle,
"standardize_mode": temp_dg.standardize_mode,
"resize": temp_dg.resize,
"grayscale": temp_dg.grayscale,
"prepare_images": temp_dg.prepare_images,
"sample_weights": temp_dg.sample_weights,
"image_format": temp_dg.image_format,
"loader": temp_dg.sample_loader,
"workers": temp_dg.workers,
"kwargs": temp_dg.kwargs
}

# Identify path to model directory
if isinstance(self.cache_dir, tempfile.TemporaryDirectory):
path_model_dir = self.cache_dir.name
Expand Down Expand Up @@ -464,6 +446,24 @@ def predict(self, prediction_generator, return_ensemble=False):
"multiprocessing": self.model_list[i].multiprocessing,
}

# Gather DataGenerator parameters
datagen_paras = {"path_imagedir": temp_dg.path_imagedir,
"batch_size": temp_dg.batch_size,
"data_aug": temp_dg.data_aug,
"seed": temp_dg.seed,
"subfunctions": temp_dg.subfunctions,
"shuffle": temp_dg.shuffle,
"standardize_mode": self.model_list[i].meta_standardize,
"resize": self.model_list[i].meta_input,
"grayscale": temp_dg.grayscale,
"prepare_images": temp_dg.prepare_images,
"sample_weights": temp_dg.sample_weights,
"image_format": temp_dg.image_format,
"loader": temp_dg.sample_loader,
"workers": temp_dg.workers,
"kwargs": temp_dg.kwargs
}

# Start inference process for model i
process_queue = mp.Queue()
process_pred = mp.Process(target=__prediction_process__,
Expand Down

0 comments on commit cbc2510

Please sign in to comment.