Merge pull request #272 from simpeg/fix_issue_271
deprecate process_synthetic_data in favour of process_mth5
kkappler authored Jun 10, 2023
2 parents 8cf0fdd + a54faad commit 3bf12cd
Showing 6 changed files with 27 additions and 85 deletions.
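The net effect, as a minimal sketch (the names config, tfk_dataset and z_file_path are placeholders for the objects each test already builds; this is illustrative, not part of the diff): callers that previously went through the process_synthetic_data wrapper now call process_mth5 directly.

from aurora.pipelines.process_mth5 import process_mth5

# Before this commit (deprecated wrapper, removed below):
# tf_cls = process_synthetic_data(config, tfk_dataset, z_file_path=z_file_path)

# After this commit: call the pipeline entry point directly; by default it
# returns an mt_metadata.transfer_functions.core.TF object.
tf_cls = process_mth5(
    config,
    tfk_dataset=tfk_dataset,
    units="MT",
    z_file_path=z_file_path,
)
tf_cls.write(fn="emtfxml_test.xml", file_type="emtfxml")  # as in the Parkfield test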
1 change: 1 addition & 0 deletions aurora/pipelines/process_mth5.py
@@ -243,6 +243,7 @@ def process_mth5(
show_plot=False,
z_file_path=None,
return_collection=False,
save_fcs=False
):
"""
This is the main method used to transform a processing_config,
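The only change to process_mth5 itself is the new save_fcs keyword, defaulting to False. Its behavior is not shown in this diff; assuming it toggles whether Fourier coefficients are saved, an opted-in call would look like the sketch below (config, tfk_dataset and z_file_path are placeholders as above).

from aurora.pipelines.process_mth5 import process_mth5

# Assumption: save_fcs controls saving of Fourier coefficients; the default
# False preserves the pre-commit behavior.
tf_cls = process_mth5(
    config,
    tfk_dataset=tfk_dataset,
    z_file_path=z_file_path,
    save_fcs=True,  # opt in to the new keyword
)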
47 changes: 0 additions & 47 deletions aurora/test_utils/synthetic/processing_helpers.py
@@ -1,53 +1,6 @@
from pathlib import Path

from mt_metadata.transfer_functions.processing.aurora import Processing
from aurora.pipelines.helpers import initialize_config
from aurora.pipelines.process_mth5 import process_mth5


def process_synthetic_data(
processing_config, tfk_dataset, units="MT", z_file_path="", return_collection=False
):
"""
Parameters
----------
processing_config: str or Path, or a Processing() object
where the processing configuration file is found
tfk_dataset: aurora.tf_kernel.dataset.Dataset
class that has a df that describes the runs to be processed.
z_file_path: str or Path
Optional, a place to store the output TF in EMTF z-file format.
Returns
-------
tf_collection:
aurora.transfer_function.transfer_function_collection.TransferFunctionCollection
Container for TF. TransferFunctionCollection will probably be deprecated.
"""
cond1 = isinstance(processing_config, str)
cond2 = isinstance(processing_config, Path)
if cond1 or cond2:
# load from a json path or string
print("Not tested since implementation of new mt_metadata Processing object")
config = initialize_config(processing_config)
elif isinstance(processing_config, Processing):
config = processing_config
else:
print(f"processing_config has unexpected type {type(processing_config)}")
raise Exception

tf_collection = process_mth5(
config,
tfk_dataset,
units=units,
z_file_path=z_file_path,
return_collection=return_collection,
)
return tf_collection


def tf_obj_from_synthetic_data(mth5_path):
"""Helper function for test_issue_139"""
from aurora.config.config_creator import ConfigCreator
14 changes: 7 additions & 7 deletions tests/io/test_issue_139.py
@@ -73,13 +73,13 @@ def test_tf_read_and_write(self):
tf_z.write(out_file_path)
print("Add assert statement that the zrr are the same")

def test_tf_write_and_read(self):
tf_obj = self.tf_obj
tf_obj.write(fn=self.xml_file_base, file_type="emtfxml")

tf_obj2 = TF()
tf_obj2.read(fn=self.xml_file_base)
print("ASSERT tfobj==tfob2 everywhere it hsould")
# def test_tf_write_and_read(self):
# tf_obj = self.tf_obj
# tf_obj.write(fn=self.xml_file_base, file_type="emtfxml")
#
# tf_obj2 = TF()
# tf_obj2.read(fn=self.xml_file_base)
# print("ASSERT tfobj==tfob2 everywhere it should")


def main():
23 changes: 7 additions & 16 deletions tests/parkfield/test_process_parkfield_run.py
@@ -14,22 +14,18 @@
from mth5.helpers import close_open_files


def test_processing(return_collection=False, z_file_path=None, test_clock_zero=False):
def test_processing(z_file_path=None, test_clock_zero=False):
"""
Parameters
----------
return_collection: bool
Controls dtype of returned object
z_file_path: str or Path or None
Where to store zfile
Returns
-------
tf_cls: TF object,
if return_collection is True:
aurora.transfer_function.transfer_function_collection.TransferFunctionCollection
if return_collection is False:
mt_metadata.transfer_functions.core.TF
tf_cls: mt_metadata.transfer_functions.core.TF
The TF object,
"""
close_open_files()
h5_path = ensure_h5_exists()
@@ -62,14 +58,9 @@ def test_processing(return_collection=False, z_file_path=None, test_clock_zero=F
units="MT",
show_plot=show_plot,
z_file_path=z_file_path,
return_collection=return_collection,
)

if return_collection:
tf_collection = tf_cls
return tf_collection
else:
tf_cls.write(fn="emtfxml_test.xml", file_type="emtfxml")
tf_cls.write(fn="emtfxml_test.xml", file_type="emtfxml")
return tf_cls


@@ -80,13 +71,13 @@ def test():
logging.getLogger("matplotlib.ticker").disabled = True

z_file_path = AURORA_RESULTS_PATH.joinpath("pkd.zss")
test_processing(return_collection=True, z_file_path=z_file_path)
tf_cls = test_processing(z_file_path=z_file_path)
tf_cls.write("pkd_mt_metadata.zss", file_type="zss")
test_processing(
z_file_path=z_file_path,
test_clock_zero="user specified",
)
test_processing(z_file_path=z_file_path, test_clock_zero="data start")
test_processing(z_file_path=z_file_path)

# COMPARE WITH ARCHIVED Z-FILE
auxilliary_z_file = EMTF_RESULTS_PATH.joinpath("PKD_272_00.zrr")
8 changes: 3 additions & 5 deletions tests/synthetic/test_compare_aurora_vs_archived_emtf.py
@@ -1,3 +1,4 @@
from aurora.pipelines.process_mth5 import process_mth5
from aurora.pipelines.run_summary import RunSummary
from aurora.sandbox.io_helpers.zfile_murphy import read_z_file
from aurora.test_utils.synthetic.make_mth5_from_asc import create_test1_h5
@@ -8,9 +9,6 @@
)
from aurora.test_utils.synthetic.paths import AURORA_RESULTS_PATH
from aurora.test_utils.synthetic.paths import EMTF_OUTPUT_PATH
from aurora.test_utils.synthetic.processing_helpers import (
process_synthetic_data,
)
from aurora.test_utils.synthetic.rms_helpers import assert_rms_misfit_ok
from aurora.test_utils.synthetic.rms_helpers import compute_rms
from aurora.test_utils.synthetic.rms_helpers import get_expected_rms_misfit
@@ -74,9 +72,9 @@ def aurora_vs_emtf(
expected_rms_misfit = get_expected_rms_misfit(test_case_id, emtf_version)
z_file_path = AURORA_RESULTS_PATH.joinpath(z_file_base)

tf_collection = process_synthetic_data(
tf_collection = process_mth5(
processing_config,
tfk_dataset,
tfk_dataset=tfk_dataset,
z_file_path=z_file_path,
return_collection=True,
)
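Note that this comparison still requests the legacy container via return_collection=True; per the docstring removed from processing_helpers.py, that yields a TransferFunctionCollection (which that docstring says will probably be deprecated) rather than the mt_metadata TF. A sketch of the two return modes, again with placeholder config and tfk_dataset objects:

from aurora.pipelines.process_mth5 import process_mth5

# Default: an mt_metadata.transfer_functions.core.TF
tf_cls = process_mth5(config, tfk_dataset=tfk_dataset)

# Legacy: a TransferFunctionCollection, still used when comparing against
# archived EMTF z-files.
tf_collection = process_mth5(config, tfk_dataset=tfk_dataset, return_collection=True)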
19 changes: 9 additions & 10 deletions tests/synthetic/test_processing.py
@@ -2,6 +2,7 @@
import pathlib
import unittest

from aurora.pipelines.process_mth5 import process_mth5
from aurora.pipelines.run_summary import RunSummary
from aurora.pipelines.transfer_function_kernel import TransferFunctionKernel
from aurora.test_utils.synthetic.make_mth5_from_asc import create_test1_h5
@@ -11,9 +12,6 @@
create_test_run_config,
)
from aurora.test_utils.synthetic.paths import AURORA_RESULTS_PATH
from aurora.test_utils.synthetic.processing_helpers import (
process_synthetic_data,
)
from aurora.transfer_function.kernel_dataset import KernelDataset
from mth5.helpers import close_open_files

@@ -204,9 +202,9 @@ def process_synthetic_1(
for decimation in processing_config.decimations:
decimation.estimator.estimate_per_channel = False

tf_result = process_synthetic_data(
tf_result = process_mth5(
processing_config,
tfk_dataset,
tfk_dataset=tfk_dataset,
z_file_path=z_file_path,
return_collection=return_collection,
)
@@ -236,7 +234,7 @@ def process_synthetic_2():
tfk_dataset = KernelDataset()
tfk_dataset.from_run_summary(run_summary, "test2")
processing_config = create_test_run_config("test2", tfk_dataset)
tfc = process_synthetic_data(processing_config, tfk_dataset)
tfc = process_mth5(processing_config, tfk_dataset=tfk_dataset)
return tfc


@@ -256,9 +254,9 @@ def process_synthetic_1r2(
processing_config = create_test_run_config(
config_keyword, tfk_dataset, channel_nomenclature=channel_nomenclature
)
tfc = process_synthetic_data(
tfc = process_mth5(
processing_config,
tfk_dataset,
tfk_dataset=tfk_dataset,
return_collection=return_collection,
)
return tfc
@@ -270,6 +268,7 @@ def main():
"""
# tmp = TestSyntheticProcessing()
# tmp.setUp()
# tmp.test_can_output_tf_class_and_write_tf_xml()
# tmp.test_no_crash_with_too_many_decimations()
# tmp.test_can_use_scale_factor_dictionary()
unittest.main()
@@ -292,7 +291,7 @@ def main():
# test_config = CONFIG_PATH.joinpath("test1_run_config_underdetermined.json")
# # test_config = Path("config", "test1_run_config_underdetermined.json")
# run_id = "001"
# process_synthetic_data(test_config, run_id, units="MT")
# process_mth5(test_config, run_id, units="MT")
#
#
# def process_synthetic_1_with_nans():
@@ -305,4 +304,4 @@
# test_config = CONFIG_PATH.joinpath("test1_run_config_nan.json")
# # test_config = Path("config", "test1_run_config_nan.json")
# run_id = "001"
# process_synthetic_data(test_config, run_id, units="MT")
# process_mth5(test_config, run_id, units="MT")
