Commit d67aca2

modify to address issue #74
kkappler committed Aug 1, 2022
1 parent 76bb5be commit d67aca2
Showing 10 changed files with 270 additions and 110 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/tests.yml
@@ -34,7 +34,7 @@ jobs:
python --version
conda install -c conda-forge pytest pytest-cov certifi">=2017.4.17" pandoc
pip install -r requirements-dev.txt
pip install git+https://github.com/kujaku11/mt_metadata.git
pip install git+https://github.com/kujaku11/mt_metadata.git@fix_issue_91
pip install git+https://github.com/kujaku11/mth5.git #@branch_name
- name: Install Our Package
64 changes: 64 additions & 0 deletions aurora/channel_nomenclature.py
@@ -0,0 +1,64 @@
"""
2022-07-31
prototype module to enable handling of channel names that depart from the usual
hexy convention.
"""

DEFAULT_CHANNEL_MAP = {
"hx": "hx",
"hy": "hy",
"hz": "hz",
"ex": "ex",
"ey": "ey",
}

LEMI_CHANNEL_MAP_12 = {
"hx": "bx",
"hy": "by",
"hz": "bz",
"ex": "e1",
"ey": "e2",
}

LEMI_CHANNEL_MAP_34 = {
"hx": "bx",
"hy": "by",
"hz": "bz",
"ex": "e3",
"ey": "e4",
}

THE_BEATLES = {
"hx": "john",
"hy": "paul",
"hz": "george",
"ex": "ringo",
"ey": "the fifth beatle",
}


def get_channel_map(mt_system):
if mt_system == "LEMI12":
channel_map = LEMI_CHANNEL_MAP_12
elif mt_system == "LEMI34":
channel_map = LEMI_CHANNEL_MAP_34
elif mt_system == "NIMS":
channel_map = DEFAULT_CHANNEL_MAP
elif mt_system == "beatles":
channel_map = THE_BEATLES
else:
print(f"whoops mt_system {mt_system} unknown")
channel_map = DEFAULT_CHANNEL_MAP
# raise NotImplementedError
return channel_map


def map_channels(mt_system):
channel_map = get_channel_map(mt_system)
ex = channel_map["ex"]
ey = channel_map["ey"]
hx = channel_map["hx"]
hy = channel_map["hy"]
hz = channel_map["hz"]
return ex, ey, hx, hy, hz
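
A minimal usage sketch of the new module (imported as aurora.channel_nomenclature, as the later files in this diff do); the "LEMI12" key and the e1/e2/bx/by/bz names come from the maps above:

from aurora.channel_nomenclature import get_channel_map, map_channels

# full mapping from standard ("hexy") names to LEMI-style names
channel_map = get_channel_map("LEMI12")
assert channel_map["ex"] == "e1"

# unpack the system-specific names in a fixed (ex, ey, hx, hy, hz) order
EX, EY, HX, HY, HZ = map_channels("LEMI12")
assert (HX, HY, HZ) == ("bx", "by", "bz")
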
11 changes: 11 additions & 0 deletions aurora/config/metadata/standards/processing.json
@@ -20,5 +20,16 @@
"alias": [],
"example": "0",
"default": null
},
"channel_nomenclature": {
"type": "string",
"required": true,
"style": "free form",
"units": null,
"description": "key to a dict of how channels are named",
"options": [],
"alias": [],
"example": "LEMI12",
"default": "default"
}
}
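
A hedged sketch of how this new attribute surfaces in code, mirroring the ConfigCreator usage shown later in this diff (CONFIG_PATH and BANDS_DEFAULT_FILE are the existing aurora config/test constants):

from aurora.config import BANDS_DEFAULT_FILE
from aurora.config.config_creator import ConfigCreator
from aurora.test_utils.synthetic.paths import CONFIG_PATH

cc = ConfigCreator(config_path=CONFIG_PATH)
p = cc.create_run_processing_object(emtf_band_file=BANDS_DEFAULT_FILE)
p.channel_nomenclature = "LEMI12"  # example value from the standard above; "default" if omitted
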
19 changes: 12 additions & 7 deletions aurora/pipelines/process_mth5.py
@@ -197,11 +197,15 @@ def process_tf_decimation_level(
config, i_dec_level, local_stft_obj, remote_stft_obj, transfer_function_obj
)

transfer_function_obj.apparent_resistivity(units=units)
return transfer_function_obj


def export_tf(tf_collection, station_metadata_dict={}, survey_dict={}):
def export_tf(
tf_collection,
station_metadata_dict={},
survey_dict={},
channel_nomenclature="default",
):
"""
This method may wind up being embedded in the TF class
Assign transfer_function, residual_covariance, inverse_signal_power, station, survey
@@ -218,9 +222,8 @@ def export_tf(tf_collection, station_metadata_dict={}, survey_dict={}):
tf_cls: mt_metadata.transfer_functions.core.TF
Transfer function container
"""
merged_tf_dict = tf_collection.get_merged_dict()
tf_cls = TF()
# Transfer Function
merged_tf_dict = tf_collection.get_merged_dict(channel_nomenclature)
tf_cls = TF(channel_nomenclature=channel_nomenclature)
renamer_dict = {"output_channel": "output", "input_channel": "input"}
tmp = merged_tf_dict["tf"].rename(renamer_dict)
tf_cls.transfer_function = tmp
@@ -425,9 +428,10 @@ def process_mth5(
i_dec_level,
local_merged_stft_obj,
remote_merged_stft_obj,
units=units,
)

tf_obj.apparent_resistivity(
units=units, channel_nomenclature=processing_config.channel_nomenclature
)
tf_dict[i_dec_level] = tf_obj

if show_plot:
@@ -474,6 +478,7 @@ def process_mth5(
tf_collection,
station_metadata_dict=station_metadata.to_dict(),
survey_dict=survey_dict,
channel_nomenclature=processing_config.channel_nomenclature,
)
close_mths_objs(dataset_df)
return tf_cls
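
A hedged sketch of the updated export_tf call, using the same argument names that appear in process_mth5 above (tf_collection, station_metadata, and survey_dict are assumed to already exist at this point in the pipeline):

tf_cls = export_tf(
    tf_collection,
    station_metadata_dict=station_metadata.to_dict(),
    survey_dict=survey_dict,
    channel_nomenclature="LEMI12",  # "default" preserves the previous hexy behaviour
)
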
56 changes: 34 additions & 22 deletions aurora/test_utils/synthetic/make_mth5_from_asc.py
@@ -13,21 +13,21 @@

import numpy as np
from pathlib import Path
from random import seed

import pandas as pd
from mth5.timeseries import ChannelTS, RunTS
from mth5.mth5 import MTH5

from aurora.channel_nomenclature import map_channels
from aurora.test_utils.synthetic.synthetic_station_config import make_filters
from aurora.test_utils.synthetic.synthetic_station_config import make_station_01
from aurora.test_utils.synthetic.synthetic_station_config import make_station_02
from aurora.test_utils.synthetic.synthetic_station_config import make_station_03


seed(0)
np.random.seed(0)


def create_run_ts_from_synthetic_run(run, df):
def create_run_ts_from_synthetic_run(run, df, channel_nomenclature="default"):
"""
Loop over stations and make ChannelTS objects.
Need to add a tag in the channels
@@ -44,11 +44,12 @@ def create_run_ts_from_synthetic_run(run, df):
-------
"""
EX, EY, HX, HY, HZ = map_channels(channel_nomenclature)
ch_list = []
for col in df.columns:
data = df[col].values

if col in ["ex", "ey"]:
if col in [EX, EY]:
meta_dict = {
"component": col,
"sample_rate": run.sample_rate,
@@ -59,10 +60,10 @@
)
# add metadata to the channel here
chts.channel_metadata.dipole_length = 50
if col == "ey":
if col == EY:
chts.channel_metadata.measurement_azimuth = 90.0

elif col in ["hx", "hy", "hz"]:
elif col in [HX, HY, HZ]:
meta_dict = {
"component": col,
"sample_rate": run.sample_rate,
@@ -71,7 +72,7 @@
chts = ChannelTS(
channel_type="magnetic", data=data, channel_metadata=meta_dict
)
if col == "hy":
if col == HY:
chts.channel_metadata.measurement_azimuth = 90.0

ch_list.append(chts)
@@ -86,7 +87,12 @@


def create_mth5_synthetic_file(
station_cfgs, mth5_path, plot=False, add_nan_values=False, file_version="0.1.0"
station_cfgs,
mth5_path,
plot=False,
add_nan_values=False,
file_version="0.1.0",
channel_nomenclature="default",
):
"""
@@ -135,7 +141,9 @@
df[col].loc[ndx : ndx + num_nan] = np.nan

# cast to run_ts
runts = create_run_ts_from_synthetic_run(run, df)
runts = create_run_ts_from_synthetic_run(
run, df, channel_nomenclature=channel_nomenclature
)
runts.station_metadata.id = station_cfg.id

# plot the data
@@ -162,8 +170,8 @@
return mth5_path


def create_test1_h5(file_version="0.1.0"):
station_01_params = make_station_01()
def create_test1_h5(file_version="0.1.0", channel_nomenclature="default"):
station_01_params = make_station_01(channel_nomenclature=channel_nomenclature)
mth5_path = station_01_params.mth5_path # DATA_PATH.joinpath("test1.h5")
mth5_path = create_mth5_synthetic_file(
[
@@ -172,12 +180,13 @@ def create_test1_h5(file_version="0.1.0"):
mth5_path,
plot=False,
file_version=file_version,
channel_nomenclature=channel_nomenclature,
)
return mth5_path


def create_test2_h5(file_version="0.1.0"):
station_02_params = make_station_02()
def create_test2_h5(file_version="0.1.0", channel_nomenclature="default"):
station_02_params = make_station_02(channel_nomenclature=channel_nomenclature)
mth5_path = station_02_params.mth5_path
mth5_path = create_mth5_synthetic_file(
[
@@ -190,8 +199,8 @@
return mth5_path


def create_test1_h5_with_nan(file_version="0.1.0"):
station_01_params = make_station_01()
def create_test1_h5_with_nan(file_version="0.1.0", channel_nomenclature="default"):
station_01_params = make_station_01(channel_nomenclature=channel_nomenclature)
mth5_path = station_01_params.mth5_path # DATA_PATH.joinpath("test1.h5")
mth5_path = create_mth5_synthetic_file(
[
@@ -205,19 +214,22 @@
return mth5_path


def create_test12rr_h5(file_version="0.1.0"):
station_01_params = make_station_01()
station_02_params = make_station_02()
def create_test12rr_h5(file_version="0.1.0", channel_nomenclature="default"):
station_01_params = make_station_01(channel_nomenclature=channel_nomenclature)
station_02_params = make_station_02(channel_nomenclature=channel_nomenclature)
station_params = [station_01_params, station_02_params]
mth5_path = station_01_params.mth5_path.__str__().replace("test1.h5", "test12rr.h5")
mth5_path = create_mth5_synthetic_file(
station_params, mth5_path, file_version=file_version
station_params,
mth5_path,
file_version=file_version,
channel_nomenclature=channel_nomenclature,
)
return mth5_path


def create_test3_h5(file_version="0.1.0"):
station_params = make_station_03()
def create_test3_h5(file_version="0.1.0", channel_nomenclature="default"):
station_params = make_station_03(channel_nomenclature=channel_nomenclature)
mth5_path = create_mth5_synthetic_file(
[
station_params,
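
A hedged sketch of building the synthetic files with a non-default nomenclature (the make_station_* changes that accept channel_nomenclature are among the changed files not shown above):

from aurora.test_utils.synthetic.make_mth5_from_asc import create_test1_h5

mth5_path = create_test1_h5()                                    # default hexy channel names
mth5_path_lemi = create_test1_h5(channel_nomenclature="LEMI12")  # same data, labelled e1/e2/bx/by/bz
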
30 changes: 25 additions & 5 deletions aurora/test_utils/synthetic/make_processing_configs.py
@@ -1,11 +1,18 @@
from aurora.channel_nomenclature import map_channels
from aurora.config import BANDS_DEFAULT_FILE
from aurora.config import BANDS_256_FILE
from aurora.config.config_creator import ConfigCreator
from aurora.test_utils.synthetic.paths import CONFIG_PATH
from aurora.test_utils.synthetic.paths import DATA_PATH


def create_test_run_config(test_case_id, ds_df, matlab_or_fortran="", save="json"):
def create_test_run_config(
test_case_id,
ds_df,
matlab_or_fortran="",
save="json",
channel_nomenclature="default",
):
"""
Use config creator to generate a processing config file for the synthetic data.
@@ -21,18 +28,21 @@ def create_test_run_config(test_case_id, ds_df, matlab_or_fortran="", save="json
"""
EX, EY, HX, HY, HZ = map_channels(channel_nomenclature)
estimation_engine = "RME"
local_station_id = test_case_id
remote_station_id = ""
reference_channels = []
input_channels = [HX, HY]
output_channels = [HZ, EX, EY]
if test_case_id == "test1r2":
estimation_engine = "RME_RR"
reference_channels = ["hx", "hy"]
reference_channels = [HX, HY]
local_station_id = "test1"
remote_station_id = "test2"
if test_case_id == "test2r1":
estimation_engine = "RME_RR"
reference_channels = ["hx", "hy"]
reference_channels = [HX, HY]
local_station_id = "test2"
remote_station_id = "test1"

@@ -55,8 +65,13 @@ def create_test_run_config(test_case_id, ds_df, matlab_or_fortran="", save="json
cc = ConfigCreator(config_path=CONFIG_PATH)

if test_case_id in ["test1", "test2"]:
p = cc.create_run_processing_object(emtf_band_file=emtf_band_setup_file)
p = cc.create_run_processing_object(
emtf_band_file=emtf_band_setup_file,
input_channels=input_channels,
output_channels=output_channels,
)
p.id = config_id
p.channel_nomenclature = channel_nomenclature
p.stations.from_dataset_dataframe(ds_df)

for decimation in p.decimations:
@@ -70,8 +85,13 @@ def create_test_run_config(test_case_id, ds_df, matlab_or_fortran="", save="json

elif test_case_id in ["test2r1", "test1r2"]:
config_id = f"{config_id}-RR{remote_station_id}"
p = cc.create_run_processing_object(emtf_band_file=emtf_band_setup_file)
p = cc.create_run_processing_object(
emtf_band_file=emtf_band_setup_file,
input_channels=input_channels,
output_channels=output_channels,
)
p.id = config_id
p.channel_nomenclature = channel_nomenclature
p.stations.from_dataset_dataframe(ds_df)

for decimation in p.decimations:
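
A hedged sketch of generating a processing config under the LEMI naming, assuming create_test_run_config returns the Processing object p (its return statement falls outside the hunks shown above); ds_df is a dataset dataframe assumed to already exist:

p = create_test_run_config("test1", ds_df, channel_nomenclature="LEMI12")
assert p.channel_nomenclature == "LEMI12"  # stored on the config and read back in process_mth5
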