From 716038e6c69031f6f6d374e10e0b8964b09cd523 Mon Sep 17 00:00:00 2001
From: Lachlan Grose
Date: Fri, 9 Feb 2024 15:37:15 +1100
Subject: [PATCH] fix: making code compatible with linter

---
 .../interpolators/_interpolator_factory.py | 6 +-
 .../supports/_2d_p1_unstructured.py | 2 +-
 .../modelling/features/_geological_feature.py | 1 -
 .../features/builders/_base_builder.py | 8 +-
 .../features/builders/_fault_builder.py | 74 ++----
 .../builders/_geological_feature_builder.py | 110 ++++-----
 .../builders/_structural_frame_builder.py | 2 +-
 .../features/fault/_fault_segment.py | 2 +-
 .../modelling/features/fold/_fold.py | 5 +-
 .../features/fold/_fold_rotation_angle.py | 3 +-
 .../modelling/features/fold/_foldframe.py | 2 +-
 .../modelling/input/map2loop_processor.py | 3 +-
 .../modelling/input/process_data.py | 28 +--
 .../modelling/input/project_file.py | 4 +-
 .../intrusions/geom_conceptual_models.py | 10 +-
 .../intrusions/geometric_scaling_functions.py | 7 +-
 .../modelling/intrusions/intrusion_builder.py | 230 +++++++-----------
 .../modelling/intrusions/intrusion_feature.py | 50 ++--
 .../intrusions/intrusion_frame_builder.py | 174 +++++--------
 .../intrusions/intrusion_support_functions.py | 9 +-
 LoopStructural/utils/helper.py | 3 -
 LoopStructural/utils/linalg.py | 2 +-
 pyproject.toml | 3 +-
 23 files changed, 271 insertions(+), 467 deletions(-)

diff --git a/LoopStructural/interpolators/_interpolator_factory.py b/LoopStructural/interpolators/_interpolator_factory.py
index dd92a62fd..1d261c1bc 100644
--- a/LoopStructural/interpolators/_interpolator_factory.py
+++ b/LoopStructural/interpolators/_interpolator_factory.py
@@ -20,11 +20,11 @@ def create_interpolator( support=None, buffer: float = 0.2, ): - if interpolatortype == None: + if interpolatortype is None: raise ValueError("No interpolator type specified") - if boundingbox == None: + if boundingbox is None: raise ValueError("No bounding box specified") - if nelements == None: + if nelements is None: raise ValueError("No number of elements specified") if isinstance(interpolatortype, str): interpolatortype = interpolator_string_map[interpolatortype]
diff --git a/LoopStructural/interpolators/supports/_2d_p1_unstructured.py b/LoopStructural/interpolators/supports/_2d_p1_unstructured.py
index c58af1531..3748587ae 100644
--- a/LoopStructural/interpolators/supports/_2d_p1_unstructured.py
+++ b/LoopStructural/interpolators/supports/_2d_p1_unstructured.py
@@ -31,7 +31,7 @@ def evaluate_shape_derivatives(self, locations, elements=None): M[:, :, 1:] = self.vertices[self.elements[elements], :][:, :3, :] points_ = np.ones((locations.shape[0], 3)) points_[:, 1:] = locations - minv = np.linalg.inv(M) + # minv = np.linalg.inv(M) # c = np.einsum("lij,li->lj", minv, points_) vertices = self.nodes[self.elements[tri][:, :3]]
diff --git a/LoopStructural/modelling/features/_geological_feature.py b/LoopStructural/modelling/features/_geological_feature.py
index ffcd8e89e..d46bdaf0b 100644
--- a/LoopStructural/modelling/features/_geological_feature.py
+++ b/LoopStructural/modelling/features/_geological_feature.py
@@ -141,7 +141,6 @@ def evaluate_gradient(self, pos: np.ndarray) -> np.ndarray: v = np.zeros(pos.shape) v[:] = np.nan mask = self._calculate_mask(pos) - original_pos = pos.copy() pos, axis, angle = self._apply_faults(pos) if mask.dtype not in [int, bool]: logger.error(f"Unable to evaluate gradient for {self.name}")
diff --git a/LoopStructural/modelling/features/builders/_base_builder.py b/LoopStructural/modelling/features/builders/_base_builder.py
index
e91a0e8fb..4ec10c079 100644 --- a/LoopStructural/modelling/features/builders/_base_builder.py +++ b/LoopStructural/modelling/features/builders/_base_builder.py @@ -44,9 +44,7 @@ def update(self): self.build(**self.build_arguments) def build(self, **kwargs): - raise NotImplementedError( - "BaseBuilder should be inherited and build method overwritten" - ) + raise NotImplementedError("BaseBuilder should be inherited and build method overwritten") @property def name(self): @@ -66,13 +64,13 @@ def up_to_date(self, callback=None): for f in self.faults: f.builder.up_to_date(callback=callback) # has anything changed in the builder since we built the feature? if so update - if self._up_to_date == False: + if not self._up_to_date: self.update() if callable(callback): callback(1) return # check if the interpolator is up to date, if not solve - if self._interpolator.up_to_date == False: + if not self._interpolator.up_to_date: self.update() if callable(callback): callback(1) diff --git a/LoopStructural/modelling/features/builders/_fault_builder.py b/LoopStructural/modelling/features/builders/_fault_builder.py index 112084c2c..726aef983 100644 --- a/LoopStructural/modelling/features/builders/_fault_builder.py +++ b/LoopStructural/modelling/features/builders/_fault_builder.py @@ -48,15 +48,11 @@ def __init__( self.frame.model = model self.model = model self.origin = np.array([np.nan, np.nan, np.nan]) - self.maximum = np.array( - [np.nan, np.nan, np.nan] - ) # self.model.bounding_box[1, :] + self.maximum = np.array([np.nan, np.nan, np.nan]) # self.model.bounding_box[1, :] # define a maximum area to mesh adding buffer to model # buffer = .2 self.minimum_origin = bounding_box.with_buffer(fault_bounding_box_buffer).origin - self.maximum_maximum = bounding_box.with_buffer( - fault_bounding_box_buffer - ).maximum + self.maximum_maximum = bounding_box.with_buffer(fault_bounding_box_buffer).maximum self.fault_normal_vector = None self.fault_slip_vector = None @@ -68,9 +64,7 @@ def __init__( def update_geometry(self, points): self.origin = np.nanmin(np.array([np.min(points, axis=0), self.origin]), axis=0) - self.maximum = np.nanmax( - np.array([np.max(points, axis=0), self.maximum]), axis=0 - ) + self.maximum = np.nanmax(np.array([np.max(points, axis=0), self.maximum]), axis=0) self.origin[self.origin < self.minimum_origin] = self.minimum_origin[ self.origin < self.minimum_origin ] @@ -117,14 +111,10 @@ def create_data_from_geometry( intermediate_axis : double fault volume radius in the slip direction """ - trace_mask = np.logical_and( - fault_frame_data["coord"] == 0, fault_frame_data["val"] == 0 - ) + trace_mask = np.logical_and(fault_frame_data["coord"] == 0, fault_frame_data["val"] == 0) logger.info(f"There are {np.sum(trace_mask)} points on the fault trace") if np.sum(trace_mask) == 0: - logger.error( - "You cannot model a fault without defining the location of the fault" - ) + logger.error("You cannot model a fault without defining the location of the fault") raise ValueError("There are no points on the fault trace") # get all of the gradient data associated with the fault trace @@ -132,9 +122,7 @@ def create_data_from_geometry( gradient_mask = np.logical_and( fault_frame_data["coord"] == 0, ~np.isnan(fault_frame_data["gz"]) ) - vector_data = fault_frame_data.loc[ - gradient_mask, ["gx", "gy", "gz"] - ].to_numpy() + vector_data = fault_frame_data.loc[gradient_mask, ["gx", "gy", "gz"]].to_numpy() normal_mask = np.logical_and( fault_frame_data["coord"] == 0, ~np.isnan(fault_frame_data["nz"]) ) @@ -196,11 
+184,7 @@ def create_data_from_geometry( trace_mask = np.logical_and( fault_frame_data["coord"] == 0, fault_frame_data["val"] == 0 ) - fault_center = ( - fault_frame_data.loc[trace_mask, ["X", "Y", "Z"]] - .mean(axis=0) - .to_numpy() - ) + fault_center = fault_frame_data.loc[trace_mask, ["X", "Y", "Z"]].mean(axis=0).to_numpy() self.fault_normal_vector = fault_normal_vector self.fault_slip_vector = fault_slip_vector @@ -208,14 +192,10 @@ def create_data_from_geometry( self.fault_centre = fault_center if major_axis is None: fault_trace = fault_frame_data.loc[ - np.logical_and( - fault_frame_data["coord"] == 0, fault_frame_data["val"] == 0 - ), + np.logical_and(fault_frame_data["coord"] == 0, fault_frame_data["val"] == 0), ["X", "Y"], ].to_numpy() - distance = np.linalg.norm( - fault_trace[:, None, :] - fault_trace[None, :, :], axis=2 - ) + distance = np.linalg.norm(fault_trace[:, None, :] - fault_trace[None, :, :], axis=2) if len(distance) == 0 or np.sum(distance) == 0: logger.warning("There is no fault trace for {}".format(self.name)) # this can mean there is only a single data point for @@ -230,15 +210,11 @@ def create_data_from_geometry( logger.warning(f"Fault major axis using map length: {major_axis}") if minor_axis is None: - logger.info( - f"Fault minor axis not set, using half major axis: {major_axis/2}" - ) + logger.info(f"Fault minor axis not set, using half major axis: {major_axis/2}") minor_axis = major_axis / 2.0 if intermediate_axis is None: intermediate_axis = major_axis - logger.info( - f"Fault intermediate axis not set, using major axis: {intermediate_axis}" - ) + logger.info(f"Fault intermediate axis not set, using major axis: {intermediate_axis}") self.fault_minor_axis = minor_axis self.fault_major_axis = major_axis self.fault_intermediate_axis = intermediate_axis @@ -309,9 +285,9 @@ def create_data_from_geometry( fault_frame_data["coord"] == 0, ~np.isnan(fault_frame_data["nx"]), ) - fault_frame_data.loc[mask, ["gx", "gy", "gz"]] = ( - fault_frame_data.loc[mask, ["nx", "ny", "nz"]] - ) + fault_frame_data.loc[mask, ["gx", "gy", "gz"]] = fault_frame_data.loc[ + mask, ["nx", "ny", "nz"] + ] fault_frame_data.loc[mask, ["nx", "ny", "nz"]] = np.nan mask = np.logical_and( @@ -319,10 +295,8 @@ def create_data_from_geometry( ~np.isnan(fault_frame_data["gx"]), ) fault_frame_data.loc[mask, ["gx", "gy", "gz"]] /= minor_axis * 0.5 - if points == False: - logger.info( - "Rescaling fault norm constraint length for fault frame" - ) + if not points: + logger.info("Rescaling fault norm constraint length for fault frame") mask = np.logical_and( fault_frame_data["coord"] == 0, ~np.isnan(fault_frame_data["gx"]), @@ -415,12 +389,8 @@ def create_data_from_geometry( ] strike_vector /= major_axis if intermediate_axis is not None: - fault_depth[0, :] = ( - fault_center[:3] + fault_slip_vector * intermediate_axis - ) - fault_depth[1, :] = ( - fault_center[:3] - fault_slip_vector * intermediate_axis - ) + fault_depth[0, :] = fault_center[:3] + fault_slip_vector * intermediate_axis + fault_depth[1, :] = fault_center[:3] - fault_slip_vector * intermediate_axis fault_frame_data.loc[ len(fault_frame_data), ["X", "Y", "Z", "feature_name", "val", "coord", "w"], @@ -543,9 +513,7 @@ def add_fault_trace_anisotropy(self, w: float = 1.0): anisotropy_feature = AnalyticalGeologicalFeature( vector=vector_data, origin=[0, 0, 0], name="fault_trace_anisotropy" ) - self.builders[0].add_orthogonal_feature( - anisotropy_feature, w=w, region=None, step=1, B=0 - ) + 
self.builders[0].add_orthogonal_feature(anisotropy_feature, w=w, region=None, step=1, B=0) def add_fault_dip_anisotropy(self, dip: np.ndarray, w: float = 1.0): """_summary_ @@ -576,9 +544,7 @@ def add_fault_dip_anisotropy(self, dip: np.ndarray, w: float = 1.0): anisotropy_feature = AnalyticalGeologicalFeature( vector=vector_data, origin=[0, 0, 0], name="fault_dip_anisotropy" ) - self.builders[0].add_orthogonal_feature( - anisotropy_feature, w=w, region=None, step=1, B=0 - ) + self.builders[0].add_orthogonal_feature(anisotropy_feature, w=w, region=None, step=1, B=0) def update(self): for i in range(3): diff --git a/LoopStructural/modelling/features/builders/_geological_feature_builder.py b/LoopStructural/modelling/features/builders/_geological_feature_builder.py index a6af4da88..0666db486 100644 --- a/LoopStructural/modelling/features/builders/_geological_feature_builder.py +++ b/LoopStructural/modelling/features/builders/_geological_feature_builder.py @@ -24,7 +24,7 @@ from ....utils.helper import ( get_data_bounding_box_map as get_data_bounding_box, ) -from ....utils import RegionEverywhere +from ....utils import RegionEverywhere, rng from ....interpolators import DiscreteInterpolator from ....interpolators import InterpolatorFactory @@ -59,11 +59,9 @@ def __init__( nelements=nelements, ) - if issubclass(type(interpolator), GeologicalInterpolator) == False: + if not issubclass(type(interpolator), GeologicalInterpolator): raise TypeError( - "interpolator is {} and must be a GeologicalInterpolator".format( - type(interpolator) - ) + "interpolator is {} and must be a GeologicalInterpolator".format(type(interpolator)) ) self._interpolator = interpolator @@ -157,9 +155,7 @@ def add_orthogonal_feature(self, feature, w=1.0, region=None, step=1, B=0): self._orthogonal_features[feature.name] = [feature, w, region, step, B] self._up_to_date = False - def add_data_to_interpolator( - self, constrained=False, force_constrained=False, **kwargs - ): + def add_data_to_interpolator(self, constrained=False, force_constrained=False, **kwargs): """ Iterates through the list of data and applies any faults active on the data in the order they are added @@ -180,9 +176,7 @@ def add_data_to_interpolator( data = self.data.copy() # convert data locations to numpy array and then update for f in self.faults: - data.loc[:, xyz_names()], axis, angle = f.apply_to_points( - data.loc[:, xyz_names()] - ) + data.loc[:, xyz_names()], axis, angle = f.apply_to_points(data.loc[:, xyz_names()]) # self.check_interpolation_geometry(data.loc[:,xyz_names()].to_numpy()) # Now check whether there are enough constraints for the # interpolator to be able to solve @@ -193,9 +187,9 @@ def add_data_to_interpolator( # Change normals to gradients mask = np.all(~np.isnan(data.loc[:, normal_vec_names()]), axis=1) if mask.shape[0] > 0: - data.loc[mask, gradient_vec_names()] = data.loc[ - mask, normal_vec_names() - ].to_numpy(float) + data.loc[mask, gradient_vec_names()] = data.loc[mask, normal_vec_names()].to_numpy( + float + ) data.loc[mask, normal_vec_names()] = np.nan if self.get_norm_constraints().shape[0] > 0: constrained = True @@ -207,15 +201,13 @@ def add_data_to_interpolator( # change gradient constraints to normal vector constraints mask = np.all(~np.isnan(data.loc[:, gradient_vec_names()]), axis=1) if mask.shape[0] > 0: - data.loc[mask, normal_vec_names()] = data.loc[ - mask, gradient_vec_names() - ].to_numpy(float) + data.loc[mask, normal_vec_names()] = data.loc[mask, gradient_vec_names()].to_numpy( + float + ) data.loc[mask, 
gradient_vec_names()] = np.nan logger.info("Setting gradient points to norm constraints") constrained = True - mask = np.all( - ~np.isnan(data.loc[:, normal_vec_names()].to_numpy(float)), axis=1 - ) + mask = np.all(~np.isnan(data.loc[:, normal_vec_names()].to_numpy(float)), axis=1) if not constrained: logger.error("Not enough constraints for scalar field add more") @@ -223,15 +215,13 @@ def add_data_to_interpolator( mask = ~np.isnan(data.loc[:, val_name()].to_numpy(float)) # add value constraints if mask.shape[0] > 0: - value_data = data.loc[ - mask[:, 0], xyz_names() + val_name() + weight_name() - ].to_numpy(float) + value_data = data.loc[mask[:, 0], xyz_names() + val_name() + weight_name()].to_numpy( + float + ) self.interpolator.set_value_constraints(value_data) # add gradient constraints - mask = np.all( - ~np.isnan(data.loc[:, gradient_vec_names()].to_numpy(float)), axis=1 - ) + mask = np.all(~np.isnan(data.loc[:, gradient_vec_names()].to_numpy(float)), axis=1) if mask.shape[0] > 0: gradient_data = data.loc[ mask, xyz_names() + gradient_vec_names() + weight_name() @@ -239,19 +229,15 @@ def add_data_to_interpolator( self.interpolator.set_gradient_constraints(gradient_data) # add normal vector data - mask = np.all( - ~np.isnan(data.loc[:, normal_vec_names()].to_numpy(float)), axis=1 - ) + mask = np.all(~np.isnan(data.loc[:, normal_vec_names()].to_numpy(float)), axis=1) if mask.shape[0] > 0: - normal_data = data.loc[ - mask, xyz_names() + normal_vec_names() + weight_name() - ].to_numpy(float) + normal_data = data.loc[mask, xyz_names() + normal_vec_names() + weight_name()].to_numpy( + float + ) self.interpolator.set_normal_constraints(normal_data) # add tangent data - mask = np.all( - ~np.isnan(data.loc[:, tangent_vec_names()].to_numpy(float)), axis=1 - ) + mask = np.all(~np.isnan(data.loc[:, tangent_vec_names()].to_numpy(float)), axis=1) if mask.shape[0] > 0: tangent_data = data.loc[ mask, xyz_names() + tangent_vec_names() + weight_name() @@ -268,9 +254,7 @@ def add_data_to_interpolator( # add inequality constraints mask = np.all(~np.isnan(data.loc[:, inequality_name()].to_numpy(float)), axis=1) if mask.shape[0] > 0: - inequality_data = data.loc[mask, xyz_names() + inequality_name()].to_numpy( - float - ) + inequality_data = data.loc[mask, xyz_names() + inequality_name()].to_numpy(float) self.interpolator.set_inequality_constraints(inequality_data) self.data_added = True @@ -287,7 +271,7 @@ def install_gradient_constraint(self): vector[norm > 0] /= norm[norm > 0, None] element_idx = np.arange(self.interpolator.support.n_elements) - np.random.shuffle(element_idx) + rng.shuffle(element_idx) self.interpolator.add_gradient_orthogonal_constraints( self.interpolator.support.barycentre[element_idx[::step], :], vector[element_idx[::step], :], @@ -335,13 +319,11 @@ def get_gradient_constraints(self): ------- numpy array """ - mask = np.all( - ~np.isnan(self.data.loc[:, gradient_vec_names()].to_numpy(float)), axis=1 - ) + mask = np.all(~np.isnan(self.data.loc[:, gradient_vec_names()].to_numpy(float)), axis=1) if mask.shape[0] > 0: - return self.data.loc[ - mask, xyz_names() + gradient_vec_names() + weight_name() - ].to_numpy(float) + return self.data.loc[mask, xyz_names() + gradient_vec_names() + weight_name()].to_numpy( + float + ) else: return np.zeros((0, 7)) @@ -352,13 +334,11 @@ def get_tangent_constraints(self): ------- numpy array """ - mask = np.all( - ~np.isnan(self.data.loc[:, tangent_vec_names()].to_numpy(float)), axis=1 - ) + mask = np.all(~np.isnan(self.data.loc[:, 
tangent_vec_names()].to_numpy(float)), axis=1) if mask.shape[0] > 0: - return self.data.loc[ - mask, xyz_names() + tangent_vec_names() + weight_name() - ].to_numpy(float) + return self.data.loc[mask, xyz_names() + tangent_vec_names() + weight_name()].to_numpy( + float + ) else: return np.zeros((0, 7)) @@ -370,13 +350,11 @@ def get_norm_constraints(self): ------- numpy array """ - mask = np.all( - ~np.isnan(self.data.loc[:, normal_vec_names()].to_numpy(float)), axis=1 - ) + mask = np.all(~np.isnan(self.data.loc[:, normal_vec_names()].to_numpy(float)), axis=1) if mask.shape[0] > 0: - return self.data.loc[ - mask, xyz_names() + normal_vec_names() + weight_name() - ].to_numpy(float) + return self.data.loc[mask, xyz_names() + normal_vec_names() + weight_name()].to_numpy( + float + ) else: return np.zeros((0, 7)) @@ -393,13 +371,11 @@ def get_orientation_constraints(self): return np.vstack([gradient_constraints, normal_constraints]) def get_interface_constraints(self): - mask = np.all( - ~np.isnan(self.data.loc[:, interface_name()].to_numpy(float)), axis=1 - ) + mask = np.all(~np.isnan(self.data.loc[:, interface_name()].to_numpy(float)), axis=1) if mask.shape[0] > 0: - return self.data.loc[ - mask, xyz_names() + interface_name() + weight_name() - ].to_numpy(float) + return self.data.loc[mask, xyz_names() + interface_name() + weight_name()].to_numpy( + float + ) else: return np.zeros((0, 5)) @@ -439,9 +415,7 @@ def set_interpolation_geometry(self, origin, maximum, rotation=None): self._up_to_date = False while self.interpolator.nx < 100: - self.interpolator.support.step_vector = ( - self.interpolator.support.step_vector * 0.9 - ) + self.interpolator.support.step_vector = self.interpolator.support.step_vector * 0.9 def check_interpolation_geometry(self, data): """Check the interpolation support geometry @@ -449,9 +423,7 @@ def check_interpolation_geometry(self, data): origin = self.interpolator.support.origin maximum = self.interpolator.support.maximum print(origin, maximum) - origin[origin < np.min(data, axis=0)] = np.min(data, axis=0)[ - origin < np.min(data, axis=0) - ] + origin[origin < np.min(data, axis=0)] = np.min(data, axis=0)[origin < np.min(data, axis=0)] maximum[maximum < np.max(data, axis=0)] = np.max(data, axis=0)[ maximum < np.max(data, axis=0) ] diff --git a/LoopStructural/modelling/features/builders/_structural_frame_builder.py b/LoopStructural/modelling/features/builders/_structural_frame_builder.py index 12befaa5e..1ab7dcc9e 100644 --- a/LoopStructural/modelling/features/builders/_structural_frame_builder.py +++ b/LoopStructural/modelling/features/builders/_structural_frame_builder.py @@ -195,7 +195,7 @@ def setup(self, w1=1.0, w2=1.0, w3=1.0, **kwargs): logger.info(f"Building {self.name} coordinate 0") kwargs["regularisation"] = regularisation[0] self.builders[0].build_arguments = kwargs - fold = kwargs.pop("fold", None) + kwargs.pop("fold", None) # make sure that all of the coordinates are using the same region if len(self.builders[2].data) > 0: diff --git a/LoopStructural/modelling/features/fault/_fault_segment.py b/LoopStructural/modelling/features/fault/_fault_segment.py index 34716251e..28c2996f0 100644 --- a/LoopStructural/modelling/features/fault/_fault_segment.py +++ b/LoopStructural/modelling/features/fault/_fault_segment.py @@ -360,7 +360,7 @@ def apply_to_points(self, points, reverse=False): # multiply displacement vector by the displacement magnitude for # step g *= (1.0 / steps) * d[:, None] - prev_p = newp[mask, :].copy() + #newp[mask, :].copy() # apply 
displacement newp[mask, :] += g # axis[mask, i, :] = np.cross(prev_p, newp[mask, :], axisa=1, axisb=1) diff --git a/LoopStructural/modelling/features/fold/_fold.py b/LoopStructural/modelling/features/fold/_fold.py index 3cbdc592e..4060e2c1d 100644 --- a/LoopStructural/modelling/features/fold/_fold.py +++ b/LoopStructural/modelling/features/fold/_fold.py @@ -1,4 +1,3 @@ - import numpy as np from ....utils import getLogger @@ -110,11 +109,11 @@ def get_deformed_orientation(self, points): # if its less than 0 then inverse dgz d = np.einsum("ij,ik->i", fold_direction, fold_axis) - if self.invert_norm == True: + if self.invert_norm: new_dgz = -dgz[mask][d[mask] < 0] return fold_direction, fold_axis, new_dgz - elif self.invert_norm == False: + elif not self.invert_norm: return fold_direction, fold_axis, dgz else: logger.warning("invert fold frame param not valid. Defaulting to false.") diff --git a/LoopStructural/modelling/features/fold/_fold_rotation_angle.py b/LoopStructural/modelling/features/fold/_fold_rotation_angle.py index 41bb36069..d626deca1 100644 --- a/LoopStructural/modelling/features/fold/_fold_rotation_angle.py +++ b/LoopStructural/modelling/features/fold/_fold_rotation_angle.py @@ -1,4 +1,3 @@ - import numpy as np from scipy.optimize import curve_fit @@ -48,7 +47,7 @@ def fit_fourier_series( """ if self.svario is None: self.svario = SVariogram(self.fold_frame_coordinate, self.rotation_angle) - if skip_variogram == False: + if not skip_variogram: self.svario.calc_semivariogram(lags=lags, nlag=nlag, lag=lag) if wl is None: wl = self.svario.find_wavelengths(lags=lags, nlag=nlag, lag=lag) diff --git a/LoopStructural/modelling/features/fold/_foldframe.py b/LoopStructural/modelling/features/fold/_foldframe.py index 257194858..3210d2fd3 100644 --- a/LoopStructural/modelling/features/fold/_foldframe.py +++ b/LoopStructural/modelling/features/fold/_foldframe.py @@ -72,7 +72,7 @@ def calculate_fold_axis_rotation(self, feature_builder, fold_axis=None): far = np.einsum("ij,ij->i", projected_l1, projected_s1gyg) far = np.rad2deg(np.arccos(far)) # scalar triple product - stp = np.einsum("ij,ij->i", np.cross(l1, s1gyg, axisa=1, axisb=1), s1g) + #np.einsum("ij,ij->i", np.cross(l1, s1gyg, axisa=1, axisb=1), s1g) # check bounds far -= 90 # far[stp < 0] = 360.- far[stp < 0] diff --git a/LoopStructural/modelling/input/map2loop_processor.py b/LoopStructural/modelling/input/map2loop_processor.py index b7e4261d0..f49c2e6c1 100644 --- a/LoopStructural/modelling/input/map2loop_processor.py +++ b/LoopStructural/modelling/input/map2loop_processor.py @@ -67,7 +67,6 @@ def __init__(self, m2l_directory, use_thickness=None): fault_locations.rename(columns={"formation": "fault_name"}, inplace=True) contacts.rename(columns={"formation": "name"}, inplace=True) orientations.rename(columns={"formation": "name"}, inplace=True) - intrusions = None fault_stratigraphy = None # make sure supergroups are in the groups dataframe @@ -115,7 +114,7 @@ def __init__(self, m2l_directory, use_thickness=None): for strat in fault_strat["supergroup"].unique(): mask = (fault_strat.loc[fault_strat["supergroup"] == strat, :] == 1).to_numpy() fault_stratigraphy[strat] = fault_strat.columns[mask[0, :]].tolist() - ip = super().__init__( + super().__init__( contacts, orientations, stratigraphic_order, diff --git a/LoopStructural/modelling/input/process_data.py b/LoopStructural/modelling/input/process_data.py index f2dc587b3..3c8ea6dfe 100644 --- a/LoopStructural/modelling/input/process_data.py +++ 
b/LoopStructural/modelling/input/process_data.py @@ -1,8 +1,7 @@ import pandas as pd import numpy as np from .fault_network import FaultNetwork -from ...utils import strikedip2vector -from ...utils import getLogger +from ...utils import getLogger, rng, strikedip2vector logger = getLogger(__name__) @@ -134,13 +133,13 @@ def stratigraphic_order(self, stratigraphic_order): if stratigraphic_order is None: logger.warning("No stratigraphic order provided") return - if isinstance(stratigraphic_order[0][1], list) == False: + if not isinstance(stratigraphic_order[0][1], list): raise TypeError( "Stratigraphic_order must of the format [[('group_name',['unit1','unit2']),('group_name2',['unit3','unit4'])]]" ) - if isinstance(stratigraphic_order, list) == False: + if not isinstance(stratigraphic_order, list): raise TypeError("Stratigraphic_order must be a list") - if isinstance(stratigraphic_order[0][1][0], str) == False: + if not isinstance(stratigraphic_order[0][1][0], str): raise TypeError("Stratigraphic_order elements must be strings") self.stratigraphy = True self._stratigraphic_order = stratigraphic_order @@ -154,7 +153,7 @@ def colours(self, colours): if colours is None: self._colours = {} for s in self.stratigraphic_name: - self._colours[s] = np.random.random(3) + self._colours[s] = rng.random(3) else: self._colours = colours @@ -222,7 +221,7 @@ def stratigraphy_cmap(self, supergroup="supergroup_0"): supergroup not in stratigraphic column """ try: - from matplotlib import cm + # from matplotlib import cm from matplotlib import colors except ImportError: logger.error("matplotlib is needed for creating a custom colourmap") @@ -232,10 +231,9 @@ def stratigraphy_cmap(self, supergroup="supergroup_0"): raise ValueError(f"supergroup {supergroup} not in stratigraphic column") colours = [] boundaries = [] - data = [] vmax = -99999.0 vmin = 99999.0 - for u, v in self.stratigraphic_column[supergroup].items(): + for v in self.stratigraphic_column[supergroup].values(): colours.append(v["colour"]) boundaries.append(v["min"]) vmax = np.max([vmax, v["max"]]) @@ -261,7 +259,7 @@ def foliation_properties(self): @foliation_properties.setter def foliation_properties(self, foliation_properties): - if self.stratigraphic_order == None: + if self.stratigraphic_order is None: return if foliation_properties is None: for k in self.stratigraphic_column.keys(): @@ -292,9 +290,9 @@ def fault_properties(self, fault_properties): pts = self.fault_locations.loc[ self.fault_locations["feature_name"] == fname, ["X", "Y", "Z"] ] - fault_properties.loc[ - fname, ["centreEasting", "centreNorthing", "centreAltitude"] - ] = np.nanmean(pts, axis=0) + fault_properties.loc[fname, ["centreEasting", "centreNorthing", "centreAltitude"]] = ( + np.nanmean(pts, axis=0) + ) if ( "avgNormalEasting" not in fault_properties.columns or "avgNormalNorthing" not in fault_properties.columns @@ -423,7 +421,7 @@ def stratigraphic_name(self): names = [] if self.stratigraphic_order is None: return names - for name, sg in self.stratigraphic_order: + for _name, sg in self.stratigraphic_order: for g in sg: names.append(g) return names @@ -438,7 +436,7 @@ def _stratigraphic_value(self): keys are unit name, value is cumulative thickness/implicit function value """ stratigraphic_value = {} - for name, sg in self.stratigraphic_order: + for _name, sg in self.stratigraphic_order: value = 0.0 # reset for each supergroup for g in reversed(sg): if g not in self.thicknesses: diff --git a/LoopStructural/modelling/input/project_file.py 
b/LoopStructural/modelling/input/project_file.py index 008e409da..8481250b3 100644 --- a/LoopStructural/modelling/input/project_file.py +++ b/LoopStructural/modelling/input/project_file.py @@ -14,7 +14,7 @@ class LoopProjectfileProcessor(ProcessInputData): def __init__(self, projectfile, use_thickness=None): - if isinstance(projectfile, ProjectFile) == False: + if not isinstance(projectfile, ProjectFile): raise LoopTypeError("projectife must be of type ProjectFile") column_map = {"easting": "X", "northing": "Y", "altitude": "Z"} self.projectfile = projectfile @@ -79,6 +79,6 @@ def __init__(self, projectfile, use_thickness=None): intrusions=None, use_thickness=use_thickness, origin=self.projectfile.origin, - maximum=self.projectfile.maximum + maximum=self.projectfile.maximum, # fault_edge_properties=fault_edge_properties ) diff --git a/LoopStructural/modelling/intrusions/geom_conceptual_models.py b/LoopStructural/modelling/intrusions/geom_conceptual_models.py index bd44fa8fb..b4a0d3e52 100644 --- a/LoopStructural/modelling/intrusions/geom_conceptual_models.py +++ b/LoopStructural/modelling/intrusions/geom_conceptual_models.py @@ -23,13 +23,13 @@ def ellipse_function( else: - if minP == None: + if minP is None: minP = lateral_contact_data["coord1"].min() - if maxP == None: + if maxP is None: maxP = lateral_contact_data["coord1"].max() - if minS == None: + if minS is None: minS = lateral_contact_data["coord2"].abs().min() - if maxS == None: + if maxS is None: maxS = lateral_contact_data["coord2"].max() a = (maxP - minP) / 2 @@ -72,7 +72,7 @@ def constant_function( if othercontact_data.empty: return mean_growth - if mean_growth == None: + if mean_growth is None: mean_growth = othercontact_data.loc[:, 'coord1'].mean() data_ps = np.array([othercontact_data.loc[:, 'coord1'], othercontact_data.loc[:, 'coord2']]).T diff --git a/LoopStructural/modelling/intrusions/geometric_scaling_functions.py b/LoopStructural/modelling/intrusions/geometric_scaling_functions.py index bb6f52000..643fde0c6 100644 --- a/LoopStructural/modelling/intrusions/geometric_scaling_functions.py +++ b/LoopStructural/modelling/intrusions/geometric_scaling_functions.py @@ -1,11 +1,10 @@ - # import scipy as sc import scipy.stats as sct import numpy as np import pandas as pd -from ...utils import getLogger +from ...utils import getLogger, rng logger = getLogger(__name__) @@ -83,8 +82,8 @@ def thickness_from_geometric_scaling(length: float, intrusion_type: str) -> floa n_realizations = 10000 maxT = 0 - a = sct.norm.ppf(np.random.rand(n_realizations), loc=a_avg, scale=a_stdv) - b = sct.norm.ppf(np.random.rand(n_realizations), loc=b_avg, scale=b_stdv) + a = sct.norm.ppf(rng.random(n_realizations), loc=a_avg, scale=a_stdv) + b = sct.norm.ppf(rng.random(n_realizations), loc=b_avg, scale=b_stdv) maxT = b * np.power(length, a) maxT[maxT < 0] = None mean_t = np.nanmean(maxT) diff --git a/LoopStructural/modelling/intrusions/intrusion_builder.py b/LoopStructural/modelling/intrusions/intrusion_builder.py index 347593b85..b7a7cd849 100644 --- a/LoopStructural/modelling/intrusions/intrusion_builder.py +++ b/LoopStructural/modelling/intrusions/intrusion_builder.py @@ -6,7 +6,7 @@ from ..features.builders import BaseBuilder - +from ...utils import rng from .geometric_scaling_functions import * logger = getLogger(__name__) @@ -80,7 +80,7 @@ def create_grid_for_evaluation(self, spacing=None): ------- """ - if spacing == None: + if spacing is None: spacing = self.model.nsteps grid_points = self.model.regular_grid(spacing, shuffle=False) @@ -118,26 
+118,20 @@ def create_geometry_using_geometric_scaling( self, geometric_scaling_parameters, reference_contact_data ): - intrusion_type = geometric_scaling_parameters.get("intrusion_type", None) + geometric_scaling_parameters.get("intrusion_type", None) intrusion_length = geometric_scaling_parameters.get("intrusion_length", None) - inflation_vector = geometric_scaling_parameters.get( - "inflation_vector", np.array([[0, 0, 1]]) - ) + geometric_scaling_parameters.get("inflation_vector", np.array([[0, 0, 1]])) thickness = geometric_scaling_parameters.get("thickness", None) if ( self.intrusion_frame.builder.intrusion_network_contact == "floor" or self.intrusion_frame.builder.intrusion_network_contact == "base" ): - inflation_vector = geometric_scaling_parameters.get( - "inflation_vector", np.array([[0, 0, 1]]) - ) + geometric_scaling_parameters.get("inflation_vector", np.array([[0, 0, 1]])) else: - inflation_vector = geometric_scaling_parameters.get( - "inflation_vector", np.array([[0, 0, -1]]) - ) + geometric_scaling_parameters.get("inflation_vector", np.array([[0, 0, -1]])) - if intrusion_length == None and thickness == None: + if intrusion_length is None and thickness is None: raise ValueError( "No {} data. Add intrusion_type and intrusion_length (or thickness) to geometric_scaling_parameters dictionary".format( self.intrusion_frame.builder.intrusion_other_contact @@ -146,24 +140,26 @@ def create_geometry_using_geometric_scaling( else: # -- create synthetic data to constrain interpolation using geometric scaling estimated_thickness = thickness - if estimated_thickness == None: - estimated_thickness = thickness_from_geometric_scaling( - intrusion_length, intrusion_type - ) + if estimated_thickness is None: + raise Exception('Not implemented') + # estimated_thickness = thickness_from_geometric_scaling( + # intrusion_length, intrusion_type + # ) print( "Building tabular intrusion using geometric scaling parameters: estimated thicknes = {} meters".format( round(estimated_thickness) ) ) - ( - other_contact_data_temp, - other_contact_data_xyz_temp, - ) = contact_pts_using_geometric_scaling( - estimated_thickness, reference_contact_data, inflation_vector - ) + raise Exception('Not implemented') + # ( + # other_contact_data_temp, + # other_contact_data_xyz_temp, + # ) = contact_pts_using_geometric_scaling( + # estimated_thickness, reference_contact_data, inflation_vector + # ) - return other_contact_data_temp + # return other_contact_data_temp def prepare_data(self, geometric_scaling_parameters): """Prepare the data to compute distance thresholds along the frame coordinates. 
@@ -182,25 +178,19 @@ def prepare_data(self, geometric_scaling_parameters): intrusion_data = self.data.copy() data_xyz = intrusion_data.loc[:, ["X", "Y", "Z"]].to_numpy() - intrusion_data.loc[:, "coord0"] = self.intrusion_frame[0].evaluate_value( - data_xyz - ) - intrusion_data.loc[:, "coord1"] = self.intrusion_frame[1].evaluate_value( - data_xyz - ) - intrusion_data.loc[:, "coord2"] = self.intrusion_frame[2].evaluate_value( - data_xyz - ) + intrusion_data.loc[:, "coord0"] = self.intrusion_frame[0].evaluate_value(data_xyz) + intrusion_data.loc[:, "coord1"] = self.intrusion_frame[1].evaluate_value(data_xyz) + intrusion_data.loc[:, "coord2"] = self.intrusion_frame[2].evaluate_value(data_xyz) # -- separate data between both sides of the intrusion, using intrusion axis (i.e., coord2 = 0) data_minside = intrusion_data[ - (intrusion_data["intrusion_side"] == True) & (intrusion_data["coord2"] <= 0) + (intrusion_data["intrusion_side"]) & (intrusion_data["coord2"] <= 0) ].copy() data_minside.reset_index(inplace=True, drop=True) data_maxside = intrusion_data[ - (intrusion_data["intrusion_side"] == True) & (intrusion_data["coord2"] > 0) + (intrusion_data["intrusion_side"]) & (intrusion_data["coord2"] > 0) ].copy() data_maxside.reset_index(inplace=True, drop=True) @@ -223,25 +213,21 @@ def prepare_data(self, geometric_scaling_parameters): # -- separate data between roof and floor data - intrusion_network_data_xyz = ( - self.intrusion_frame.builder.intrusion_network_data.loc[ - :, ["X", "Y", "Z"] - ].to_numpy() + intrusion_network_data_xyz = self.intrusion_frame.builder.intrusion_network_data.loc[ + :, ["X", "Y", "Z"] + ].to_numpy() + intrusion_network_data = self.intrusion_frame.builder.intrusion_network_data.loc[ + :, ["X", "Y", "Z"] + ].copy() + intrusion_network_data.loc[:, "coord0"] = self.intrusion_frame[0].evaluate_value( + intrusion_network_data_xyz ) - intrusion_network_data = ( - self.intrusion_frame.builder.intrusion_network_data.loc[ - :, ["X", "Y", "Z"] - ].copy() + intrusion_network_data.loc[:, "coord1"] = self.intrusion_frame[1].evaluate_value( + intrusion_network_data_xyz + ) + intrusion_network_data.loc[:, "coord2"] = self.intrusion_frame[2].evaluate_value( + intrusion_network_data_xyz ) - intrusion_network_data.loc[:, "coord0"] = self.intrusion_frame[ - 0 - ].evaluate_value(intrusion_network_data_xyz) - intrusion_network_data.loc[:, "coord1"] = self.intrusion_frame[ - 1 - ].evaluate_value(intrusion_network_data_xyz) - intrusion_network_data.loc[:, "coord2"] = self.intrusion_frame[ - 2 - ].evaluate_value(intrusion_network_data_xyz) intrusion_network_data.reset_index(inplace=True) # -- if no data points for roof or floor, use geometric scaling to create points for SGS @@ -253,28 +239,20 @@ def prepare_data(self, geometric_scaling_parameters): other_contact_data = intrusion_network_data else: - other_contact_data_temp1 = ( - self.intrusion_frame.builder.other_contact_data - ) + other_contact_data_temp1 = self.intrusion_frame.builder.other_contact_data other_contact_data_temp2 = self.create_geometry_using_geometric_scaling( geometric_scaling_parameters, intrusion_network_data ) - other_contact_data = pd.concat( - [other_contact_data_temp1, other_contact_data_temp2] - ) + other_contact_data = pd.concat([other_contact_data_temp1, other_contact_data_temp2]) - other_contact_data_xyz = other_contact_data.loc[ - :, ["X", "Y", "Z"] - ].to_numpy() + other_contact_data_xyz = other_contact_data.loc[:, ["X", "Y", "Z"]].to_numpy() else: self.thickness_data = True - other_contact_data_xyz = ( - 
self.intrusion_frame.builder.other_contact_data.loc[ - :, ["X", "Y", "Z"] - ].to_numpy() - ) + other_contact_data_xyz = self.intrusion_frame.builder.other_contact_data.loc[ + :, ["X", "Y", "Z"] + ].to_numpy() other_contact_data = self.intrusion_frame.builder.other_contact_data.loc[ :, ["X", "Y", "Z"] ].copy() @@ -299,27 +277,22 @@ def set_conceptual_models_parameters(self): representing the conceptual models of the intrusion geometry. """ - if ( - callable(self.lateral_extent_model) == False - or callable(self.vertical_extent_model) == False - ): - raise ValueError( - "lateral_extent_model and vertical_extent_model must be functions" - ) + if not callable(self.lateral_extent_model) or not callable(self.vertical_extent_model): + raise ValueError("lateral_extent_model and vertical_extent_model must be functions") grid_points_coord1 = self.evaluation_grid[2] modelcover, minP, maxP, minL, maxL = self.lateral_extent_model() mean_c0 = self.vertical_extent_model() - if minL == None: + if minL is None: minL = min( self.vertical_contact_data[0]["coord2"].min(), self.vertical_contact_data[1]["coord2"].min(), self.lateral_contact_data[0]["coord2"].min(), ) - if maxL == None: + if maxL is None: maxL = max( self.vertical_contact_data[0]["coord2"].max(), self.vertical_contact_data[1]["coord2"].max(), @@ -332,17 +305,17 @@ def set_conceptual_models_parameters(self): if minL > 0 and maxL > 0: minL = maxL * -1 - if modelcover == True: + if modelcover is True: minP = np.nanmin(grid_points_coord1) maxP = np.nanmax(grid_points_coord1) else: - if minP == None: + if minP is None: minP = min( self.vertical_contact_data[0]["coord1"].min(), self.vertical_contact_data[1]["coord1"].min(), self.lateral_contact_data[0]["coord1"].min(), ) - if maxP == None: + if maxP is None: maxP = max( self.vertical_contact_data[0]["coord1"].max(), self.vertical_contact_data[1]["coord1"].max(), @@ -350,7 +323,7 @@ def set_conceptual_models_parameters(self): ) # extra parameters for growth - if mean_c0 == None: + if mean_c0 is None: mean_growth = self.vertical_contact_data[1].loc[:, "coord0"].mean() else: mean_growth = mean_c0 @@ -358,21 +331,23 @@ def set_conceptual_models_parameters(self): maxG = self.vertical_contact_data[1]["coord0"].max() coord_PL_for_maxG = ( self.vertical_contact_data[1][ - self.vertical_contact_data[1].coord0 - == self.vertical_contact_data[1].coord0.max() + self.vertical_contact_data[1].coord0 == self.vertical_contact_data[1].coord0.max() ] .loc[:, ["coord1", "coord2"]] .to_numpy() ) - self.conceptual_model_parameters["minP"] = minP self.conceptual_model_parameters["maxP"] = maxP self.conceptual_model_parameters["minL"] = minL self.conceptual_model_parameters["maxL"] = maxL self.conceptual_model_parameters["model_cover"] = modelcover self.conceptual_model_parameters["mean_growth"] = mean_growth - self.conceptual_model_parameters["vertex"] = [coord_PL_for_maxG[0][0], coord_PL_for_maxG[0][1], maxG] + self.conceptual_model_parameters["vertex"] = [ + coord_PL_for_maxG[0][0], + coord_PL_for_maxG[0][1], + maxG, + ] def set_data_for_lateral_thresholds(self): """ @@ -395,14 +370,12 @@ def set_data_for_lateral_thresholds(self): minL = self.conceptual_model_parameters.get("minL") maxL = self.conceptual_model_parameters.get("maxL") - if self.width_data[0] == False: # i.e., no lateral data for side L<0 + if self.width_data[0] is False: # i.e., no lateral data for side L<0 print( "Not enought lateral data to constrain side L<0. 
Conceptual model will be used to constrain lateral extent" ) - random_p = pd.DataFrame( - np.random.uniform(minP, maxP, 10), columns=["coord1"] - ) + random_p = pd.DataFrame(rng.uniform(minP, maxP, 10), columns=["coord1"]) conceptual_l = self.lateral_extent_model( lateral_contact_data=random_p, minP=minP, @@ -452,22 +425,18 @@ def set_data_for_lateral_thresholds(self): data_residual_minL = ( data_conceptual_minL[:, 1] - data_minL.loc[:, "coord2"] ).to_numpy() - data_for_min_L = data_minL.loc[ - :, ["X", "Y", "Z", "coord0", "coord1", "coord2"] - ].copy() + data_for_min_L = data_minL.loc[:, ["X", "Y", "Z", "coord0", "coord1", "coord2"]].copy() data_for_min_L.loc[:, "l_residual"] = data_residual_minL data_for_min_L.loc[:, "l_conceptual"] = data_conceptual_minL[:, 1] data_for_min_L.reset_index(inplace=True) # data_for_min_L.loc[:, "ref_coord"] = 0 - if self.width_data[1] == False: # i.e., no lateral data for side L>0 + if not self.width_data[1]: # i.e., no lateral data for side L>0 print( "Not enought lateral data to constrain side L>0. Conceptual model will be used to constrain lateral extent" ) - random_p = pd.DataFrame( - np.random.uniform(minP, maxP, 10), columns=["coord1"] - ) + random_p = pd.DataFrame(rng.uniform(minP, maxP, 10), columns=["coord1"]) conceptual_l = self.lateral_extent_model( lateral_contact_data=random_p, minP=minP, @@ -516,9 +485,7 @@ def set_data_for_lateral_thresholds(self): data_residual_maxL = ( data_conceptual_maxL[:, 0] - data_maxL.loc[:, "coord2"] ).to_numpy() - data_for_max_L = data_maxL.loc[ - :, ["X", "Y", "Z", "coord0", "coord1", "coord2"] - ].copy() + data_for_max_L = data_maxL.loc[:, ["X", "Y", "Z", "coord0", "coord1", "coord2"]].copy() data_for_max_L.loc[:, "l_residual"] = data_residual_maxL data_for_max_L.loc[:, "l_conceptual"] = data_conceptual_maxL[:, 0] data_for_max_L.reset_index(inplace=True) @@ -527,34 +494,27 @@ def set_data_for_lateral_thresholds(self): # check if roof or floor data outside of conceptual model. # if so, add as constraints to conceptual model. 
- vertical_data = pd.concat( - [self.vertical_contact_data[0], self.vertical_contact_data[1]] - ) - vertical_data.loc[ - :, ["conceptual_maxside", "conceptual_minside"] - ] = self.lateral_extent_model( - lateral_contact_data=vertical_data, - minP=minP, - maxP=maxP, - minS=minL, - maxS=maxL, + vertical_data = pd.concat([self.vertical_contact_data[0], self.vertical_contact_data[1]]) + vertical_data.loc[:, ["conceptual_maxside", "conceptual_minside"]] = ( + self.lateral_extent_model( + lateral_contact_data=vertical_data, + minP=minP, + maxP=maxP, + minS=minL, + maxS=maxL, + ) ) data_minL_temp = vertical_data[vertical_data["coord2"] < 0].copy() data_for_min_L_ = ( - data_minL_temp[ - data_minL_temp["coord2"] < data_minL_temp["conceptual_minside"] - ] + data_minL_temp[data_minL_temp["coord2"] < data_minL_temp["conceptual_minside"]] .loc[:, ["X", "Y", "Z", "coord0", "coord1", "coord2", "conceptual_minside"]] .copy() ) data_for_min_L_.loc[:, "l_residual"] = ( - data_for_min_L_.loc[:, "conceptual_minside"] - - data_for_min_L_.loc[:, "coord2"] - ) - data_for_min_L_.rename( - columns={"conceptual_minside": "l_conceptual"}, inplace=True + data_for_min_L_.loc[:, "conceptual_minside"] - data_for_min_L_.loc[:, "coord2"] ) + data_for_min_L_.rename(columns={"conceptual_minside": "l_conceptual"}, inplace=True) data_for_min_L_.reset_index(inplace=True) data_for_min_L_.drop_duplicates( subset=[ @@ -570,28 +530,20 @@ def set_data_for_lateral_thresholds(self): inplace=True, ) - if ( - len(data_for_min_L_) > 0 - and self.constrain_sides_with_rooffloor_data == True - ): + if len(data_for_min_L_) > 0 and self.constrain_sides_with_rooffloor_data: print("adding data from roof/floor to constrain L<0") data_for_min_L = pd.concat([data_for_min_L, data_for_min_L_]) data_maxL_temp = vertical_data[vertical_data["coord2"] >= 0].copy() data_for_max_L_ = ( - data_maxL_temp[ - data_maxL_temp["coord2"] > data_maxL_temp["conceptual_maxside"] - ] + data_maxL_temp[data_maxL_temp["coord2"] > data_maxL_temp["conceptual_maxside"]] .loc[:, ["X", "Y", "Z", "coord0", "coord1", "coord2", "conceptual_maxside"]] .copy() ) data_for_max_L_.loc[:, "l_residual"] = ( - data_for_max_L_.loc[:, "conceptual_maxside"] - - data_for_max_L_.loc[:, "coord2"] - ) - data_for_max_L_.rename( - columns={"conceptual_maxside": "l_conceptual"}, inplace=True + data_for_max_L_.loc[:, "conceptual_maxside"] - data_for_max_L_.loc[:, "coord2"] ) + data_for_max_L_.rename(columns={"conceptual_maxside": "l_conceptual"}, inplace=True) data_for_max_L_.reset_index(inplace=True) data_for_max_L_.drop_duplicates( subset=[ @@ -607,10 +559,7 @@ def set_data_for_lateral_thresholds(self): inplace=True, ) - if ( - len(data_for_max_L_) > 0 - and self.constrain_sides_with_rooffloor_data == True - ): + if len(data_for_max_L_) > 0 and self.constrain_sides_with_rooffloor_data: print("adding data from roof/floor to constrain L>0") data_for_max_L = pd.concat([data_for_max_L, data_for_max_L_]) @@ -656,16 +605,14 @@ def set_data_for_vertical_thresholds(self): maxS=maxL, vertex=vertex, ) - data_residual_G = ( - data_conceptual_G[:, 1] - other_contact_data.loc[:, "coord0"] - ).to_numpy() + data_residual_G = (data_conceptual_G[:, 1] - other_contact_data.loc[:, "coord0"]).to_numpy() inputsimdata_maxG = other_contact_data.loc[ :, ["X", "Y", "Z", "coord0", "coord1", "coord2"] ].copy() inputsimdata_maxG.loc[:, "g_residual"] = data_residual_G inputsimdata_maxG.loc[:, "g_conceptual"] = data_conceptual_G[:, 1] - if self.thickness_data == False: + if not self.thickness_data: 
inputsimdata_maxG.loc[:, "g_residual"] = 0 p = np.linspace(minP, maxP, 10) @@ -673,7 +620,7 @@ def set_data_for_vertical_thresholds(self): pp, ll = np.meshgrid(p, l) pl = np.array([pp.flatten(), ll.flatten()]).T - np.random.shuffle(pl) + rng.shuffle(pl) inputsimdata_maxG_ = pd.DataFrame(pl[:30, :], columns=["coord1", "coord2"]) data_conceptual_G_ = self.vertical_extent_model( @@ -689,17 +636,13 @@ def set_data_for_vertical_thresholds(self): inputsimdata_maxG_.loc[:, "g_conceptual"] = data_conceptual_G_[:, 1] inputsimdata_maxG_.loc[:, "g_residual"] = 0 - inputsimdata_maxG_complete = pd.concat( - [inputsimdata_maxG, inputsimdata_maxG_] - ) + inputsimdata_maxG_complete = pd.concat([inputsimdata_maxG, inputsimdata_maxG_]) else: inputsimdata_maxG_complete = inputsimdata_maxG # --- growth simulation input data for intrusion network conditioning - inputsimdata_inetG = inet_data.loc[ - :, ["X", "Y", "Z", "coord0", "coord1", "coord2"] - ].copy() + inputsimdata_inetG = inet_data.loc[:, ["X", "Y", "Z", "coord0", "coord1", "coord2"]].copy() self.data_for_vertical_extent_calculation = [ inputsimdata_maxG_complete, @@ -726,6 +669,5 @@ def build( self.prepare_data(geometric_scaling_parameters) self.create_grid_for_evaluation() - self.set_data_for_lateral_thresholds() self.set_data_for_vertical_thresholds() diff --git a/LoopStructural/modelling/intrusions/intrusion_feature.py b/LoopStructural/modelling/intrusions/intrusion_feature.py index 254be886e..d539cc4b3 100644 --- a/LoopStructural/modelling/intrusions/intrusion_feature.py +++ b/LoopStructural/modelling/intrusions/intrusion_feature.py @@ -26,7 +26,6 @@ def __init__( name="UnnamedIntrusion", model=None, ): - """ Parameters ---------- @@ -146,14 +145,13 @@ def interpolate_vertical_thresholds(self, points_coord1, points_coord2): minG_inputdata_coord0 = inputsimdata_minG.coord0.to_numpy() minG_inputdata_coord1 = inputsimdata_minG.coord1.to_numpy() minG_inputdata_coord2 = inputsimdata_minG.coord2.to_numpy() - minG_inputdata_residual = inputsimdata_minG.coord0.to_numpy() - minG_inputdata_conceptual = 0 + # inputsimdata_minG.coord0.to_numpy() - maxG_inputdata_coord0 = inputsimdata_maxG.coord0.to_numpy() + # inputsimdata_maxG.coord0.to_numpy() maxG_inputdata_coord1 = inputsimdata_maxG.coord1.to_numpy() maxG_inputdata_coord2 = inputsimdata_maxG.coord2.to_numpy() maxG_inputdata_residual = inputsimdata_maxG.g_residual.to_numpy() - maxG_inputdata_conceptual = inputsimdata_maxG.g_conceptual.to_numpy() + # inputsimdata_maxG.g_conceptual.to_numpy() # min,max P and L should be the same as in conceptual models minP = self.builder.conceptual_model_parameters.get("minP") @@ -187,26 +185,26 @@ def interpolate_vertical_thresholds(self, points_coord1, points_coord2): vertex=vertex, )[:, 1] - maxG_minP = np.min(maxG_inputdata_coord1) - maxG_minP_value = maxG_residual_interpolator( - maxG_minP, - maxG_inputdata_coord2[np.where(maxG_inputdata_coord1 == maxG_minP)][0], - ) - maxG_maxP = np.max(maxG_inputdata_coord1) - maxG_maxP_value = maxG_residual_interpolator( - maxG_maxP, - maxG_inputdata_coord2[np.where(maxG_inputdata_coord1 == maxG_maxP)][0], - ) - maxG_minL = np.min(maxG_inputdata_coord2) - maxG_minL_value = maxG_residual_interpolator( - maxG_inputdata_coord1[np.where(maxG_inputdata_coord2 == maxG_minL)][0], - maxG_minL, - ) - maxG_maxL = np.max(maxG_inputdata_coord2) - maxG_maxL_value = maxG_residual_interpolator( - maxG_inputdata_coord1[np.where(maxG_inputdata_coord2 == maxG_maxL)][0], - maxG_maxL, - ) + # maxG_minP = np.min(maxG_inputdata_coord1) + # 
maxG_residual_interpolator( + # maxG_minP, + # maxG_inputdata_coord2[np.where(maxG_inputdata_coord1 == maxG_minP)][0], + # ) + # maxG_maxP = np.max(maxG_inputdata_coord1) + # maxG_residual_interpolator( + # maxG_maxP, + # maxG_inputdata_coord2[np.where(maxG_inputdata_coord1 == maxG_maxP)][0], + # ) + # maxG_minL = np.min(maxG_inputdata_coord2) + # maxG_residual_interpolator( + # maxG_inputdata_coord1[np.where(maxG_inputdata_coord2 == maxG_minL)][0], + # maxG_minL, + # ) + # maxG_maxL = np.max(maxG_inputdata_coord2) + # maxG_residual_interpolator( + # maxG_inputdata_coord1[np.where(maxG_inputdata_coord2 == maxG_maxL)][0], + # maxG_maxL, + # ) residuals = maxG_residual_interpolator(points_coord1, points_coord2) thresholds = maxG_conceptual_model - residuals @@ -231,7 +229,6 @@ def interpolate_vertical_thresholds(self, points_coord1, points_coord2): return threshold_values, residual_values, conceptual_values def evaluate_value(self, points): - """ Computes a distance scalar field to the intrusion contact (isovalue = 0). @@ -330,7 +327,6 @@ def evaluate_value(self, points): return intrusion_sf def evaluate_value_test(self, points): - """ Computes a distance scalar field to the intrusion contact (isovalue = 0). diff --git a/LoopStructural/modelling/intrusions/intrusion_frame_builder.py b/LoopStructural/modelling/intrusions/intrusion_frame_builder.py index 5938f42a2..6c607ae69 100644 --- a/LoopStructural/modelling/intrusions/intrusion_frame_builder.py +++ b/LoopStructural/modelling/intrusions/intrusion_frame_builder.py @@ -1,6 +1,6 @@ from ...modelling.features.builders import StructuralFrameBuilder from ...modelling.features.fault import FaultSegment -from ...utils import getLogger, BoundingBox +from ...utils import getLogger, BoundingBox, rng from typing import Union @@ -35,9 +35,7 @@ def __init__( reference to the model containing the fault """ - StructuralFrameBuilder.__init__( - self, interpolatortype, bounding_box, nelements, **kwargs - ) + StructuralFrameBuilder.__init__(self, interpolatortype, bounding_box, nelements, **kwargs) self.origin = np.array([np.nan, np.nan, np.nan]) self.maximum = np.array([np.nan, np.nan, np.nan]) @@ -47,7 +45,9 @@ def __init__( self.faults = [] # -- intrusion frame parameters - self.intrusion_network_contact = None # string, contact which is used to constrain coordinate 0 (roof/top or floor/base) + self.intrusion_network_contact = ( + None # string, contact which is used to constrain coordinate 0 (roof/top or floor/base) + ) self.intrusion_other_contact = ( None # string, the contact which is NOT used to constrain coordinate 0 ) @@ -76,9 +76,7 @@ def __init__( def update_geometry(self, points): self.origin = np.nanmin(np.array([np.min(points, axis=0), self.origin]), axis=0) - self.maximum = np.nanmax( - np.array([np.max(points, axis=0), self.maximum]), axis=0 - ) + self.maximum = np.nanmax(np.array([np.max(points, axis=0), self.maximum]), axis=0) self.origin[self.origin < self.minimum_origin] = self.minimum_origin[ self.origin < self.minimum_origin ] @@ -128,6 +126,7 @@ def set_intrusion_frame_c0_data(self, intrusion_data: pd.DataFrame): Returns ------- """ + other_contact = None if self.intrusion_network_contact == "roof": other_contact = "floor" elif self.intrusion_network_contact == "top": @@ -160,7 +159,7 @@ def create_grid_for_indicator_fxs(self, spacing=None): ------- """ - if spacing == None: + if spacing is None: spacing = self.model.nsteps grid_points = self.model.regular_grid(spacing, shuffle=False) @@ -258,9 +257,7 @@ def 
add_faults_anisotropies(self, fault_list: list = []): self.anisotropies_fault_list.append(fault_list) for i in range(len(fault_list)): - if ( - fault_list[i] in self.faults - ): # remove pre-intrusion faults from faults list + if fault_list[i] in self.faults: # remove pre-intrusion faults from faults list self.faults.remove(fault_list[i]) for j in range(3): self.builders[j].faults.remove(fault_list[i]) @@ -326,9 +323,9 @@ def set_intrusion_steps_parameters(self): unit_to_name = step.get("unit_to") series_to_name = step.get("series_to") - unit_to_id = self.model.stratigraphic_column[series_to_name.name][ - unit_to_name - ].get("id") + unit_to_id = self.model.stratigraphic_column[series_to_name.name][unit_to_name].get( + "id" + ) intrusion_network_data.loc[:, "model_values"] = self.model.evaluate_model( self.model.rescale(intrusion_network_data_xyz, inplace=False) @@ -337,18 +334,14 @@ def set_intrusion_steps_parameters(self): # -- check if step is within the same unit. If so, find clusters of data: if unit_from_name == unit_to_name: data_points_xyz = ( - intrusion_network_data[ - intrusion_network_data["model_values"] == unit_from_id - ] + intrusion_network_data[intrusion_network_data["model_values"] == unit_from_id] .loc[:, ["X", "Y", "Z"]] .copy() .to_numpy() ) series_values = series_from_name.evaluate_value(data_points_xyz) series_values_mod = series_values.reshape(len(series_values), 1) - contact_clustering = KMeans(n_clusters=2, random_state=0).fit( - series_values_mod - ) + contact_clustering = KMeans(n_clusters=2, random_state=0).fit(series_values_mod) # contact 0 z = np.ma.masked_not_equal(contact_clustering.labels_, 0) @@ -357,7 +350,7 @@ def set_intrusion_steps_parameters(self): contact_0_mean = np.mean(contact_0_vals) contact_0_std = np.std(contact_0_vals) - if contact_0_std == 0 or np.isnan(contact_0_std) == True: + if contact_0_std == 0 or np.isnan(contact_0_std): contact_0_std = std_backup # contact 1 @@ -367,7 +360,7 @@ def set_intrusion_steps_parameters(self): contact_1_mean = np.mean(contact_1_vals) contact_1_std = np.std(contact_1_vals) - if contact_1_std == 0 or np.isnan(contact_1_std) == True: + if contact_1_std == 0 or np.isnan(contact_1_std): contact_1_std = std_backup if contact_0_mean <= contact_1_mean: @@ -384,37 +377,29 @@ def set_intrusion_steps_parameters(self): else: # -- step between different stratigraphic units data_points_from_xyz = ( - intrusion_network_data[ - intrusion_network_data["model_values"] == unit_from_id - ] + intrusion_network_data[intrusion_network_data["model_values"] == unit_from_id] .loc[:, ["X", "Y", "Z"]] .copy() .to_numpy() ) - step_structure_points_vals = step_structure[0].evaluate_value( - data_points_from_xyz - ) + step_structure_points_vals = step_structure[0].evaluate_value(data_points_from_xyz) if len(data_points_from_xyz) == 0: # no data points in strat unit - unit_from_min = self.model.stratigraphic_column[ - series_from_name.name - ][unit_from_name].get("min") - unit_from_max = self.model.stratigraphic_column[ - series_from_name.name - ][unit_from_name].get("max") + unit_from_min = self.model.stratigraphic_column[series_from_name.name][ + unit_from_name + ].get("min") + unit_from_max = self.model.stratigraphic_column[series_from_name.name][ + unit_from_name + ].get("max") self.intrusion_steps[step_i]["unit_from_mean"] = ( unit_from_min + (unit_from_max - unit_from_min) / 2 ) self.intrusion_steps[step_i]["unit_from_std"] = std_backup else: - series_values = series_from_name.evaluate_value( - data_points_from_xyz - ) + 
+                    series_values = series_from_name.evaluate_value(data_points_from_xyz)
                     mask = step_structure_points_vals < 0
                     if len(mask) > 0:
                         series_values_mod = np.ma.compressed(
-                            np.ma.masked_array(
-                                series_values, step_structure_points_vals < 0
-                            )
+                            np.ma.masked_array(series_values, step_structure_points_vals < 0)
                         )
                     else:
                         series_values_mod = series_values
@@ -425,16 +410,12 @@ def set_intrusion_steps_parameters(self):
                     step["unit_from_std"] = std_backup
                 data_points_to_xyz = (
-                    intrusion_network_data[
-                        intrusion_network_data["model_values"] == unit_to_id
-                    ]
+                    intrusion_network_data[intrusion_network_data["model_values"] == unit_to_id]
                     .loc[:, ["X", "Y", "Z"]]
                     .copy()
                     .to_numpy()
                 )
-                step_structure_points_vals = step_structure[0].evaluate_value(
-                    data_points_to_xyz
-                )
+                step_structure_points_vals = step_structure[0].evaluate_value(data_points_to_xyz)
                 if len(data_points_to_xyz) == 0:
                     unit_to_min = self.model.stratigraphic_column[series_to_name.name][
                         unit_to_name
@@ -449,9 +430,7 @@ def set_intrusion_steps_parameters(self):
                     mask = step_structure_points_vals > 0
                     if len(mask) > 0:
                         series_values_mod = np.ma.compressed(
-                            np.ma.masked_array(
-                                series_values, step_structure_points_vals > 0
-                            )
+                            np.ma.masked_array(series_values, step_structure_points_vals > 0)
                         )
                     else:
                         series_values_mod = series_values
@@ -460,10 +439,10 @@ def set_intrusion_steps_parameters(self):
             check_mean = step["unit_to_mean"]
             check_std = step["unit_to_std"]
-            if np.isnan(check_mean) == True:
+            if np.isnan(check_mean):
                 step["unit_to_mean"] = 40
-            if np.isnan(check_std) == True:
+            if np.isnan(check_std):
                 step["unit_to_std"] = std_backup
             if step["unit_to_std"] == 0:
@@ -494,18 +473,12 @@ def set_marginal_faults_parameters(self):
         for fault_i in self.marginal_faults.keys():
             marginal_fault = self.marginal_faults[fault_i].get("structure")
-            block = self.marginal_faults[fault_i].get(
-                "block"
-            )  # hanging wall or foot wall
-            emplacement_mechanism = self.marginal_faults[fault_i].get(
-                "emplacement_mechanism"
-            )
+            block = self.marginal_faults[fault_i].get("block")  # hanging wall or foot wall
+            self.marginal_faults[fault_i].get("emplacement_mechanism")
             series_name = self.marginal_faults[fault_i].get("series")
             series_values_temp = series_name.evaluate_value(intrusion_frame_c0_data_xyz)
-            faults_values_temp = marginal_fault[0].evaluate_value(
-                intrusion_frame_c0_data_xyz
-            )
+            faults_values_temp = marginal_fault[0].evaluate_value(intrusion_frame_c0_data_xyz)
             if block == "hanging wall":
                 series_values = series_values_temp[faults_values_temp > 0]
@@ -546,13 +519,9 @@ def set_intrusion_frame_parameters(
         """
-        self.intrusion_network_contact = intrusion_frame_parameters.get(
-            "contact", "floor"
-        )
+        self.intrusion_network_contact = intrusion_frame_parameters.get("contact", "floor")
 
-        self.set_intrusion_frame_c0_data(
-            intrusion_data
-        )  # separates roof and floor data
+        self.set_intrusion_frame_c0_data(intrusion_data)  # separates roof and floor data
 
         # if self.intrusion_network_type == "interpolated":
@@ -601,9 +570,7 @@ def set_intrusion_frame_parameters(
             self.add_faults_anisotropies(fault_anisotropies)
         # add contact anisotropies list
-        contact_anisotropies = intrusion_frame_parameters.get(
-            "contact_anisotropies", None
-        )
+        contact_anisotropies = intrusion_frame_parameters.get("contact_anisotropies", None)
         self.anisotropies_series_list = contact_anisotropies
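The set_intrusion_steps_parameters hunks above split the stratigraphic scalar-field values at a step into two contacts with KMeans and then take per-cluster means and standard deviations. A minimal standalone sketch of that idea with synthetic values rather than model output (boolean indexing is used here in place of the masked-array calls in the patch):

import numpy as np
from sklearn.cluster import KMeans

rng = np.random.default_rng(0)
# Two synthetic "contacts": scalar-field values grouped around two levels.
series_values = np.concatenate([rng.normal(10.0, 0.5, 50), rng.normal(25.0, 0.5, 50)])

# Same call pattern as the patch: cluster the 1-D values into two groups.
clustering = KMeans(n_clusters=2, random_state=0).fit(series_values.reshape(-1, 1))
contact_0_vals = series_values[clustering.labels_ == 0]
contact_1_vals = series_values[clustering.labels_ == 1]
print(contact_0_vals.mean(), contact_0_vals.std(), contact_1_vals.mean(), contact_1_vals.std())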
@@ -730,9 +697,7 @@ def create_constraints_for_c0(self, **kwargs):
             If = self.indicator_function_faults(delta=self.delta_faults)
 
             if len(np.where(If == 1)[0]) == 0:
-                logger.error(
-                    "No faults identified, you may increase the value of delta_f"
-                )
+                logger.error("No faults identified, you may increase the value of delta_f")
 
             If_sum = np.sum(If, axis=1)
@@ -740,7 +705,7 @@ def create_constraints_for_c0(self, **kwargs):
             for i, step_i in enumerate(self.intrusion_steps.keys()):
                 delta_contact = self.delta_contacts[i]
-                if type(delta_contact) is list:
+                if isinstance(delta_contact, list):
                     delta_contact0 = delta_contact[0]
                     delta_contact1 = delta_contact[1]
                 else:
@@ -753,24 +718,20 @@ def create_constraints_for_c0(self, **kwargs):
                 fault_gridpoints_vals = step_fault[0].evaluate_value(grid_points)
 
-                series_from_gridpoints_vals = series_from_name.evaluate_value(
-                    grid_points
-                )
+                series_from_gridpoints_vals = series_from_name.evaluate_value(grid_points)
 
                 if series_from_name == series_to_name:
                     series_to_gridpoints_vals = series_from_gridpoints_vals
                 else:
-                    series_to_gridpoints_vals = series_to_name.evaluate_value(
-                        grid_points
-                    )
+                    series_to_gridpoints_vals = series_to_name.evaluate_value(grid_points)
 
-                contacts0_val_min = self.intrusion_steps[step_i].get(
-                    "unit_from_mean"
-                ) - (self.intrusion_steps[step_i].get("unit_from_std") * delta_contact0)
-                contacts0_val_max = self.intrusion_steps[step_i].get(
-                    "unit_from_mean"
-                ) + (self.intrusion_steps[step_i].get("unit_from_std") * delta_contact0)
+                contacts0_val_min = self.intrusion_steps[step_i].get("unit_from_mean") - (
+                    self.intrusion_steps[step_i].get("unit_from_std") * delta_contact0
+                )
+                contacts0_val_max = self.intrusion_steps[step_i].get("unit_from_mean") + (
+                    self.intrusion_steps[step_i].get("unit_from_std") * delta_contact0
+                )
                 contacts1_val_min = self.intrusion_steps[step_i].get("unit_to_mean") - (
                     self.intrusion_steps[step_i].get("unit_to_std") * delta_contact1
                 )
@@ -811,7 +772,7 @@ def create_constraints_for_c0(self, **kwargs):
                 ]
 
                 region = self.intrusion_steps[step_i].get("region", None)
-                if region == None:
+                if region is None:
                     step_i_constraints = step_i_constraints_temp
                 else:
                     mask = region(step_i_constraints_temp)
@@ -821,9 +782,7 @@ def create_constraints_for_c0(self, **kwargs):
                     [intrusion_reference_contact_points, step_i_constraints]
                 )
 
-                splits_from_sill_name = self.intrusion_steps[step_i].get(
-                    "splits_from", None
-                )
+                splits_from_sill_name = self.intrusion_steps[step_i].get("splits_from", None)
                 # check if sill comes from anothe sill
                 # (add all the constraint from original sill, this have to be changed and adapted so it adds constraints for specific faults)
                 if splits_from_sill_name is not None:
                     splits_from_sill_steps = self.model.__getitem__(
                         splits_from_sill_name
                     ).intrusion_frame.builder.intrusion_steps
-                    for j, step_j in enumerate(splits_from_sill_steps.keys()):
-                        step_j_hg_constraints = splits_from_sill_steps[step_j].get(
-                            "constraints_hw"
-                        )
+                    for step_j in splits_from_sill_steps.keys():
+                        step_j_hg_constraints = splits_from_sill_steps[step_j].get("constraints_hw")
                         intrusion_reference_contact_points = np.vstack(
                             [intrusion_reference_contact_points, step_j_hg_constraints]
                         )
@@ -850,15 +807,11 @@ def create_constraints_for_c0(self, **kwargs):
             If_sum = np.sum(If, axis=1)
 
             # evaluate grid points in series
-            for i, fault_i in enumerate(self.marginal_faults.keys()):
+            for fault_i in self.marginal_faults.keys():
                 delta_contact = self.marginal_faults[fault_i].get("delta_c", 1)
                 marginal_fault = self.marginal_faults[fault_i].get("structure")
-                block = self.marginal_faults[fault_i].get(
-                    "block"
-                )  # hanging wall or foot wall
-                emplacement_mechanisms = self.marginal_faults[fault_i].get(
-                    "emplacement_mechanism"
-                )
+                block = self.marginal_faults[fault_i].get("block")  # hanging wall or foot wall
+                self.marginal_faults[fault_i].get("emplacement_mechanism")
                 series_name = self.marginal_faults[fault_i].get("series")
 
                 fault_gridpoints_vals = marginal_fault[0].evaluate_value(grid_points)
@@ -901,7 +854,7 @@ def create_constraints_for_c0(self, **kwargs):
                 region = self.marginal_faults[fault_i].get("region", None)
 
-                if region == None:
+                if region is None:
                     marginalfault_i_constraints = marginalfault_i_constraints_temp
                 else:
                     mask = region(marginalfault_i_constraints_temp)
@@ -921,10 +874,7 @@ def create_constraints_for_c0(self, **kwargs):
         stratigraphy_gradient_grid_points = series_id.evaluate_gradient(grid_points)
 
         # If intrusion frame c0 is built usinf roof/top contact, then change vector direction
-        if (
-            self.intrusion_network_contact == "roof"
-            or self.intrusion_network_contact == "top"
-        ):
+        if self.intrusion_network_contact == "roof" or self.intrusion_network_contact == "top":
            grid_points_inflation = stratigraphy_gradient_grid_points * (-1)
         else:
             grid_points_inflation = stratigraphy_gradient_grid_points
@@ -965,9 +915,7 @@ def get_indicator_function_points(self, ifx_type="contacts"):
         return If_points
 
-    def set_intrusion_frame_data(
-        self, intrusion_frame_data
-    ):  # , intrusion_network_points):
+    def set_intrusion_frame_data(self, intrusion_frame_data):  # , intrusion_network_points):
         """Adds the intrusion network points as coordinate 0 data for the intrusion frame
 
         Parameters
@@ -988,8 +936,8 @@ def set_intrusion_frame_data(
         coord_0_data = intrusion_frame_data[intrusion_frame_data["coord"] == 0].copy()
         if len(coord_0_data) == 0:
-            np.random.shuffle(self.frame_c0_gradients)
-            if self.gradients_constraints_weight == None:
+            rng.shuffle(self.frame_c0_gradients)
+            if self.gradients_constraints_weight is None:
                 n_grad_constraints = 100
             else:
                 n_grad_constraints = int(
@@ -1004,9 +952,7 @@ def set_intrusion_frame_data(
             coord_0_grads["feature_name"] = self.name
             coord_0_grads["w"] = 1
 
-            intrusion_frame_data_complete = pd.concat(
-                [intrusion_frame_data_temp, coord_0_grads]
-            )
+            intrusion_frame_data_complete = pd.concat([intrusion_frame_data_temp, coord_0_grads])
         else:
             intrusion_frame_data_complete = intrusion_frame_data_temp
diff --git a/LoopStructural/modelling/intrusions/intrusion_support_functions.py b/LoopStructural/modelling/intrusions/intrusion_support_functions.py
index 4fce8a563..cc9f0e250 100644
--- a/LoopStructural/modelling/intrusions/intrusion_support_functions.py
+++ b/LoopStructural/modelling/intrusions/intrusion_support_functions.py
@@ -171,14 +171,9 @@ def shortest_path(inlet, outlet, time_map):
     inet[temp_inlet[0], temp_inlet[1]] = 0
     i = 0
-    n_rows = len(time_map)
-    n_cols = len(time_map[0])
-
     while True:
         i = i + 1
-        time_temp_inlet = time_map[
-            temp_inlet[0], temp_inlet[1]
-        ]  # obtain time value of temporary outlet
+
         neighbors = element_neighbour(
             temp_inlet, time_map, inet
         )  # identify neighbours elements of temporary outlet
@@ -340,7 +335,6 @@ def index_min(array):
     for i in range(
         8
     ):  # create a dictionary assining positions from 0 to 7 to the values in the array
-        a = i
         if array[i] >= 0:
             index_array.update({i: array[i]})
@@ -391,7 +385,6 @@ def new_inlet(inlet, direction):
 
 def grid_from_array(array, fixed_coord, lower_extent, upper_extent):
-
     """
     Create an numpy matrix of [i,j,x,y,z,values in array],
     given an array of 2 dimensions (any combination between x, y an z)
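set_intrusion_frame_data above now shuffles the coordinate-0 gradients with the rng imported from ...utils instead of the legacy np.random.shuffle call. Assuming that rng is (or wraps) a numpy Generator, the pattern is roughly the sketch below; the array contents are illustrative only:

import numpy as np

rng = np.random.default_rng(42)  # assumed stand-in for the imported LoopStructural.utils.rng

frame_c0_gradients = np.arange(12, dtype=float).reshape(4, 3)
rng.shuffle(frame_c0_gradients)  # in-place shuffle along the first axis, like the old np.random.shuffle
print(frame_c0_gradients)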
diff --git a/LoopStructural/utils/helper.py b/LoopStructural/utils/helper.py
index 2c25f1298..b5341c2db 100644
--- a/LoopStructural/utils/helper.py
+++ b/LoopStructural/utils/helper.py
@@ -31,9 +31,6 @@ def get_data_bounding_box_map(xyz, buffer):
     minz = np.min(xyz[:, 2])
     maxz = np.max(xyz[:, 2])
-    xlen = maxx - minx
-    ylen = maxy - miny
-    zlen = maxz - minz
     # length = np.max([xlen, ylen, zlen])
     minx -= buffer
     maxx += buffer
diff --git a/LoopStructural/utils/linalg.py b/LoopStructural/utils/linalg.py
index 4f261378f..751bb4df5 100644
--- a/LoopStructural/utils/linalg.py
+++ b/LoopStructural/utils/linalg.py
@@ -4,5 +4,5 @@
 
 def normalise(v):
     v = np.array(v)
-    norm = np.linalg.norm(v, axis=1)
+    # norm = np.linalg.norm(v, axis=1)
     return v / np.linalg.norm(v)
diff --git a/pyproject.toml b/pyproject.toml
index 2d4e82553..dfc0a57ab 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -105,12 +105,13 @@ quiet-level = 3
 
 [tool.ruff]
 exclude = ['.git', 'pycache__', 'build', 'dist', 'doc/examples', 'doc/_build']
-external = ["E131", "D102", "D105"]
 line-length = 100
 indent-width = 4
 target-version = 'py39'
 
 [tool.ruff.lint]
+external = ["E131", "D102", "D105"]
+
 ignore = [
     # whitespace before ':'
     "E203",