diff --git a/cyprecice/SolverInterface.pxd b/cyprecice/SolverInterface.pxd index dd3368c9..3ae8db3a 100644 --- a/cyprecice/SolverInterface.pxd +++ b/cyprecice/SolverInterface.pxd @@ -101,6 +101,18 @@ cdef extern from "precice/SolverInterface.hpp" namespace "precice": void readScalarData (const int dataID, const int valueIndex, double& value) const + # Gradient related API + + bool isGradientDataRequired(int dataID) const; + + void writeBlockVectorGradientData(int dataID, int size, const int* valueIndices, const double* gradientValues); + + void writeScalarGradientData(int dataID, int valueIndex, const double* gradientValues); + + void writeVectorGradientData(int dataID, int valueIndex, const double* gradientValues); + + void writeBlockScalarGradientData(int dataID, int size, const int* valueIndices, const double* gradientValues); + # direct mesh access void setMeshAccessRegion (const int meshID, const double* boundingBox) const diff --git a/cyprecice/cyprecice.pyx b/cyprecice/cyprecice.pyx index 7d84c4ce..1fca31df 100644 --- a/cyprecice/cyprecice.pyx +++ b/cyprecice/cyprecice.pyx @@ -1211,6 +1211,245 @@ cdef class Interface: self.thisptr.readScalarData (data_id, vertex_id, _value) return _value + def write_block_vector_gradient_data (self, data_id, vertex_ids, gradientValues): + """ + Writes vector gradient data given as block. This function writes gradient values of specified vertices to a dataID. + Values are provided as a block of continuous memory. Values are stored in a numpy array [N x D] where N = number + of vertices and D = number of gradient components. + + Parameters + ---------- + data_id : int + Data ID to write to. + vertex_ids : array_like + Indices of the vertices. 
+ gradientValues : array_like + Gradient values differentiated in the spatial direction (dx, dy) for 2D space, (dx, dy, dz) for 3D space + + Notes + ----- + Previous calls: + Count of available elements at values matches the configured dimension + Count of available elements at vertex_ids matches the given size + Initialize() has been called + Data with dataID has attribute hasGradient = true + + Examples + -------- + Write block gradient vector data for a 2D problem with 2 vertices: + >>> data_id = 1 + >>> vertex_ids = [1, 2] + >>> gradientValues = np.array([[v1x_dx, v1y_dx, v1x_dy, v1y_dy], [v2x_dx, v2y_dx, v2x_dy, v2y_dy]]) + >>> interface.write_block_vector_gradient_data(data_id, vertex_ids, gradientValues) + + Write block vector data for a 3D (D=3) problem with 2 (N=2) vertices: + >>> data_id = 1 + >>> vertex_ids = [1, 2] + >>> gradientValues = np.array([[v1x_dx, v1y_dx, v1z_dx, v1x_dy, v1y_dy, v1z_dy, v1x_dz, v1y_dz, v1z_dz], [v2x_dx, v2y_dx, v2z_dx, v2x_dy, v2y_dy, v2z_dy, v2x_dz, v2y_dz, v2z_dz]]) + >>> interface.write_block_vector_gradient_data(data_id, vertex_ids, gradientValues) + """ + check_array_like(vertex_ids, "vertex_ids", "write_block_vector_gradient_data") + check_array_like(gradientValues, "gradientValues", "write_block_vector_gradient_data") + + if not isinstance(gradientValues, np.ndarray): + gradientValues = np.asarray(gradientValues) + + if len(gradientValues) > 0: + size, dimensions = gradientValues.shape + assert dimensions == self.get_dimensions() * self.get_dimensions(), "Dimensions of vector data in write_block_vector_gradient_data does not match with dimensions in problem definition. 
Provided dimensions: {}, expected dimensions: {}".format(dimensions, self.get_dimensions() * self.get_dimensions()) + if len(gradientValues) == 0: + size = 0 + + cdef np.ndarray[int, ndim=1] _vertex_ids = np.ascontiguousarray(vertex_ids, dtype=np.int32) + cdef np.ndarray[double, ndim=1] _gradientValues = np.ascontiguousarray(gradientValues.flatten(), dtype=np.double) + + assert _gradientValues.size == size * self.get_dimensions() * self.get_dimensions(), "Dimension of vector gradient data provided in write_block_vector_gradient_data does not match problem definition. Check length of input data provided. Provided size: {}, expected size: {}".format(_gradientValues.size, size * self.get_dimensions() * self.get_dimensions()) + assert _vertex_ids.size == size, "Vertex IDs are of incorrect length in write_block_vector_gradient_data. Check length of vertex ids input. Provided size: {}, expected size: {}".format(_vertex_ids.size, size) + + self.thisptr.writeBlockVectorGradientData (data_id, size, _vertex_ids.data, _gradientValues.data) + + def write_scalar_gradient_data (self, data_id, vertex_id, gradientValues): + """ + Writes scalar gradient data to a vertex + This function writes the corresponding gradient matrix value of a specified vertex to a dataID. + + The gradients need to be provided in the following format: + + The 2D-format of gradientValues is (v_dx, v_dy) vector corresponding to the data block v = (v) + differentiated respectively in x-direction dx and y-direction dy + + The 3D-format of gradientValues is (v_dx, v_dy, v_dz) vector + corresponding to the data block v = (v) differentiated respectively in spatial directions x-direction dx and y-direction dy and z-direction dz + + Parameters + ---------- + data_id : int + ID to write to. + vertex_id : int + Index of the vertex. + gradientValue : array_like + A vector of the gradient values. 
+ + Notes + ----- + Count of available elements at value matches the configured dimension + Vertex with dataID exists and contains data + Data with dataID has attribute hasGradient = true + + Previous calls: + initialize() has been called + + Examples + -------- + Write scalar data for a 2D problem: + >>> data_id = 1 + >>> vertex_id = 5 + >>> gradientValue = [v5_dx, v5_dy] + >>> interface.write_scalar_gradient_data(data_id, vertex_id, gradientValue) + """ + + check_array_like(gradientValues, "gradientValues", "write_scalar_gradient_data") + + if not isinstance(gradientValues, np.ndarray): + gradientValues = np.asarray(gradientValues) + + cdef np.ndarray[double, ndim=1] _gradientValues = np.ascontiguousarray(gradientValues.flatten(), dtype=np.double) + + assert _gradientValues.size == self.get_dimensions(), "Vector data provided for vertex {} in write_scalar_gradient_data does not match problem definition. Check length of input data provided. Provided size: {}, expected size: {}".format(vertex_id, _gradientValues.size, self.get_dimensions()) + + self.thisptr.writeScalarGradientData(data_id, vertex_id, _gradientValues.data) + + def write_vector_gradient_data (self, data_id, vertex_id, gradientValues): + """ + Writes vector gradient data to a vertex + This function writes the corresponding gradient matrix value of a specified vertex to a dataID. + + The gradients need to be provided in the following format: + + The 2D-format of \p gradientValues is (vx_dx, vy_dx, vx_dy, vy_dy) vector corresponding to the data block v = (vx, vy) + differentiated respectively in x-direction dx and y-direction dy + + The 3D-format of \p gradientValues is (vx_dx, vy_dx, vz_dx, vx_dy, vy_dy, vz_dy, vx_dz, vy_dz, vz_dz) vector + corresponding to the data block v = (vx, vy, vz) differentiated respectively in spatial directions x-direction dx and y-direction dy and z-direction dz + + Parameters + ---------- + data_id : int + ID to write to. + vertex_id : int + Index of the vertex. 
+ gradientValue : array_like + A vector of the gradient values. + + Notes + ----- + Count of available elements at value matches the configured dimension + Vertex with dataID exists and contains data + Data with dataID has attribute hasGradient = true + + Previous calls: + initialize() has been called + + Examples + -------- + Write scalar data for a 2D problem: + >>> data_id = 1 + >>> vertex_id = 5 + >>> gradientValue = [v5x_dx, v5y_dx, v5x_dy,v5y_dy] + >>> interface.write_vector_gradient_data(data_id, vertex_id, gradientValue) + """ + + check_array_like(gradientValues, "gradientValues", "write_vector_gradient_data") + + if not isinstance(gradientValues, np.ndarray): + gradientValues = np.asarray(gradientValues) + + cdef np.ndarray[double, ndim=1] _gradientValues = np.ascontiguousarray(gradientValues.flatten(), dtype=np.double) + + assert _gradientValues.size == self.get_dimensions() * self.get_dimensions(), "Dimensions of vector gradient data provided for vertex {} in write_vector_gradient_data does not match problem definition. Check length of input data provided. Provided size: {}, expected size: {}".format(_gradientValues.size, self.get_dimensions() * self.get_dimensions()) + + self.thisptr.writeVectorGradientData(data_id, vertex_id, _gradientValues.data) + + def write_block_scalar_gradient_data (self, data_id, vertex_ids, gradientValues): + """ + Writes scalar gradient data given as block. This function writes values of specified vertices to a dataID. + Values are provided as a block of continuous memory. Values are stored in a numpy array [N x D] where N = number + of vertices and D = dimensions of geometry. + + Parameters + ---------- + data_id : int + Data ID to write to. + vertex_ids : array_like + Indices of the vertices. 
+ gradientValues : array_like + Gradient values differentiated in the spatial direction (dx, dy) for 2D space, (dx, dy, dz) for 3D space + + Notes + ----- + Previous calls: + Count of available elements at values matches the configured dimension + Count of available elements at vertex_ids matches the given size + Initialize() has been called + Data with dataID has attribute hasGradient = true + + Examples + -------- + Write block gradient scalar data for a 2D problem with 2 vertices: + >>> data_id = 1 + >>> vertex_ids = [1, 2] + >>> gradientValues = np.array([[v1_dx, v1_dy], [v2_dx, v2_dy]]) + >>> interface.write_block_scalar_gradient_data(data_id, vertex_ids, gradientValues) + + Write block scalar data for a 3D (D=3) problem with 2 (N=2) vertices: + >>> data_id = 1 + >>> vertex_ids = [1, 2] + >>> values = np.array([[v1_dx, v1_dy, v1_dz], [v2_dx, v2_dy, v2_dz]]) + >>> interface.write_block_scalar_gradient_data(data_id, vertex_ids, values) + """ + check_array_like(vertex_ids, "vertex_ids", "write_block_scalar_gradient_data") + check_array_like(gradientValues, "gradientValues", "write_block_scalar_gradient_data") + + if not isinstance(gradientValues, np.ndarray): + gradientValues = np.asarray(gradientValues) + + if len(gradientValues) > 0: + size, dimensions = gradientValues.shape + assert dimensions == self.get_dimensions() , "Dimensions of scalar gradient data provided in write_block_scalar_gradient_data does not match with dimensions in problem definition. Provided dimensions: {}, expected dimensions: {}".format(dimensions, self.get_dimensions()) + if len(gradientValues) == 0: + size = 0 + + cdef np.ndarray[int, ndim=1] _vertex_ids = np.ascontiguousarray(vertex_ids, dtype=np.int32) + cdef np.ndarray[double, ndim=1] _gradientValues = np.ascontiguousarray(gradientValues.flatten(), dtype=np.double) + + assert _gradientValues.size == size * self.get_dimensions(), "Scalar gradient data is not provided for all vertices in write_block_scalar_gradient_data. 
Check length of input data provided. Provided size: {}, expected size: {}".format(_gradientValues.size, size * self.get_dimensions()) + assert _vertex_ids.size == size, "Vertex IDs are of incorrect length in write_block_scalar_gradient_data. Check length of vertex ids input. Provided size: {}, expected size: {}".format(_vertex_ids.size, size) + + self.thisptr.writeBlockScalarGradientData (data_id, size, _vertex_ids.data, _gradientValues.data) + + def is_gradient_data_required(self,data_id): + """ + Checks if the given data set requires gradient data. We check if the data object has been intialized with the gradient flag. + + Parameters + ---------- + data_id : int + Data ID to check. + + Returns + ------- + bool + True if gradient data is required for a dataID. + + Examples + -------- + Check if gradient data is required for a dataID: + >>> data_id = 1 + >>> interface.is_gradient_data_required(data_id) + """ + return self.thisptr.isGradientDataRequired(data_id) + + def set_mesh_access_region (self, mesh_id, bounding_box): """ This function is required if you don't want to use the mapping schemes in preCICE, but rather diff --git a/test/SolverInterface.cpp b/test/SolverInterface.cpp index c23fbf7a..08139a2f 100644 --- a/test/SolverInterface.cpp +++ b/test/SolverInterface.cpp @@ -395,6 +395,56 @@ void SolverInterface:: getMeshVerticesAndIDs } } +bool SolverInterface::isGradientDataRequired(int dataID) const +{ + return 0; +} + +void SolverInterface::writeBlockVectorGradientData( + int dataID, + int size, + const int *valueIndices, + const double *gradientValues) +{ + fake_read_write_buffer.clear(); + for (int i = 0; i < size * this->getDimensions() * this->getDimensions(); i++) { + fake_read_write_buffer.push_back(gradientValues[i]); + } +} + +void SolverInterface::writeScalarGradientData( + int dataID, + int valueIndex, + const double *gradientValues) +{ + fake_read_write_buffer.clear(); + for (int i = 0; i < this->getDimensions(); i++) { + 
fake_read_write_buffer.push_back(gradientValues[i]); + } +} +void SolverInterface::writeBlockScalarGradientData( + int dataID, + int size, + const int *valueIndices, + const double *gradientValues) +{ + fake_read_write_buffer.clear(); + for (int i = 0; i < size * this->getDimensions(); i++) { + fake_read_write_buffer.push_back(gradientValues[i]); + } +} + +void SolverInterface::writeVectorGradientData( + int dataID, + int valueIndex, + const double *gradientValues) +{ + fake_read_write_buffer.clear(); + for (int i = 0; i < this->getDimensions() * this->getDimensions(); i++) { + fake_read_write_buffer.push_back(gradientValues[i]); + } +} + std::string getVersionInformation() { std::string dummy ("dummy"); @@ -423,4 +473,4 @@ const std::string& actionReadIterationCheckpoint() } // namespace precice, constants -} // namespace precice +} // namespace precice \ No newline at end of file diff --git a/test/test_bindings_module.py b/test/test_bindings_module.py index 407a1e97..e20094b6 100644 --- a/test/test_bindings_module.py +++ b/test/test_bindings_module.py @@ -409,3 +409,143 @@ def test_get_mesh_vertices_and_ids(self): fake_ids, fake_coordinates = solver_interface.get_mesh_vertices_and_ids(fake_mesh_id) self.assertTrue(np.array_equal(fake_ids, vertex_ids)) self.assertTrue(np.array_equal(fake_coordinates, coordinates)) + + def test_is_gradient_data_required(self): + solver_interface = precice.Interface("test", "dummy.xml", 0, 1) + fake_bool = 0 # compare to output in test/SolverInterface.cpp + fake_data_id = 0 + self.assertEqual(fake_bool, solver_interface.is_gradient_data_required(fake_data_id)) + + def test_write_block_scalar_gradient_data(self): + solver_interface = precice.Interface("test", "dummy.xml", 0, 1) + write_data = np.array([[1, 2, 3], [6, 7, 8], [9, 10, 11]], dtype=np.double) + solver_interface.write_block_scalar_gradient_data(1, np.array([1, 2, 3]), write_data) + read_data = solver_interface.read_block_scalar_data(1, np.array(range(9))) + 
self.assertTrue(np.array_equiv(np.array(write_data).flatten(), read_data.flatten())) + + def test_write_block_scalar_gradient_data_single_float(self): + solver_interface = precice.Interface("test", "dummy.xml", 0, 1) + fake_dimension = 3 + n_fake_vertices = 4 + vertex_ids = np.arange(n_fake_vertices) + write_data = np.random.rand(n_fake_vertices, fake_dimension) + solver_interface.write_block_scalar_gradient_data(1, vertex_ids, write_data) + read_data = solver_interface.read_block_vector_data(1, vertex_ids) + self.assertTrue(np.array_equal(write_data, read_data)) + + def test_write_block_scalar_gradient_data_empty(self): + solver_interface = precice.Interface("test", "dummy.xml", 0, 1) + write_data = np.array([]) + solver_interface.write_block_scalar_gradient_data(1, [], write_data) + read_data = solver_interface.read_block_scalar_data(1, []) + self.assertTrue(np.array_equiv(np.array(write_data).flatten(), read_data.flatten())) + + def test_write_block_scalar_gradient_data_non_contiguous(self): + """ + Tests behaviour of solver interface, if a non contiguous array is passed to the interface. + + Note: Check whether np.ndarray is contiguous via np.ndarray.flags. 
+ """ + solver_interface = precice.Interface("test", "dummy.xml", 0, 1) + dummy_array = np.random.rand(3, 9) + write_data = dummy_array[:, 3:6] + assert write_data.flags["C_CONTIGUOUS"] is False + solver_interface.write_block_scalar_gradient_data(1, np.array([1, 2, 3]), write_data) + read_data = solver_interface.read_block_scalar_data(1, np.array(range(9))) + self.assertTrue(np.array_equiv(np.array(write_data).flatten(), read_data.flatten())) + + def test_write_scalar_gradient_data(self): + solver_interface = precice.Interface("test", "dummy.xml", 0, 1) + fake_dimension = 3 + write_data = np.random.rand(fake_dimension) + solver_interface.write_scalar_gradient_data(1, 1, write_data) + read_data = solver_interface.read_vector_data(1, 1) + self.assertTrue(np.array_equiv(write_data, read_data)) + + def test_write_block_vector_gradient_data(self): + solver_interface = precice.Interface("test", "dummy.xml", 0, 1) + fake_dimension = 3 + n_fake_vertices = 4 + vertex_ids = np.arange(n_fake_vertices) + write_data = np.random.rand(n_fake_vertices, fake_dimension * fake_dimension) + solver_interface.write_block_vector_gradient_data(1, vertex_ids, write_data) + read_data = solver_interface.read_block_vector_data(1, np.array(range(n_fake_vertices * fake_dimension))) + self.assertTrue(np.array_equiv(write_data.flatten(), read_data.flatten())) + + def test_write_block_vector_gradient_data_empty(self): + solver_interface = precice.Interface("test", "dummy.xml", 0, 1) + write_data = np.array([]) + solver_interface.write_block_vector_gradient_data(1, [], write_data) + read_data = solver_interface.read_block_scalar_data(1, []) + self.assertTrue(len(read_data) == 0) + + def test_write_block_vector_gradient_data_list(self): + solver_interface = precice.Interface("test", "dummy.xml", 0, 1) + write_data = [[3.0, 7.0, 8.0, 9.0, 10.0, 11.0, 12.0, 13.0, 14.0], [1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 6.0, 5.0]] + solver_interface.write_block_vector_gradient_data(1, np.array([1, 2]), write_data) + 
read_data = solver_interface.read_block_scalar_data(1, np.array(range(18))) + self.assertTrue(np.array_equiv(np.array(write_data).flatten(), read_data.flatten())) + + def test_write_block_vector_gradient_data_tuple(self): + solver_interface = precice.Interface("test", "dummy.xml", 0, 1) + write_data = ((1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 3.0, 7.0, 8.0), (1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 6.0, 5.0)) + solver_interface.write_block_vector_gradient_data(1, np.array([1, 2]), write_data) + read_data = solver_interface.read_block_scalar_data(1, np.array(range(18))) + self.assertTrue(np.array_equiv(np.array(write_data).flatten(), read_data.flatten())) + + def test_write_block_vector_gradient_data_mixed(self): + solver_interface = precice.Interface("test", "dummy.xml", 0, 1) + write_data = [(1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 3.0, 7.0, 8.0), (4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 7.0, 6.0, 5.0)] + solver_interface.write_block_vector_gradient_data(1, np.array([1, 2]), write_data) + read_data = solver_interface.read_block_scalar_data(1, np.array(range(18))) + self.assertTrue(np.array_equiv(np.array(write_data).flatten(), read_data.flatten())) + + def test_write_block_vector_gradient_data_non_contiguous(self): + """ + Tests behaviour of solver interface, if a non contiguous array is passed to the interface. + + Note: Check whether np.ndarray is contiguous via np.ndarray.flags. 
+ """ + solver_interface = precice.Interface("test", "dummy.xml", 0, 1) + dummy_array = np.random.rand(3, 15) + write_data = dummy_array[:, 2:11] + assert write_data.flags["C_CONTIGUOUS"] is False + vertex_ids = np.arange(3) + solver_interface.write_block_vector_gradient_data(1, vertex_ids, write_data) + read_data = solver_interface.read_block_scalar_data(1, np.array(range(27))) + self.assertTrue(np.array_equiv(np.array(write_data).flatten(), read_data.flatten())) + + def test_write_vector_gradient_data(self): + solver_interface = precice.Interface("test", "dummy.xml", 0, 1) + write_data = np.arange(0, 9, dtype=np.double) + solver_interface.write_vector_gradient_data(1, 1, write_data) + read_data = solver_interface.read_block_scalar_data(1, np.array(range(9))) + self.assertTrue(np.array_equiv(np.array(write_data).flatten(), read_data.flatten())) + + def test_write_vector_gradient_data_list(self): + solver_interface = precice.Interface("test", "dummy.xml", 0, 1) + write_data = [1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0] + solver_interface.write_vector_gradient_data(1, 1, write_data) + read_data = solver_interface.read_block_scalar_data(1, np.array(range(9))) + self.assertTrue(np.array_equiv(np.array(write_data).flatten(), read_data.flatten())) + + def test_write_vector_gradient_data_tuple(self): + solver_interface = precice.Interface("test", "dummy.xml", 0, 1) + write_data = (1.0, 2.0, 3.0, 9.0, 8.0, 7.0, 6.0, 5.0, 4.0) + solver_interface.write_vector_gradient_data(1, 1, write_data) + read_data = solver_interface.read_block_scalar_data(1, np.array(range(9))) + self.assertTrue(np.array_equiv(np.array(write_data).flatten(), read_data.flatten())) + + def test_write_vector_gradient_data_non_contiguous(self): + """ + Tests behaviour of solver interface, if a non contiguous array is passed to the interface. + + Note: Check whether np.ndarray is contiguous via np.ndarray.flags. 
+ """ + solver_interface = precice.Interface("test", "dummy.xml", 0, 1) + dummy_array = np.random.rand(9, 3) + write_data = dummy_array[:, 1] + assert write_data.flags["C_CONTIGUOUS"] is False + solver_interface.write_vector_gradient_data(1, 1, write_data) + read_data = solver_interface.read_block_scalar_data(1, np.array(range(9))) + self.assertTrue(np.array_equiv(np.array(write_data).flatten(), read_data.flatten()))