From 5030f71b36227a30a36505526751a90b2c738ad0 Mon Sep 17 00:00:00 2001
From: Michael Tarnawa
Date: Tue, 10 Mar 2020 08:47:52 +0100
Subject: [PATCH 01/13] implement flipud

---
 CHANGELOG.md                          |  1 +
 heat/core/manipulations.py            | 45 +++++++++++++++++++++++++++
 heat/core/tests/test_manipulations.py | 27 ++++++++++++++++
 3 files changed, 73 insertions(+)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index a84eda8ff0..2b6697d490 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -4,6 +4,7 @@
 - [#429](https://github.com/helmholtz-analytics/heat/pull/429) Added PyTorch Jitter to inner function of matmul for increased speed
 - [#483](https://github.com/helmholtz-analytics/heat/pull/483) Bugfix: Underlying torch tensor moves to the right device on array initialisation
 - [#483](https://github.com/helmholtz-analytics/heat/pull/483) Bugfix:DNDarray.cpu() changes heat device to cpu
+- [#496](https://github.com/helmholtz-analytics/heat/pull/496) New feature: flipud()
 
 # v0.3.0
diff --git a/heat/core/manipulations.py b/heat/core/manipulations.py
index ebd5f4882c..676a0a5530 100644
--- a/heat/core/manipulations.py
+++ b/heat/core/manipulations.py
@@ -14,6 +14,7 @@
     "diag",
     "diagonal",
     "expand_dims",
+    "flipud",
     "hstack",
     "resplit",
     "sort",
@@ -556,6 +557,50 @@ def expand_dims(a, axis):
     )
 
 
+def flipud(a):
+    """
+    Flip array in the up/down direction.
+
+    Parameters
+    ----------
+    a: ht.DNDarray
+        Input array to be flipped
+
+    Returns
+    -------
+    res: ht.DNDarray
+        The flipped array.
+
+    Examples
+    --------
+    >>> a = ht.array([[0,1],[2,3]])
+    >>> ht.flipud(a)
+    tensor([[2, 3],
+            [0, 1]])
+    """
+    # Nothing to do
+    if a.numdims <= 1:
+        return a
+
+    flipped = torch.flip(a._DNDarray__array, [0])
+
+    if a.split != 0:
+        return factories.array(
+            flipped, dtype=a.dtype, is_split=a.split, device=a.device, comm=a.comm
+        )
+
+    # Need to redistribute tensors on axis 0
+    lshape_map = a.create_lshape_map()
+    a.comm.Isend(flipped, dest=(a.comm.size - 1 - a.comm.rank))
+    received = torch.empty(
+        tuple(lshape_map[(a.comm.size - 1 - a.comm.rank)]), dtype=a._DNDarray__array.dtype
+    )
+    a.comm.Recv(received, source=(a.comm.size - 1 - a.comm.rank))
+    res = factories.array(received, dtype=a.dtype, is_split=a.split, device=a.device, comm=a.comm)
+    res.balance_()  # after swapping, first processes may be empty
+    return res
+
+
 def hstack(tup):
     """
     Stack arrays in sequence horizontally (column wise).
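The core of the patch above is a mirror-rank exchange: every process flips its local chunk with torch.flip, then trades chunks with the process mirrored around the middle of the rank order (rank size - 1 - rank), so rank 0 ends up holding what used to be the last block of rows, rank 1 the second-to-last, and so on. Below is a minimal standalone sketch of that pattern, reduced to mpi4py and NumPy rather than Heat internals; the names local and partner are illustrative, a blocking Sendrecv stands in for the patch's Isend/Recv pair, and equally sized blocks are assumed so no shape exchange is needed.

    import numpy as np
    from mpi4py import MPI

    comm = MPI.COMM_WORLD
    rank, size = comm.Get_rank(), comm.Get_size()

    # Each rank owns two rows of a (2 * size) x 3 global array split along axis 0.
    local = np.arange(rank * 6, rank * 6 + 6, dtype=np.int64).reshape(2, 3)

    # Flip the local block; copy() because np.flipud returns a reversed view
    # and MPI needs a contiguous buffer.
    flipped = np.flipud(local).copy()

    # Trade blocks with the mirrored rank (the middle rank of an odd-sized
    # communicator simply trades with itself, which is legal for Sendrecv).
    partner = size - 1 - rank
    received = np.empty_like(flipped)
    comm.Sendrecv(flipped, dest=partner, recvbuf=received, source=partner)

    print(f"rank {rank} now holds:\n{received}")

Run with, e.g., mpirun -n 4 python sketch.py. The commits that follow handle exactly what this sketch leaves out: unequal block sizes across ranks (the lshape bookkeeping), allocating the receive buffer on the right device, and waiting on the non-blocking send before returning.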
diff --git a/heat/core/tests/test_manipulations.py b/heat/core/tests/test_manipulations.py
index 5e77ee6a03..23dd640984 100644
--- a/heat/core/tests/test_manipulations.py
+++ b/heat/core/tests/test_manipulations.py
@@ -763,6 +763,33 @@ def test_expand_dims(self):
         with self.assertRaises(ValueError):
             ht.empty((3, 4, 5), device=ht_device).expand_dims(-5)
 
+    def test_flipud(self):
+        a = ht.array([1, 2])
+        self.assertTrue(ht.equal(ht.flipud(a), a))
+
+        b = ht.array([[1, 2], [3, 4]])
+        r_b = ht.array([[3, 4], [1, 2]])
+        self.assertTrue(ht.equal(ht.flipud(b), r_b))
+
+        # splitted
+        c = ht.array(
+            [[[0, 1], [2, 3]], [[4, 5], [6, 7]], [[8, 9], [10, 11]], [[12, 13], [14, 15]]], split=0
+        )
+        r_c = ht.array(
+            [[[12, 13], [14, 15]], [[8, 9], [10, 11]], [[4, 5], [6, 7]], [[0, 1], [2, 3]]], split=0
+        )
+        self.assertTrue(ht.equal(ht.flipud(c), r_c))
+
+        c = ht.array(
+            [[[0, 1], [2, 3]], [[4, 5], [6, 7]], [[8, 9], [10, 11]], [[12, 13], [14, 15]]], split=1
+        )
+        self.assertTrue(ht.equal(ht.resplit(ht.flipud(c), 0), r_c))
+
+        c = ht.array(
+            [[[0, 1], [2, 3]], [[4, 5], [6, 7]], [[8, 9], [10, 11]], [[12, 13], [14, 15]]], split=2
+        )
+        self.assertTrue(ht.equal(ht.resplit(ht.flipud(c), 0), r_c))
+
     def test_hstack(self):
         # cases to test:
         # MM===================================

From e15a1fa1ba26ea80eef8965dfe1d34814bd3d0f7 Mon Sep 17 00:00:00 2001
From: Michael Tarnawa
Date: Tue, 10 Mar 2020 09:07:34 +0100
Subject: [PATCH 02/13] gpu in test

---
 heat/core/manipulations.py            |  9 ++++-----
 heat/core/tests/test_manipulations.py | 22 +++++++++++++++-------
 2 files changed, 19 insertions(+), 12 deletions(-)

diff --git a/heat/core/manipulations.py b/heat/core/manipulations.py
index 676a0a5530..6629589b58 100644
--- a/heat/core/manipulations.py
+++ b/heat/core/manipulations.py
@@ -591,11 +591,10 @@ def flipud(a):
 
     # Need to redistribute tensors on axis 0
     lshape_map = a.create_lshape_map()
-    a.comm.Isend(flipped, dest=(a.comm.size - 1 - a.comm.rank))
-    received = torch.empty(
-        tuple(lshape_map[(a.comm.size - 1 - a.comm.rank)]), dtype=a._DNDarray__array.dtype
-    )
-    a.comm.Recv(received, source=(a.comm.size - 1 - a.comm.rank))
+    dest_proc = a.comm.size - 1 - a.comm.rank
+    a.comm.Isend(flipped, dest=dest_proc)
+    received = torch.empty(tuple(lshape_map[dest_proc]), dtype=a._DNDarray__array.dtype)
+    a.comm.Recv(received, source=dest_proc)
     res = factories.array(received, dtype=a.dtype, is_split=a.split, device=a.device, comm=a.comm)
     res.balance_()  # after swapping, first processes may be empty
     return res
diff --git a/heat/core/tests/test_manipulations.py b/heat/core/tests/test_manipulations.py
index 23dd640984..ed0ad6faa3 100644
--- a/heat/core/tests/test_manipulations.py
+++ b/heat/core/tests/test_manipulations.py
@@ -764,29 +764,37 @@ def test_expand_dims(self):
             ht.empty((3, 4, 5), device=ht_device).expand_dims(-5)
 
     def test_flipud(self):
-        a = ht.array([1, 2])
+        a = ht.array([1, 2], device=ht_device)
         self.assertTrue(ht.equal(ht.flipud(a), a))
 
-        b = ht.array([[1, 2], [3, 4]])
-        r_b = ht.array([[3, 4], [1, 2]])
+        b = ht.array([[1, 2], [3, 4]], device=ht_device)
+        r_b = ht.array([[3, 4], [1, 2]], device=ht_device)
         self.assertTrue(ht.equal(ht.flipud(b), r_b))
 
         # splitted
         c = ht.array(
-            [[[0, 1], [2, 3]], [[4, 5], [6, 7]], [[8, 9], [10, 11]], [[12, 13], [14, 15]]], split=0
+            [[[0, 1], [2, 3]], [[4, 5], [6, 7]], [[8, 9], [10, 11]], [[12, 13], [14, 15]]],
+            split=0,
+            device=ht_device,
         )
         r_c = ht.array(
-            [[[12, 13], [14, 15]], [[8, 9], [10, 11]], [[4, 5], [6, 7]], [[0, 1], [2, 3]]], split=0
+            [[[12, 13], [14, 15]], [[8, 9], [10, 11]], [[4, 5], [6, 7]], [[0, 1], [2, 3]]],
+            split=0,
+            device=ht_device,
         )
         self.assertTrue(ht.equal(ht.flipud(c), r_c))
 
         c = ht.array(
-            [[[0, 1], [2, 3]], [[4, 5], [6, 7]], [[8, 9], [10, 11]], [[12, 13], [14, 15]]], split=1
+            [[[0, 1], [2, 3]], [[4, 5], [6, 7]], [[8, 9], [10, 11]], [[12, 13], [14, 15]]],
+            split=1,
+            device=ht_device,
         )
         self.assertTrue(ht.equal(ht.resplit(ht.flipud(c), 0), r_c))
 
         c = ht.array(
-            [[[0, 1], [2, 3]], [[4, 5], [6, 7]], [[8, 9], [10, 11]], [[12, 13], [14, 15]]], split=2
+            [[[0, 1], [2, 3]], [[4, 5], [6, 7]], [[8, 9], [10, 11]], [[12, 13], [14, 15]]],
+            split=2,
+            device=ht_device,
         )
         self.assertTrue(ht.equal(ht.resplit(ht.flipud(c), 0), r_c))

From fa4b590bb7c380b7416baa6145b0171149841c6d Mon Sep 17 00:00:00 2001
From: Michael Tarnawa
Date: Tue, 10 Mar 2020 09:14:28 +0100
Subject: [PATCH 03/13] set torch_device

---
 heat/core/manipulations.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/heat/core/manipulations.py b/heat/core/manipulations.py
index 6629589b58..fb4d626c24 100644
--- a/heat/core/manipulations.py
+++ b/heat/core/manipulations.py
@@ -593,7 +593,7 @@ def flipud(a):
     lshape_map = a.create_lshape_map()
     dest_proc = a.comm.size - 1 - a.comm.rank
     a.comm.Isend(flipped, dest=dest_proc)
-    received = torch.empty(tuple(lshape_map[dest_proc]), dtype=a._DNDarray__array.dtype)
+    received = torch.empty(tuple(lshape_map[dest_proc]), dtype=a._DNDarray__array.dtype, device=device)
     a.comm.Recv(received, source=dest_proc)
     res = factories.array(received, dtype=a.dtype, is_split=a.split, device=a.device, comm=a.comm)
     res.balance_()  # after swapping, first processes may be empty

From 6eb16d446e89db2b8581537cb8ea78a8556acf6f Mon Sep 17 00:00:00 2001
From: Michael Tarnawa
Date: Tue, 10 Mar 2020 09:15:46 +0100
Subject: [PATCH 04/13] change device name

---
 heat/core/manipulations.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/heat/core/manipulations.py b/heat/core/manipulations.py
index fb4d626c24..0a260b9fbc 100644
--- a/heat/core/manipulations.py
+++ b/heat/core/manipulations.py
@@ -593,7 +593,7 @@ def flipud(a):
     lshape_map = a.create_lshape_map()
     dest_proc = a.comm.size - 1 - a.comm.rank
     a.comm.Isend(flipped, dest=dest_proc)
-    received = torch.empty(tuple(lshape_map[dest_proc]), dtype=a._DNDarray__array.dtype, device=device)
+    received = torch.empty(tuple(lshape_map[dest_proc]), dtype=a._DNDarray__array.dtype, device=a.device.torch_device)
     a.comm.Recv(received, source=dest_proc)
     res = factories.array(received, dtype=a.dtype, is_split=a.split, device=a.device, comm=a.comm)
     res.balance_()  # after swapping, first processes may be empty

From d84edda5b3f75536f4e038a8b835388ea1d1be7a Mon Sep 17 00:00:00 2001
From: Michael Tarnawa
Date: Tue, 10 Mar 2020 10:47:10 +0100
Subject: [PATCH 05/13] Wait processes before returning

---
 heat/core/manipulations.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/heat/core/manipulations.py b/heat/core/manipulations.py
index 0a260b9fbc..9f464d2c58 100644
--- a/heat/core/manipulations.py
+++ b/heat/core/manipulations.py
@@ -592,11 +592,12 @@ def flipud(a):
 
     # Need to redistribute tensors on axis 0
     lshape_map = a.create_lshape_map()
     dest_proc = a.comm.size - 1 - a.comm.rank
-    a.comm.Isend(flipped, dest=dest_proc)
+    req = a.comm.Isend(flipped, dest=dest_proc)
     received = torch.empty(tuple(lshape_map[dest_proc]), dtype=a._DNDarray__array.dtype, device=a.device.torch_device)
     a.comm.Recv(received, source=dest_proc)
     res = factories.array(received, dtype=a.dtype, is_split=a.split, device=a.device, comm=a.comm)
     res.balance_()  # after swapping, first processes may be empty
+    req.Wait()
     return res

From 6ac94baca067f0a79912011963bb478e92e2d878 Mon Sep 17 00:00:00 2001
From: Michael Tarnawa
Date: Tue, 10 Mar 2020 11:03:20 +0100
Subject: [PATCH 06/13] formatting and types

---
 heat/core/manipulations.py            | 4 +++-
 heat/core/tests/test_manipulations.py | 2 ++
 2 files changed, 5 insertions(+), 1 deletion(-)

diff --git a/heat/core/manipulations.py b/heat/core/manipulations.py
index 9f464d2c58..7f76285a29 100644
--- a/heat/core/manipulations.py
+++ b/heat/core/manipulations.py
@@ -593,7 +593,9 @@ def flipud(a):
     lshape_map = a.create_lshape_map()
     dest_proc = a.comm.size - 1 - a.comm.rank
     req = a.comm.Isend(flipped, dest=dest_proc)
-    received = torch.empty(tuple(lshape_map[dest_proc]), dtype=a._DNDarray__array.dtype, device=a.device.torch_device)
+    received = torch.empty(
+        tuple(lshape_map[dest_proc]), dtype=a._DNDarray__array.dtype, device=a.device.torch_device
+    )
     a.comm.Recv(received, source=dest_proc)
     res = factories.array(received, dtype=a.dtype, is_split=a.split, device=a.device, comm=a.comm)
     res.balance_()  # after swapping, first processes may be empty
diff --git a/heat/core/tests/test_manipulations.py b/heat/core/tests/test_manipulations.py
index ed0ad6faa3..c3cc83be1e 100644
--- a/heat/core/tests/test_manipulations.py
+++ b/heat/core/tests/test_manipulations.py
@@ -788,6 +788,7 @@ def test_flipud(self):
             [[[0, 1], [2, 3]], [[4, 5], [6, 7]], [[8, 9], [10, 11]], [[12, 13], [14, 15]]],
             split=1,
             device=ht_device,
+            dtype=ht.float32,
         )
         self.assertTrue(ht.equal(ht.resplit(ht.flipud(c), 0), r_c))
@@ -795,6 +796,7 @@ def test_flipud(self):
             [[[0, 1], [2, 3]], [[4, 5], [6, 7]], [[8, 9], [10, 11]], [[12, 13], [14, 15]]],
             split=2,
             device=ht_device,
+            dtype=ht.int16,
         )
         self.assertTrue(ht.equal(ht.resplit(ht.flipud(c), 0), r_c))

From 090d670dc77e84a0ffc7700894cba420841b2390 Mon Sep 17 00:00:00 2001
From: Michael Tarnawa
Date: Tue, 10 Mar 2020 12:02:39 +0100
Subject: [PATCH 07/13] change types

---
 heat/core/manipulations.py            | 2 ++
 heat/core/tests/test_manipulations.py | 2 +-
 2 files changed, 3 insertions(+), 1 deletion(-)

diff --git a/heat/core/manipulations.py b/heat/core/manipulations.py
index 7f76285a29..42fdaca155 100644
--- a/heat/core/manipulations.py
+++ b/heat/core/manipulations.py
@@ -592,11 +592,13 @@ def flipud(a):
     # Need to redistribute tensors on axis 0
     lshape_map = a.create_lshape_map()
     dest_proc = a.comm.size - 1 - a.comm.rank
+
     req = a.comm.Isend(flipped, dest=dest_proc)
     received = torch.empty(
         tuple(lshape_map[dest_proc]), dtype=a._DNDarray__array.dtype, device=a.device.torch_device
     )
     a.comm.Recv(received, source=dest_proc)
+
     res = factories.array(received, dtype=a.dtype, is_split=a.split, device=a.device, comm=a.comm)
     res.balance_()  # after swapping, first processes may be empty
     req.Wait()
diff --git a/heat/core/tests/test_manipulations.py b/heat/core/tests/test_manipulations.py
index c3cc83be1e..fdf5b7de2a 100644
--- a/heat/core/tests/test_manipulations.py
+++ b/heat/core/tests/test_manipulations.py
@@ -796,7 +796,7 @@ def test_flipud(self):
             [[[0, 1], [2, 3]], [[4, 5], [6, 7]], [[8, 9], [10, 11]], [[12, 13], [14, 15]]],
             split=2,
             device=ht_device,
-            dtype=ht.int16,
+            dtype=ht.int8,
         )
         self.assertTrue(ht.equal(ht.resplit(ht.flipud(c), 0), r_c))

From 0ccbab21aac9e7cf129c1ab71097ea3e22f2a3cd Mon Sep 17 00:00:00 2001
From: Michael Tarnawa
Date: Wed, 11 Mar 2020 09:38:52 +0100
Subject: [PATCH 08/13] corrected behaviour one dimension

---
 heat/core/manipulations.py            | 4 ----
 heat/core/tests/test_manipulations.py | 3 ++-
 2 files changed, 2 insertions(+), 5 deletions(-)

diff --git a/heat/core/manipulations.py b/heat/core/manipulations.py
index 42fdaca155..1b75ef700d 100644
--- a/heat/core/manipulations.py
+++ b/heat/core/manipulations.py
@@ -578,10 +578,6 @@ def flipud(a):
     tensor([[2, 3],
             [0, 1]])
     """
-    # Nothing to do
-    if a.numdims <= 1:
-        return a
-
     flipped = torch.flip(a._DNDarray__array, [0])
 
     if a.split != 0:
diff --git a/heat/core/tests/test_manipulations.py b/heat/core/tests/test_manipulations.py
index fdf5b7de2a..7a4f4fb7a4 100644
--- a/heat/core/tests/test_manipulations.py
+++ b/heat/core/tests/test_manipulations.py
@@ -765,7 +765,8 @@ def test_expand_dims(self):
 
     def test_flipud(self):
         a = ht.array([1, 2], device=ht_device)
-        self.assertTrue(ht.equal(ht.flipud(a), a))
+        r_a = ht.array([2, 1], device=ht_device)
+        self.assertTrue(ht.equal(ht.flipud(a), r_a))
 
         b = ht.array([[1, 2], [3, 4]], device=ht_device)
         r_b = ht.array([[3, 4], [1, 2]], device=ht_device)

From 5a81c459002bde7750b13610f0fe01bd1e928717 Mon Sep 17 00:00:00 2001
From: mtar
Date: Mon, 23 Mar 2020 08:05:56 +0100
Subject: [PATCH 09/13] add multiprocess example

---
 heat/core/manipulations.py | 5 +++++
 1 file changed, 5 insertions(+)

diff --git a/heat/core/manipulations.py b/heat/core/manipulations.py
index 1b75ef700d..818fd8cc53 100644
--- a/heat/core/manipulations.py
+++ b/heat/core/manipulations.py
@@ -577,6 +577,11 @@ def flipud(a):
     >>> ht.flipud(a)
     tensor([[2, 3],
             [0, 1]])
+
+    >>> b = ht.array([[0,1,2],[3,4,5]], split=0)
+    >>> ht.flipud(b)
+    (1/2) tensor([3,4,5])
+    (2/2) tensor([0,1,2])
     """
     flipped = torch.flip(a._DNDarray__array, [0])

From e8f0e329def49ad69ddf33252988321798a83480 Mon Sep 17 00:00:00 2001
From: Michael Tarnawa
Date: Mon, 23 Mar 2020 08:12:12 +0100
Subject: [PATCH 10/13] black formatting

---
 heat/core/manipulations.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/heat/core/manipulations.py b/heat/core/manipulations.py
index 818fd8cc53..3b3d7cdcf5 100644
--- a/heat/core/manipulations.py
+++ b/heat/core/manipulations.py
@@ -577,7 +577,7 @@ def flipud(a):
     >>> ht.flipud(a)
     tensor([[2, 3],
             [0, 1]])
-    
+
     >>> b = ht.array([[0,1,2],[3,4,5]], split=0)
     >>> ht.flipud(b)
     (1/2) tensor([3,4,5])

From 1cc18b14f5a71a3fc4f14a77389532e3bbc975c6 Mon Sep 17 00:00:00 2001
From: Michael Tarnawa
Date: Wed, 1 Apr 2020 08:47:08 +0200
Subject: [PATCH 11/13] replace lshape_map

---
 heat/core/manipulations.py | 11 +++++++++--
 1 file changed, 9 insertions(+), 2 deletions(-)

diff --git a/heat/core/manipulations.py b/heat/core/manipulations.py
index 3b3d7cdcf5..492214ac3e 100644
--- a/heat/core/manipulations.py
+++ b/heat/core/manipulations.py
@@ -591,12 +591,19 @@ def flipud(a):
         )
 
     # Need to redistribute tensors on axis 0
-    lshape_map = a.create_lshape_map()
+    old_lshape = torch.tensor(a.lshape, device=a.device.torch_device)
+
+    new_lshape = torch.empty((len(a.gshape),), dtype=int, device=a.device.torch_device)
+
     dest_proc = a.comm.size - 1 - a.comm.rank
+
+    request = a.comm.Irecv(new_lshape, source=dest_proc)
+    a.comm.Send(old_lshape, dest_proc)
+    request.Wait()
 
     req = a.comm.Isend(flipped, dest=dest_proc)
     received = torch.empty(
-        tuple(lshape_map[dest_proc]), dtype=a._DNDarray__array.dtype, device=a.device.torch_device
+        tuple(new_lshape), dtype=a._DNDarray__array.dtype, device=a.device.torch_device
     )
     a.comm.Recv(received, source=dest_proc)
 

From e269c077d542fd629b5590853f156aafcba7b756 Mon Sep 17 00:00:00 2001
From: Michael Tarnawa
Date: Wed, 1 Apr 2020 10:23:01 +0200
Subject: [PATCH 12/13] use sendrecv

---
 heat/core/manipulations.py | 14 +++-----------
 1 file changed, 3 insertions(+), 11 deletions(-)

diff --git a/heat/core/manipulations.py b/heat/core/manipulations.py
index 492214ac3e..7a08f06ad8 100644
--- a/heat/core/manipulations.py
+++ b/heat/core/manipulations.py
@@ -591,20 +591,12 @@ def flipud(a):
         )
 
     # Need to redistribute tensors on axis 0
-    old_lshape = torch.tensor(a.lshape, device=a.device.torch_device)
-
-    new_lshape = torch.empty((len(a.gshape),), dtype=int, device=a.device.torch_device)
-
+    old_lshape = a.lshape
     dest_proc = a.comm.size - 1 - a.comm.rank
-
-    request = a.comm.Irecv(new_lshape, source=dest_proc)
-    a.comm.Send(old_lshape, dest_proc)
-    request.Wait()
+    new_lshape = a.comm.sendrecv(old_lshape, dest=dest_proc, source=dest_proc)
 
     req = a.comm.Isend(flipped, dest=dest_proc)
-    received = torch.empty(
-        tuple(new_lshape), dtype=a._DNDarray__array.dtype, device=a.device.torch_device
-    )
+    received = torch.empty(new_lshape, dtype=a._DNDarray__array.dtype, device=a.device.torch_device)
     a.comm.Recv(received, source=dest_proc)
 
     res = factories.array(received, dtype=a.dtype, is_split=a.split, device=a.device, comm=a.comm)

From 658814ba1662b389b04177512467ebdb4ca4ee1a Mon Sep 17 00:00:00 2001
From: Michael Tarnawa
Date: Wed, 1 Apr 2020 14:43:05 +0200
Subject: [PATCH 13/13] formatting

---
 heat/core/manipulations.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/heat/core/manipulations.py b/heat/core/manipulations.py
index e272b95863..c5059e05bc 100644
--- a/heat/core/manipulations.py
+++ b/heat/core/manipulations.py
@@ -619,6 +619,7 @@ def flip(a, axis=None):
         req.Wait()
     return res
 
+
 def flipud(a):
     """
     Flip array in the up/down direction.
@@ -646,7 +647,7 @@ def flipud(a):
     (1/2) tensor([3,4,5])
     (2/2) tensor([0,1,2])
     """
     return flip(a, 0)
-
+
 def hstack(tup):
     """
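After the final patch, flipud is a thin wrapper around the general flip(a, axis) visible in the hunk context, so the mirror-rank redistribution now lives in flip. A short usage sketch against the merged result (assuming a Heat build that contains this PR; with two processes, each prints its local slice, as in the docstring example):

    import heat as ht

    b = ht.array([[0, 1, 2], [3, 4, 5]], split=0)
    print(ht.flipud(b))   # row order reversed across process boundaries
    print(ht.flip(b, 0))  # identical result: flipud(a) is defined as flip(a, 0)

Run under MPI, e.g. mpirun -n 2 python example.py.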