#12040: add transpose trace sweeps (#13252)
- add some extra permute trace sweeps
sjameelTT authored Sep 30, 2024
1 parent a117410 commit 8d6ee5d
Showing 5 changed files with 134 additions and 0 deletions.
2 changes: 2 additions & 0 deletions .github/workflows/ttnn-run-sweeps.yaml
@@ -75,6 +75,8 @@ on:
          - data_movement.slice.slice_pytorch2_tiled
          - data_movement.permute.permute_pytorch2_rm
          - data_movement.permute.permute_pytorch2_tiled
          - data_movement.transpose.transpose_pytorch2
          - data_movement.transpose.transpose_interleaved
  schedule:
    - cron: "0 21 * * *" # This cron schedule runs the workflow at 9:00pm UTC nightly
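
The two new entries register the transpose sweeps with the nightly dispatcher by module name. As a rough, illustrative sketch of how such a name maps to one of the new files below (the runner's import logic is not part of this diff, and the package prefix here is an assumption):

import importlib

# Hypothetical resolution of a workflow entry to its sweep module; the
# "sweeps" package prefix is an assumed detail, not shown in this diff.
sweep_name = "data_movement.transpose.transpose_interleaved"
module = importlib.import_module(f"sweeps.{sweep_name}")
print(module.TIMEOUT, list(module.parameters))  # attributes defined in the files below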

2 changes: 2 additions & 0 deletions tests/sweep_framework/sweeps/data_movement/permute/permute_pytorch2_rm.py
@@ -258,6 +258,8 @@
# {"shape": ["s0 + 1", "s0 + 1", 16], "dims": [2, 0, 1]},
# {"shape": ["s0 + 1", "s0 + 1", 6], "dims": [2, 0, 1]},
# {"shape": ["s0 + 1", "s0 + 1", 8], "dims": [2, 0, 1]}
{"shape": [1, 16, 256, 64], "dims": [0, 2, 1, 3]},
{"shape": [1, 256, 16, 64], "dims": [0, 2, 1, 3]},
],
"dtype": [ttnn.bfloat16],
"layout": [ttnn.ROW_MAJOR_LAYOUT],
2 changes: 2 additions & 0 deletions tests/sweep_framework/sweeps/data_movement/permute/permute_pytorch2_tiled.py
@@ -257,6 +257,8 @@
# {"shape": ["s0 + 1", "s0 + 1", 16], "dims": [2, 0, 1]},
# {"shape": ["s0 + 1", "s0 + 1", 6], "dims": [2, 0, 1]},
# {"shape": ["s0 + 1", "s0 + 1", 8], "dims": [2, 0, 1]}
{"shape": [1, 16, 256, 64], "dims": [0, 2, 1, 3]},
{"shape": [1, 256, 16, 64], "dims": [0, 2, 1, 3]},
],
"dtype": [ttnn.bfloat16, ttnn.bfloat8_b],
"layout": [ttnn.TILE_LAYOUT],
62 changes: 62 additions & 0 deletions tests/sweep_framework/sweeps/data_movement/transpose/transpose_interleaved.py
@@ -0,0 +1,62 @@
# SPDX-FileCopyrightText: © 2024 Tenstorrent Inc.

# SPDX-License-Identifier: Apache-2.0

import torch
import random
import ttnn

from typing import Optional, Tuple

from tests.ttnn.utils_for_testing import check_with_pcc, start_measuring_time, stop_measuring_time
from models.utility_functions import torch_random


TIMEOUT = 20 # longer timeout since permute calls transpose recursively
random.seed(0)


def generate_transpose_shape(num_samples):
    for _ in range(num_samples):
        shape = [random.randint(1, 96) for _ in range(4)]
        yield shape


parameters = {
    "interleaved_4d": {
        "shape": list(generate_transpose_shape(8)),
        "dim0": [-4, -3, -2, -1, 0, 1, 2, 3],
        "dim1": [-4, -3, -2, -1, 0, 1, 2, 3],
        "layout": [ttnn.ROW_MAJOR_LAYOUT, ttnn.TILE_LAYOUT],
        "dtype": [ttnn.bfloat16, ttnn.bfloat8_b],
    }
}
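
The sweep framework expands a suite into the cross product of its parameter lists; for "interleaved_4d" that is 8 shapes × 8 dim0 values × 8 dim1 values × 2 layouts × 2 dtypes = 2048 vectors before invalidation. A minimal sketch of that assumed expansion semantics (expand is a hypothetical stand-in, not the framework's own helper):

from itertools import product

def expand(suite):
    # Cartesian product over the suite's parameter lists, one dict per vector.
    keys = list(suite)
    for values in product(*suite.values()):
        yield dict(zip(keys, values))

print(sum(1 for _ in expand(parameters["interleaved_4d"])))  # 2048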


def invalidate_vector(test_vector) -> Tuple[bool, Optional[str]]:
    if test_vector["layout"] == ttnn.ROW_MAJOR_LAYOUT:
        if test_vector["dtype"] == ttnn.bfloat8_b:
            return True, "bfloat8_b not supported with ROW_MAJOR_LAYOUT"
    return False, None
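
Applied on top of that expansion, the filter above prunes every bfloat8_b + ROW_MAJOR_LAYOUT combination: 8 × 8 × 8 × 1 × 1 = 512 vectors, leaving 1536. Using the hypothetical expand() from the sketch above:

valid = [v for v in expand(parameters["interleaved_4d"]) if not invalidate_vector(v)[0]]
print(len(valid))  # 2048 - 512 = 1536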


def run(
    shape,
    dim0,
    dim1,
    layout,
    dtype,
    *,
    device,
):
    torch_input_tensor = torch_random(shape, -0.1, 0.1, dtype=torch.bfloat16)  # returns a torch tensor
    torch_output_tensor = torch.transpose(torch_input_tensor, dim0, dim1)

    ttnn_input_tensor = ttnn.from_torch(torch_input_tensor, device=device, dtype=dtype, layout=layout)

    start_time = start_measuring_time()
    ttnn_output = ttnn.transpose(ttnn_input_tensor, dim0, dim1)
    e2e_perf = stop_measuring_time(start_time)

    ttnn_output_tensor = ttnn.to_torch(ttnn_output)
    return [check_with_pcc(torch_output_tensor, ttnn_output_tensor, 0.9999), e2e_perf]
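
One property of the dim0/dim1 grids worth noting: a negative and a positive index can name the same axis of a 4-D tensor (-4 is 0, -3 is 1, and so on), so some generated vectors are identity transposes. A plain-PyTorch illustration:

import torch

x = torch.randn(2, 3, 4, 5)
assert torch.equal(torch.transpose(x, -4, 0), x)  # -4 and 0 name the same axis: a no-op
assert torch.transpose(x, -1, -2).shape == (2, 3, 5, 4)  # swap the last two dims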
66 changes: 66 additions & 0 deletions tests/sweep_framework/sweeps/data_movement/transpose/transpose_pytorch2.py
@@ -0,0 +1,66 @@
# SPDX-FileCopyrightText: © 2024 Tenstorrent Inc.

# SPDX-License-Identifier: Apache-2.0

import torch
import random
import ttnn

from typing import Optional, Tuple

from tests.ttnn.utils_for_testing import check_with_pcc, start_measuring_time, stop_measuring_time
from models.utility_functions import torch_random


TIMEOUT = 15 # longer timeout since permute calls transpose recursively
random.seed(0)

parameters = {
    "nightly": {
        "transpose_specs": [
            {"shape": [1, 16, 256, 64], "dim0": -1, "dim1": -2},
            {"shape": [1, 16, 256, 64], "dim0": 2, "dim1": 3},
            {"shape": [1024, 1024], "dim0": -1, "dim1": -2},
            {"shape": [1024, 4096], "dim0": -1, "dim1": -2},
            {"shape": [2, 1024], "dim0": -1, "dim1": -2},
            {"shape": [4096, 1024], "dim0": -1, "dim1": -2},
            {"shape": [1024, 1024], "dim0": 0, "dim1": 1},
            {"shape": [1024, 4096], "dim0": 0, "dim1": 1},
            {"shape": [2, 1024], "dim0": 0, "dim1": 1},
            {"shape": [4096, 1024], "dim0": 0, "dim1": 1},
            {"shape": [1, 32, 12, 100], "dim0": -2, "dim1": -3},
        ],
        "dtype": [ttnn.bfloat16, ttnn.bfloat8_b],
        "layout": [ttnn.ROW_MAJOR_LAYOUT, ttnn.TILE_LAYOUT],
    }
}
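
For reference, the expected output shape of each spec is just the input shape with the two indexed entries swapped. A throwaway helper (hypothetical, not part of the sweep) makes that concrete:

def transposed_shape(spec):
    # Swap the two indexed dims; Python list indexing handles the negatives.
    shape = list(spec["shape"])
    shape[spec["dim0"]], shape[spec["dim1"]] = shape[spec["dim1"]], shape[spec["dim0"]]
    return shape

print(transposed_shape({"shape": [1, 16, 256, 64], "dim0": -1, "dim1": -2}))  # [1, 16, 64, 256]
print(transposed_shape({"shape": [1, 32, 12, 100], "dim0": -2, "dim1": -3}))  # [1, 12, 32, 100]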


def invalidate_vector(test_vector) -> Tuple[bool, Optional[str]]:
    if test_vector["layout"] == ttnn.ROW_MAJOR_LAYOUT:
        if test_vector["dtype"] == ttnn.bfloat8_b:
            return True, "bfloat8_b not supported with ROW_MAJOR_LAYOUT"

    return False, None


def run(
    transpose_specs,
    dtype,
    layout,
    *,
    device,
):
    torch_input_tensor = torch_random(
        transpose_specs["shape"], -0.1, 0.1, dtype=torch.bfloat16
    )  # returns a torch tensor
    torch_output_tensor = torch.transpose(torch_input_tensor, transpose_specs["dim0"], transpose_specs["dim1"])

    ttnn_input_tensor = ttnn.from_torch(torch_input_tensor, device=device, dtype=dtype, layout=layout)

    start_time = start_measuring_time()
    ttnn_output = ttnn.transpose(ttnn_input_tensor, transpose_specs["dim0"], transpose_specs["dim1"])
    e2e_perf = stop_measuring_time(start_time)

    ttnn_output_tensor = ttnn.to_torch(ttnn_output)
    return [check_with_pcc(torch_output_tensor, ttnn_output_tensor, 0.9999), e2e_perf]
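
Both sweeps gate correctness on check_with_pcc(..., 0.9999), a Pearson-correlation comparison between the torch and ttnn outputs (the real helper lives in tests.ttnn.utils_for_testing). A minimal stand-in that conveys the idea, assumed rather than copied from that module:

import torch

def pcc(a: torch.Tensor, b: torch.Tensor) -> float:
    # Pearson correlation coefficient over the flattened tensors.
    stacked = torch.stack([a.flatten().float(), b.flatten().float()])
    return torch.corrcoef(stacked)[0, 1].item()

x = torch.randn(16, 16)
assert pcc(x, x.clone()) > 0.9999  # identical tensors correlate perfectly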
