Skip to content

Commit

Permalink
Use ttnn.TILE_SIZE instead of 32
Browse files Browse the repository at this point in the history
  • Loading branch information
jerrysky3 committed Sep 13, 2024
1 parent d4190f5 commit d270a13
Show file tree
Hide file tree
Showing 2 changed files with 4 additions and 1 deletion.
1 change: 1 addition & 0 deletions tests/lowering/eltwise/unary/test_fill.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@ def forward(self, input, value):
(2, 32, 64),
(32, 1),
(1, 32),
(16, 64),
],
)
def test_fill_scalar(device, input_shape):
Expand Down
4 changes: 3 additions & 1 deletion torch_ttnn/passes/lowering/to_tt_pass.py
Original file line number Diff line number Diff line change
Expand Up @@ -322,7 +322,7 @@ def has_valid_page_size(shape, strict=False):
return False
if not strict and shape[-1] < 32:
return True
return shape[-2] % 32 == 0 and shape[-1] % 32 == 0
return shape[-2] % ttnn.TILE_SIZE == 0 and shape[-1] % ttnn.TILE_SIZE == 0


# override some functions from torch.fx.graph.Graph
Expand Down Expand Up @@ -441,6 +441,7 @@ def rewrite_node(node):
if node_user.target == torch.ops.aten.div.Tensor:
node_user.update_arg(1, args[1])
return None

if node.target == torch.ops.aten.fill.Scalar:
shape = tuple(node.meta["val"].size())
if has_valid_page_size(shape, strict=True):
Expand All @@ -454,6 +455,7 @@ def rewrite_node(node):
args=(shape,),
kwargs=new_kwargs,
)

if node.target == torch.ops.aten.baddbmm.default:
# out = beta * input + alpha * (batch1 @ batch2)
# if beta is 0, input is ignored, and nan and inf in it will not be propagated
Expand Down

0 comments on commit d270a13

Please sign in to comment.