From e9191c85b779ccb6fa00e355f6fb90a2e56ed044 Mon Sep 17 00:00:00 2001
From: Mark O'Connor
Date: Thu, 21 Nov 2024 10:19:33 +0000
Subject: [PATCH] #0: remove debug code

---
 models/demos/llama3/tt/distributed_norm.py | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/models/demos/llama3/tt/distributed_norm.py b/models/demos/llama3/tt/distributed_norm.py
index 8360c8006551..70cc23b602fb 100644
--- a/models/demos/llama3/tt/distributed_norm.py
+++ b/models/demos/llama3/tt/distributed_norm.py
@@ -11,10 +11,9 @@ def __init__(self, norm, args):
         self.norm = norm
         self.args = args
 
-    def forward(self, orig_x, mode):
+    def forward(self, x, mode):
         """Apply a norm, possibly gathering inputs if required."""
         input_mem_cfg = self.norm.sharded_output_config if mode == "decode" else ttnn.DRAM_MEMORY_CONFIG
-        x = orig_x
 
         # Distributed norm already performs a gather
         if self.args.is_multichip and not self.args.is_distributed_norm(mode):