Generate and update models ops tests and reimplement the models ops test failure update script #2642
5986 tests run, 5658 passed, 13 skipped, 315 failed.
Annotations
Check failure on line 18561 in forge/test/models_ops/test_add.py
github-actions / TT-Forge-FE Tests
test_add.test_module[Add0-[((2, 1, 2048), torch.float32), ((1, 2048), torch.float32)]]
ValueError: Data mismatch -> AutomaticValueChecker (compare_with_golden): framework_model=tensor([[[1.3600, 1.2153, 0.3787, ..., 0.6707, 1.7745, 0.3441]],
[[1.1024, 0.8027, 0.4666, ..., 0.8005, 1.5675, 0.1168]]]), compiled_model=tensor([[[1.3600e+00, 1.2153e+00, 3.7866e-01, ..., 6.7065e-01,
1.7745e+00, 3.4409e-01]],
[[9.6467e-01, 4.3116e-01, 8.6057e-01, ..., 4.2634e+33,
8.6740e+33, 1.7471e+13]]])
Raw output
forge_module_and_shapes_dtypes = (<class 'test.models_ops.test_add.Add0'>, [((2, 1, 2048), torch.float32), ((1, 2048), torch.float32)], {'model_name': ['pt_stereo_facebook_musicgen_large_music_generation_hf']})
record_forge_property = <function record_property.<locals>.append_property at 0x7fb617a39ea0>
@pytest.mark.push
@pytest.mark.parametrize("forge_module_and_shapes_dtypes", forge_modules_and_shapes_dtypes_list, ids=ids_func)
def test_module(forge_module_and_shapes_dtypes, record_forge_property):
    record_forge_property("op_name", "Add")
    forge_module, operand_shapes_dtypes, metadata = forge_module_and_shapes_dtypes
    pcc = metadata.pop("pcc")
    for metadata_name, metadata_value in metadata.items():
        record_forge_property(metadata_name, metadata_value)
    max_int = 1000
    inputs = [
        Tensor.create_from_shape(operand_shape, operand_dtype, max_int=max_int)
        for operand_shape, operand_dtype in operand_shapes_dtypes
    ]
    framework_model = forge_module(forge_module.__name__)
    framework_model.process_framework_parameters()
    for name, parameter in framework_model._parameters.items():
        parameter_tensor = Tensor.create_torch_tensor(
            shape=parameter.shape.get_pytorch_shape(), dtype=parameter.pt_data_format, max_int=max_int
        )
        framework_model.set_parameter(name, parameter_tensor)
    for name, constant in framework_model._constants.items():
        constant_tensor = Tensor.create_torch_tensor(
            shape=constant.shape.get_pytorch_shape(), dtype=constant.pt_data_format, max_int=max_int
        )
        framework_model.set_constant(name, constant_tensor)
    compiled_model = compile(framework_model, sample_inputs=inputs)
>   verify(inputs, framework_model, compiled_model, VerifyConfig(value_checker=AutomaticValueChecker(pcc=pcc)))
forge/test/models_ops/test_add.py:18561:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
/opt/ttforge-toolchain/venv/lib/python3.10/site-packages/forge/verify/verify.py:333: in verify
verify_cfg.value_checker.check(fw, co)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <forge.verify.value_checkers.AutomaticValueChecker object at 0x7fb617a1c340>
fw_out = tensor([[[1.3600, 1.2153, 0.3787, ..., 0.6707, 1.7745, 0.3441]],
[[1.1024, 0.8027, 0.4666, ..., 0.8005, 1.5675, 0.1168]]])
co_out = tensor([[[1.3600e+00, 1.2153e+00, 3.7866e-01, ..., 6.7065e-01,
1.7745e+00, 3.4409e-01]],
[[9.6467e-01, 4.3116e-01, 8.6057e-01, ..., 4.2634e+33,
8.6740e+33, 1.7471e+13]]])
    def check(self, fw_out, co_out):
        if not compare_with_golden(fw_out, co_out, self.pcc, self.rtol, self.atol, self.dissimilarity_threshold):
>           raise ValueError(
                f"Data mismatch -> AutomaticValueChecker (compare_with_golden): framework_model={fw_out}, compiled_model={co_out}"
            )
E       ValueError: Data mismatch -> AutomaticValueChecker (compare_with_golden): framework_model=tensor([...]), compiled_model=tensor([...])  (full tensor values are identical to those in the annotation summary above and are elided here)
/opt/ttforge-toolchain/venv/lib/python3.10/site-packages/forge/verify/value_checkers.py:38: ValueError
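Note on this class of failure (and the similar Data mismatch failures below): AutomaticValueChecker's compare_with_golden is a correlation-style check between the framework (golden) output and the compiled output, and it raises once the correlation drops below the configured pcc threshold. The sketch below shows the general shape of such a PCC check for triage purposes; it is an illustration, not the actual compare_with_golden implementation in forge/verify, and the 0.99 default threshold is a placeholder.

import torch

def pcc(golden: torch.Tensor, calculated: torch.Tensor) -> float:
    # Pearson correlation coefficient over the flattened tensors.
    g = golden.flatten().double()
    c = calculated.flatten().double()
    g = g - g.mean()
    c = c - c.mean()
    denom = g.norm() * c.norm()
    if denom == 0:
        # Constant tensors: fall back to an exact comparison.
        return 1.0 if torch.equal(golden, calculated) else 0.0
    return float((g @ c) / denom)

def passes_golden_check(golden: torch.Tensor, calculated: torch.Tensor, required_pcc: float = 0.99) -> bool:
    # Fails in the same spirit as the check in the traceback: correlation below the threshold.
    return pcc(golden, calculated) >= required_pcc

In the Add0 failure above, the first batch row of compiled_model matches framework_model while the second row diverges (values up to roughly 1e33), which is exactly the kind of output that pulls the PCC below the threshold and triggers the ValueError.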
Check failure on line 18561 in forge/test/models_ops/test_add.py
github-actions / TT-Forge-FE Tests
test_add.test_module[Add0-[((2, 1, 1536), torch.float32), ((1, 1536), torch.float32)]]
ValueError: Data mismatch -> AutomaticValueChecker (compare_with_golden): framework_model=tensor([[[1.4631, 1.6477, 0.1510, ..., 1.4470, 0.5821, 1.1420]],
[[1.3005, 1.5406, 0.7837, ..., 1.5979, 0.8316, 1.7089]]]), compiled_model=tensor([[[1.4631, 1.6477, 0.1510, ..., 1.4470, 0.5821, 1.1420]],
[[0.5513, 1.4046, 1.5054, ..., 0.6761, 1.2908, 1.5603]]])
Raw output
forge_module_and_shapes_dtypes = (<class 'test.models_ops.test_add.Add0'>, [((2, 1, 1536), torch.float32), ((1, 1536), torch.float32)], {'model_name': ['pt_stereo_facebook_musicgen_medium_music_generation_hf']})
record_forge_property = <function record_property.<locals>.append_property at 0x7fb6142879a0>
@pytest.mark.push
@pytest.mark.parametrize("forge_module_and_shapes_dtypes", forge_modules_and_shapes_dtypes_list, ids=ids_func)
def test_module(forge_module_and_shapes_dtypes, record_forge_property):
    record_forge_property("op_name", "Add")
    forge_module, operand_shapes_dtypes, metadata = forge_module_and_shapes_dtypes
    pcc = metadata.pop("pcc")
    for metadata_name, metadata_value in metadata.items():
        record_forge_property(metadata_name, metadata_value)
    max_int = 1000
    inputs = [
        Tensor.create_from_shape(operand_shape, operand_dtype, max_int=max_int)
        for operand_shape, operand_dtype in operand_shapes_dtypes
    ]
    framework_model = forge_module(forge_module.__name__)
    framework_model.process_framework_parameters()
    for name, parameter in framework_model._parameters.items():
        parameter_tensor = Tensor.create_torch_tensor(
            shape=parameter.shape.get_pytorch_shape(), dtype=parameter.pt_data_format, max_int=max_int
        )
        framework_model.set_parameter(name, parameter_tensor)
    for name, constant in framework_model._constants.items():
        constant_tensor = Tensor.create_torch_tensor(
            shape=constant.shape.get_pytorch_shape(), dtype=constant.pt_data_format, max_int=max_int
        )
        framework_model.set_constant(name, constant_tensor)
    compiled_model = compile(framework_model, sample_inputs=inputs)
>   verify(inputs, framework_model, compiled_model, VerifyConfig(value_checker=AutomaticValueChecker(pcc=pcc)))
forge/test/models_ops/test_add.py:18561:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
/opt/ttforge-toolchain/venv/lib/python3.10/site-packages/forge/verify/verify.py:333: in verify
verify_cfg.value_checker.check(fw, co)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <forge.verify.value_checkers.AutomaticValueChecker object at 0x7fb6171d6a70>
fw_out = tensor([[[1.4631, 1.6477, 0.1510, ..., 1.4470, 0.5821, 1.1420]],
[[1.3005, 1.5406, 0.7837, ..., 1.5979, 0.8316, 1.7089]]])
co_out = tensor([[[1.4631, 1.6477, 0.1510, ..., 1.4470, 0.5821, 1.1420]],
[[0.5513, 1.4046, 1.5054, ..., 0.6761, 1.2908, 1.5603]]])
    def check(self, fw_out, co_out):
        if not compare_with_golden(fw_out, co_out, self.pcc, self.rtol, self.atol, self.dissimilarity_threshold):
>           raise ValueError(
                f"Data mismatch -> AutomaticValueChecker (compare_with_golden): framework_model={fw_out}, compiled_model={co_out}"
            )
E       ValueError: Data mismatch -> AutomaticValueChecker (compare_with_golden): framework_model=tensor([...]), compiled_model=tensor([...])  (full tensor values are identical to those in the annotation summary above and are elided here)
/opt/ttforge-toolchain/venv/lib/python3.10/site-packages/forge/verify/value_checkers.py:38: ValueError
Check failure on line 18561 in forge/test/models_ops/test_add.py
github-actions / TT-Forge-FE Tests
test_add.test_module[Add0-[((2, 1, 1024), torch.float32), ((1, 1024), torch.float32)]]
ValueError: Data mismatch -> AutomaticValueChecker (compare_with_golden): framework_model=tensor([[[0.7350, 1.1238, 0.2649, ..., 0.8409, 0.9051, 1.4962]],
[[0.7037, 0.3834, 0.3881, ..., 1.2408, 1.5215, 1.0661]]]), compiled_model=tensor([[[0.7350, 1.1238, 0.2649, ..., 0.8409, 0.9051, 1.4962]],
[[0.5946, 0.1729, 0.9468, ..., 0.5875, 0.8263, 0.2909]]])
Raw output
forge_module_and_shapes_dtypes = (<class 'test.models_ops.test_add.Add0'>, [((2, 1, 1024), torch.float32), ((1, 1024), torch.float32)], {'model_name': ['pt_stereo_facebook_musicgen_small_music_generation_hf']})
record_forge_property = <function record_property.<locals>.append_property at 0x7fb6142877f0>
@pytest.mark.push
@pytest.mark.parametrize("forge_module_and_shapes_dtypes", forge_modules_and_shapes_dtypes_list, ids=ids_func)
def test_module(forge_module_and_shapes_dtypes, record_forge_property):
    record_forge_property("op_name", "Add")
    forge_module, operand_shapes_dtypes, metadata = forge_module_and_shapes_dtypes
    pcc = metadata.pop("pcc")
    for metadata_name, metadata_value in metadata.items():
        record_forge_property(metadata_name, metadata_value)
    max_int = 1000
    inputs = [
        Tensor.create_from_shape(operand_shape, operand_dtype, max_int=max_int)
        for operand_shape, operand_dtype in operand_shapes_dtypes
    ]
    framework_model = forge_module(forge_module.__name__)
    framework_model.process_framework_parameters()
    for name, parameter in framework_model._parameters.items():
        parameter_tensor = Tensor.create_torch_tensor(
            shape=parameter.shape.get_pytorch_shape(), dtype=parameter.pt_data_format, max_int=max_int
        )
        framework_model.set_parameter(name, parameter_tensor)
    for name, constant in framework_model._constants.items():
        constant_tensor = Tensor.create_torch_tensor(
            shape=constant.shape.get_pytorch_shape(), dtype=constant.pt_data_format, max_int=max_int
        )
        framework_model.set_constant(name, constant_tensor)
    compiled_model = compile(framework_model, sample_inputs=inputs)
>   verify(inputs, framework_model, compiled_model, VerifyConfig(value_checker=AutomaticValueChecker(pcc=pcc)))
forge/test/models_ops/test_add.py:18561:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
/opt/ttforge-toolchain/venv/lib/python3.10/site-packages/forge/verify/verify.py:333: in verify
verify_cfg.value_checker.check(fw, co)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <forge.verify.value_checkers.AutomaticValueChecker object at 0x7fb617478190>
fw_out = tensor([[[0.7350, 1.1238, 0.2649, ..., 0.8409, 0.9051, 1.4962]],
[[0.7037, 0.3834, 0.3881, ..., 1.2408, 1.5215, 1.0661]]])
co_out = tensor([[[0.7350, 1.1238, 0.2649, ..., 0.8409, 0.9051, 1.4962]],
[[0.5946, 0.1729, 0.9468, ..., 0.5875, 0.8263, 0.2909]]])
    def check(self, fw_out, co_out):
        if not compare_with_golden(fw_out, co_out, self.pcc, self.rtol, self.atol, self.dissimilarity_threshold):
>           raise ValueError(
                f"Data mismatch -> AutomaticValueChecker (compare_with_golden): framework_model={fw_out}, compiled_model={co_out}"
            )
E       ValueError: Data mismatch -> AutomaticValueChecker (compare_with_golden): framework_model=tensor([...]), compiled_model=tensor([...])  (full tensor values are identical to those in the annotation summary above and are elided here)
/opt/ttforge-toolchain/venv/lib/python3.10/site-packages/forge/verify/value_checkers.py:38: ValueError
Check failure on line 18561 in forge/test/models_ops/test_add.py
github-actions / TT-Forge-FE Tests
test_add.test_module[Add46-[((1, 256), torch.int64)]]
TypeError: Dtype mismatch: framework_model.dtype=torch.int64, compiled_model.dtype=torch.int32
Raw output
forge_module_and_shapes_dtypes = (<class 'test.models_ops.test_add.Add46'>, [((1, 256), torch.int64)], {'model_name': ['pt_opt_facebook_opt_1_3b_clm_hf', 'pt_opt_facebook_opt_125m_clm_hf', 'pt_opt_facebook_opt_350m_clm_hf']})
record_forge_property = <function record_property.<locals>.append_property at 0x7fb614232b00>
@pytest.mark.push
@pytest.mark.parametrize("forge_module_and_shapes_dtypes", forge_modules_and_shapes_dtypes_list, ids=ids_func)
def test_module(forge_module_and_shapes_dtypes, record_forge_property):
    record_forge_property("op_name", "Add")
    forge_module, operand_shapes_dtypes, metadata = forge_module_and_shapes_dtypes
    pcc = metadata.pop("pcc")
    for metadata_name, metadata_value in metadata.items():
        record_forge_property(metadata_name, metadata_value)
    max_int = 1000
    inputs = [
        Tensor.create_from_shape(operand_shape, operand_dtype, max_int=max_int)
        for operand_shape, operand_dtype in operand_shapes_dtypes
    ]
    framework_model = forge_module(forge_module.__name__)
    framework_model.process_framework_parameters()
    for name, parameter in framework_model._parameters.items():
        parameter_tensor = Tensor.create_torch_tensor(
            shape=parameter.shape.get_pytorch_shape(), dtype=parameter.pt_data_format, max_int=max_int
        )
        framework_model.set_parameter(name, parameter_tensor)
    for name, constant in framework_model._constants.items():
        constant_tensor = Tensor.create_torch_tensor(
            shape=constant.shape.get_pytorch_shape(), dtype=constant.pt_data_format, max_int=max_int
        )
        framework_model.set_constant(name, constant_tensor)
    compiled_model = compile(framework_model, sample_inputs=inputs)
>   verify(inputs, framework_model, compiled_model, VerifyConfig(value_checker=AutomaticValueChecker(pcc=pcc)))
forge/test/models_ops/test_add.py:18561:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
inputs = [Forge Tensor: tensor([[ 44, 239, 933, 760, 963, 379, 427, 503, 497, 683, 101, 866, 756, 399,
878, 376, 56, ...537, 288, 420, 265, 830, 413, 965, 795, 833, 696, 552, 532, 35, 475,
169, 374, 973, 703]]), DataFormat.Int32]
framework_model = Module Add46
compiled_model = <forge.compiled_graph_state.CompiledModel object at 0x7fb5d0daf3a0>
verify_cfg = VerifyConfig(enabled=True, verify_size=True, verify_dtype=True, verify_shape=True, verify_values=True, value_checker=<forge.verify.value_checkers.AutomaticValueChecker object at 0x7fb617078d60>, dump_tensors=False, dump_tensors_path='')
def verify(
    inputs: List[Union[torch.Tensor, tf.Tensor, tf.Variable]],
    framework_model: Union[torch.nn.Module, tf.Module, tf.keras.Model],
    compiled_model: CompiledModel,
    verify_cfg: VerifyConfig = VerifyConfig(),
):
    """
    Verify the compiled model against the framework model
    """
    if not verify_cfg.enabled:
        logger.warning("Verification is disabled")
        return
    # 0th step: input checks
    # Check if inputs are of the correct type
    if not inputs:
        raise ValueError("Input tensors must be provided")
    for input_tensor in inputs:
        if not isinstance(input_tensor, verify_cfg.supported_tensor_types):
            raise TypeError(
                f"Input tensor must be of type {verify_cfg.supported_tensor_types}, but got {type(input_tensor)}"
            )
    if not isinstance(framework_model, verify_cfg.framework_model_types):
        raise TypeError(
            f"Framework model must be of type {verify_cfg.framework_model_types}, but got {type(framework_model)}"
        )
    if not isinstance(compiled_model, verify_cfg.compiled_model_types):
        raise TypeError(
            f"Compiled model must be of type {verify_cfg.compiled_model_types}, but got {type(compiled_model)}"
        )
    # 1st step: run forward pass for the networks
    fw_out = framework_model(*inputs)
    co_out = compiled_model(*inputs)
    # 2nd step: apply preprocessing (push tensors to cpu, perform any reshape if necessary,
    # cast from tensorflow tensors to pytorch tensors if needed)
    fw_out = to_pt_tensors(fw_out)
    assert all(isinstance(co, torch.Tensor) for co in co_out), f"Compiled model output is not a list of torch.Tensor"
    co_out = [co.to("cpu") for co in co_out]
    # 3rd step: verifications of outputs
    # - size check
    # - dtype check
    # - shape check
    # - compare with golden
    if verify_cfg.verify_size:
        if len(fw_out) != len(co_out):
            raise ValueError(
                f"Number of outputs from framework model and compiled model do not match: framework model has {len(fw_out)} outputs, compiled model has {len(co_out)} outputs"
            )
    for fw, co in zip(fw_out, co_out):
        if verify_cfg.verify_dtype:
            if fw.dtype != co.dtype:
>               raise TypeError(f"Dtype mismatch: framework_model.dtype={fw.dtype}, compiled_model.dtype={co.dtype}")
E TypeError: Dtype mismatch: framework_model.dtype=torch.int64, compiled_model.dtype=torch.int32
/opt/ttforge-toolchain/venv/lib/python3.10/site-packages/forge/verify/verify.py:326: TypeError
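Note on this failure: unlike the Data mismatch cases, the Add46 case fails before any value comparison because the dtypes disagree — the framework (golden) path keeps the torch.int64 input dtype while the compiled path returns torch.int32 (the inputs local above already shows DataFormat.Int32). Below is a minimal sketch of the check that trips, plus a hypothetical cast-back helper that can be useful during triage to see whether only the dtype differs; the helper is illustrative and not part of the forge.verify API.

import torch

def check_dtype(fw_out: torch.Tensor, co_out: torch.Tensor) -> None:
    # Mirrors the verify_dtype branch above: raise as soon as dtypes differ,
    # before any value comparison is attempted.
    if fw_out.dtype != co_out.dtype:
        raise TypeError(f"Dtype mismatch: framework_model.dtype={fw_out.dtype}, compiled_model.dtype={co_out.dtype}")

def values_match_ignoring_dtype(fw_out: torch.Tensor, co_out: torch.Tensor) -> bool:
    # Hypothetical triage helper: widen the compiled output back to the framework
    # dtype and compare exactly, to separate "wrong dtype" from "wrong data".
    return torch.equal(fw_out, co_out.to(fw_out.dtype))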
Check failure on line 18561 in forge/test/models_ops/test_add.py
github-actions / TT-Forge-FE Tests
test_add.test_module[Add1-[((1, 120, 1, 1), torch.float32)]]
ValueError: Data mismatch -> AutomaticValueChecker (compare_with_golden): framework_model=tensor([[[[1.1925]],
[[1.4644]],
[[0.7847]],
[[0.8282]],
[[1.0036]],
[[1.3303]],
[[1.1863]],
[[1.5927]],
[[1.1518]],
[[1.3285]],
[[1.0451]],
[[1.0979]],
[[0.7185]],
[[0.8651]],
[[0.9901]],
[[1.2147]],
[[1.3939]],
[[1.4962]],
[[0.8572]],
[[0.9785]],
[[1.3778]],
[[1.6114]],
[[1.0933]],
[[1.5704]],
[[1.1156]],
[[1.2491]],
[[1.6490]],
[[0.7324]],
[[0.8814]],
[[1.0696]],
[[1.0013]],
[[1.6282]],
[[0.8721]],
[[0.9660]],
[[0.8469]],
[[0.7279]],
[[0.9043]],
[[1.6260]],
[[1.4193]],
[[1.4386]],
[[1.2225]],
[[0.9399]],
[[1.2808]],
[[0.7294]],
[[0.8349]],
[[0.9384]],
[[1.5117]],
[[1.4894]],
[[0.9745]],
[[1.1782]],
[[1.5160]],
[[1.6933]],
[[1.3947]],
[[1.2638]],
[[1.5315]],
[[0.9018]],
[[1.2894]],
[[0.8086]],
[[0.8497]],
[[0.9379]],
[[1.4225]],
[[1.3973]],
[[0.9000]],
[[1.3473]],
[[1.4707]],
[[1.1331]],
[[1.2153]],
[[1.3121]],
[[1.5064]],
[[1.6763]],
[[0.8109]],
[[1.0130]],
[[1.3927]],
[[1.6105]],
[[1.6313]],
[[1.6374]],
[[1.2957]],
[[0.7614]],
[[1.2422]],
[[0.8834]],
[[0.7302]],
[[1.6405]],
[[1.5764]],
[[0.6975]],
[[1.2898]],
[[1.1120]],
[[1.1139]],
[[0.9673]],
[[1.3885]],
[[0.9001]],
[[1.3795]],
[[1.4491]],
[[1.5541]],
[[1.3832]],
[[0.7013]],
[[0.8719]],
[[1.4459]],
[[1.3009]],
[[0.8062]],
[[0.9083]],
[[1.6666]],
[[1.5331]],
[[0.9782]],
[[1.0704]],
[[0.7199]],
[[1.1872]],
[[0.8197]],
[[0.8105]],
[[1.1687]],
[[1.2713]],
[[0.9914]],
[[1.4929]],
[[0.8919]],
[[1.6499]],
[[1.5389]],
[[0.7746]],
[[1.0718]],
[[1.2188]],
[[1.2692]],
[[1.3148]]]]), compiled_model=tensor([[[[1.1925]],
[[0.8148]],
[[0.6633]],
[[0.2287]],
[[1.3056]],
[[0.9863]],
[[1.1027]],
[[1.2537]],
[[0.7629]],
[[1.1383]],
[[1.3192]],
[[0.9223]],
[[0.1159]],
[[0.7854]],
[[0.8128]],
[[0.6727]],
[[0.8069]],
[[1.3291]],
[[0.8705]],
[[0.8340]],
[[0.6928]],
[[1.1125]],
[[1.0079]],
[[1.1160]],
[[0.6278]],
[[1.3501]],
[[1.2956]],
[[0.9238]],
[[0.8133]],
[[0.8396]],
[[0.5405]],
[[1.1268]],
[[0.6458]],
[[1.0180]],
[[0.4824]],
[[0.0414]],
[[0.7002]],
[[1.5226]],
[[1.6835]],
[[0.8462]],
[[1.1809]],
[[1.1559]],
[[0.9756]],
[[0.5071]],
[[0.9486]],
[[0.4561]],
[[1.2770]],
[[0.8888]],
[[1.7509]],
[[1.6343]],
[[2.6938]],
[[1.9780]],
[[2.1135]],
[[1.4382]],
[[2.2766]],
[[1.3394]],
[[1.5243]],
[[0.4717]],
[[1.1535]],
[[0.5437]],
[[2.3805]],
[[2.1952]],
[[1.8718]],
[[1.7292]],
[[2.4024]],
[[1.7025]],
[[1.2691]],
[[1.2936]],
[[1.7511]],
[[1.7696]],
[[0.8046]],
[[1.3133]],
[[1.4113]],
[[1.5271]],
[[2.2408]],
[[2.2586]],
[[1.4315]],
[[1.0232]],
[[0.8507]],
[[0.5651]],
[[1.6746]],
[[2.3368]],
[[2.5706]],
[[1.0940]],
[[1.7166]],
[[1.2639]],
[[1.2058]],
[[1.0001]],
[[2.1825]],
[[1.5896]],
[[1.9450]],
[[2.1591]],
[[2.6421]],
[[1.7768]],
[[1.0637]],
[[1.9813]],
[[2.1842]],
[[1.6359]],
[[0.6158]],
[[1.3820]],
[[1.9801]],
[[1.4038]],
[[0.7981]],
[[1.5333]],
[[0.7468]],
[[1.2454]],
[[1.1196]],
[[1.0777]],
[[2.2947]],
[[1.4906]],
[[0.5606]],
[[1.1612]],
[[1.3862]],
[[1.9556]],
[[1.8524]],
[[1.3284]],
[[0.8848]],
[[1.4884]],
[[1.7438]],
[[1.1415]]]])
Raw output
forge_module_and_shapes_dtypes = (<class 'test.models_ops.test_add.Add1'>, [((1, 120, 1, 1), torch.float32)], {'model_name': ['pt_ghostnet_ghostnet_100..._cls_timm', 'pt_mobilnetv3_mobilenetv3_large_100_img_cls_timm', 'pt_mobilenetv3_mobilenet_v3_large_img_cls_torchhub']})
record_forge_property = <function record_property.<locals>.append_property at 0x7fb5e7eda5f0>
@pytest.mark.push
@pytest.mark.parametrize("forge_module_and_shapes_dtypes", forge_modules_and_shapes_dtypes_list, ids=ids_func)
def test_module(forge_module_and_shapes_dtypes, record_forge_property):
    record_forge_property("op_name", "Add")
    forge_module, operand_shapes_dtypes, metadata = forge_module_and_shapes_dtypes
    pcc = metadata.pop("pcc")
    for metadata_name, metadata_value in metadata.items():
        record_forge_property(metadata_name, metadata_value)
    max_int = 1000
    inputs = [
        Tensor.create_from_shape(operand_shape, operand_dtype, max_int=max_int)
        for operand_shape, operand_dtype in operand_shapes_dtypes
    ]
    framework_model = forge_module(forge_module.__name__)
    framework_model.process_framework_parameters()
    for name, parameter in framework_model._parameters.items():
        parameter_tensor = Tensor.create_torch_tensor(
            shape=parameter.shape.get_pytorch_shape(), dtype=parameter.pt_data_format, max_int=max_int
        )
        framework_model.set_parameter(name, parameter_tensor)
    for name, constant in framework_model._constants.items():
        constant_tensor = Tensor.create_torch_tensor(
            shape=constant.shape.get_pytorch_shape(), dtype=constant.pt_data_format, max_int=max_int
        )
        framework_model.set_constant(name, constant_tensor)
    compiled_model = compile(framework_model, sample_inputs=inputs)
>   verify(inputs, framework_model, compiled_model, VerifyConfig(value_checker=AutomaticValueChecker(pcc=pcc)))
forge/test/models_ops/test_add.py:18561:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
/opt/ttforge-toolchain/venv/lib/python3.10/site-packages/forge/verify/verify.py:333: in verify
verify_cfg.value_checker.check(fw, co)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <forge.verify.value_checkers.AutomaticValueChecker object at 0x7fb5d0dae890>
fw_out = tensor([[[[1.1925]],
[[1.4644]],
[[0.7847]],
[[0.8282]],
[[1.0036]],
...5389]],
[[0.7746]],
[[1.0718]],
[[1.2188]],
[[1.2692]],
[[1.3148]]]])
co_out = tensor([[[[1.1925]],
[[0.8148]],
[[0.6633]],
[[0.2287]],
[[1.3056]],
...8524]],
[[1.3284]],
[[0.8848]],
[[1.4884]],
[[1.7438]],
[[1.1415]]]])
    def check(self, fw_out, co_out):
        if not compare_with_golden(fw_out, co_out, self.pcc, self.rtol, self.atol, self.dissimilarity_threshold):
>           raise ValueError(
                f"Data mismatch -> AutomaticValueChecker (compare_with_golden): framework_model={fw_out}, compiled_model={co_out}"
            )
E       ValueError: Data mismatch -> AutomaticValueChecker (compare_with_golden): framework_model=tensor([...]), compiled_model=tensor([...])  (full tensor values are identical to those in the annotation summary above and are elided here)
/opt/ttforge-toolchain/venv/lib/python3.10/site-packages/forge/verify/value_checkers.py:38: ValueError
Check failure on line 18561 in forge/test/models_ops/test_add.py
github-actions / TT-Forge-FE Tests
test_add.test_module[Add1-[((1, 16, 1, 1), torch.float32)]]
ValueError: Data mismatch -> AutomaticValueChecker (compare_with_golden): framework_model=tensor([[[[1.1939]],
[[1.4659]],
[[0.7861]],
[[0.8297]],
[[1.0051]],
[[1.3317]],
[[1.1878]],
[[1.5941]],
[[1.1533]],
[[1.3300]],
[[1.0466]],
[[1.0994]],
[[0.7200]],
[[0.8665]],
[[0.9916]],
[[1.2162]]]]), compiled_model=tensor([[[[1.1939]],
[[1.3211]],
[[1.0412]],
[[0.1682]],
[[0.4927]],
[[1.0075]],
[[0.7952]],
[[1.8284]],
[[0.6315]],
[[0.9021]],
[[0.4996]],
[[0.4334]],
[[0.2305]],
[[1.0987]],
[[1.0170]],
[[1.2609]]]])
Raw output
forge_module_and_shapes_dtypes = (<class 'test.models_ops.test_add.Add1'>, [((1, 16, 1, 1), torch.float32)], {'model_name': ['pt_mobilenetv3_mobilenet_v3_small_img_cls_torchhub', 'pt_mobilnetv3_mobilenetv3_small_100_img_cls_timm']})
record_forge_property = <function record_property.<locals>.append_property at 0x7fb5d0b78430>
@pytest.mark.push
@pytest.mark.parametrize("forge_module_and_shapes_dtypes", forge_modules_and_shapes_dtypes_list, ids=ids_func)
def test_module(forge_module_and_shapes_dtypes, record_forge_property):
    record_forge_property("op_name", "Add")
    forge_module, operand_shapes_dtypes, metadata = forge_module_and_shapes_dtypes
    pcc = metadata.pop("pcc")
    for metadata_name, metadata_value in metadata.items():
        record_forge_property(metadata_name, metadata_value)
    max_int = 1000
    inputs = [
        Tensor.create_from_shape(operand_shape, operand_dtype, max_int=max_int)
        for operand_shape, operand_dtype in operand_shapes_dtypes
    ]
    framework_model = forge_module(forge_module.__name__)
    framework_model.process_framework_parameters()
    for name, parameter in framework_model._parameters.items():
        parameter_tensor = Tensor.create_torch_tensor(
            shape=parameter.shape.get_pytorch_shape(), dtype=parameter.pt_data_format, max_int=max_int
        )
        framework_model.set_parameter(name, parameter_tensor)
    for name, constant in framework_model._constants.items():
        constant_tensor = Tensor.create_torch_tensor(
            shape=constant.shape.get_pytorch_shape(), dtype=constant.pt_data_format, max_int=max_int
        )
        framework_model.set_constant(name, constant_tensor)
    compiled_model = compile(framework_model, sample_inputs=inputs)
>   verify(inputs, framework_model, compiled_model, VerifyConfig(value_checker=AutomaticValueChecker(pcc=pcc)))
forge/test/models_ops/test_add.py:18561:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
/opt/ttforge-toolchain/venv/lib/python3.10/site-packages/forge/verify/verify.py:333: in verify
verify_cfg.value_checker.check(fw, co)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <forge.verify.value_checkers.AutomaticValueChecker object at 0x7fb5d0daf520>
fw_out = tensor([[[[1.1939]],
[[1.4659]],
[[0.7861]],
[[0.8297]],
[[1.0051]],
...0466]],
[[1.0994]],
[[0.7200]],
[[0.8665]],
[[0.9916]],
[[1.2162]]]])
co_out = tensor([[[[1.1939]],
[[1.3211]],
[[1.0412]],
[[0.1682]],
[[0.4927]],
...4996]],
[[0.4334]],
[[0.2305]],
[[1.0987]],
[[1.0170]],
[[1.2609]]]])
    def check(self, fw_out, co_out):
        if not compare_with_golden(fw_out, co_out, self.pcc, self.rtol, self.atol, self.dissimilarity_threshold):
>           raise ValueError(
                f"Data mismatch -> AutomaticValueChecker (compare_with_golden): framework_model={fw_out}, compiled_model={co_out}"
            )
E       ValueError: Data mismatch -> AutomaticValueChecker (compare_with_golden): framework_model=tensor([...]), compiled_model=tensor([...])  (full tensor values are identical to those in the annotation summary above and are elided here)
/opt/ttforge-toolchain/venv/lib/python3.10/site-packages/forge/verify/value_checkers.py:38: ValueError
Check failure on line 18561 in forge/test/models_ops/test_add.py
github-actions / TT-Forge-FE Tests
test_add.test_module[Add1-[((1, 240, 1, 1), torch.float32)]]
ValueError: Data mismatch -> AutomaticValueChecker (compare_with_golden): framework_model=tensor([[[[1.1614]],
[[1.4333]],
[[0.7536]],
[[0.7971]],
[[0.9725]],
[[1.2992]],
[[1.1552]],
[[1.5615]],
[[1.1207]],
[[1.2974]],
[[1.0140]],
[[1.0668]],
[[0.6874]],
[[0.8340]],
[[0.9590]],
[[1.1836]],
[[1.3628]],
[[1.4651]],
[[0.8261]],
[[0.9474]],
[[1.3467]],
[[1.5803]],
[[1.0622]],
[[1.5393]],
[[1.0845]],
[[1.2180]],
[[1.6178]],
[[0.7013]],
[[0.8503]],
[[1.0385]],
[[0.9702]],
[[1.5971]],
[[0.8410]],
[[0.9349]],
[[0.8158]],
[[0.6968]],
[[0.8732]],
[[1.5949]],
[[1.3882]],
[[1.4074]],
[[1.1914]],
[[0.9088]],
[[1.2497]],
[[0.6982]],
[[0.8038]],
[[0.9073]],
[[1.4806]],
[[1.4583]],
[[0.9433]],
[[1.1471]],
[[1.4849]],
[[1.6622]],
[[1.3635]],
[[1.2326]],
[[1.5003]],
[[0.8707]],
[[1.2583]],
[[0.7774]],
[[0.8186]],
[[0.9068]],
[[1.3913]],
[[1.3662]],
[[0.8689]],
[[1.3161]],
[[1.4396]],
[[1.1020]],
[[1.1842]],
[[1.2809]],
[[1.4753]],
[[1.6452]],
[[0.7798]],
[[0.9819]],
[[1.3616]],
[[1.5794]],
[[1.6002]],
[[1.6063]],
[[1.2646]],
[[0.7303]],
[[1.2111]],
[[0.8523]],
[[0.6991]],
[[1.6093]],
[[1.5453]],
[[0.6663]],
[[1.2587]],
[[1.0809]],
[[1.0828]],
[[0.9362]],
[[1.3574]],
[[0.8689]],
[[1.3484]],
[[1.4180]],
[[1.5230]],
[[1.3521]],
[[0.6702]],
[[0.8407]],
[[1.4148]],
[[1.2697]],
[[0.7751]],
[[0.8772]],
[[1.6355]],
[[1.5020]],
[[0.9471]],
[[1.0393]],
[[0.6888]],
[[1.1561]],
[[0.7886]],
[[0.7794]],
[[1.1375]],
[[1.2402]],
[[0.9603]],
[[1.4618]],
[[0.8608]],
[[1.6188]],
[[1.5077]],
[[0.7435]],
[[1.0407]],
[[1.1877]],
[[1.2380]],
[[1.2837]],
[[1.3613]],
[[1.1950]],
[[0.9211]],
[[1.4017]],
[[0.6855]],
[[0.8687]],
[[1.0399]],
[[0.9215]],
[[0.9902]],
[[0.7553]],
[[1.0587]],
[[1.2720]],
[[0.8394]],
[[1.1394]],
[[1.5230]],
[[1.1137]],
[[1.1790]],
[[1.1220]],
[[1.2663]],
[[1.4830]],
[[1.6387]],
[[1.4826]],
[[1.6398]],
[[1.1289]],
[[0.7159]],
[[0.9281]],
[[1.5055]],
[[1.1619]],
[[0.9166]],
[[0.7819]],
[[0.6972]],
[[0.7431]],
[[1.0637]],
[[1.4393]],
[[1.4354]],
[[0.6829]],
[[1.4770]],
[[0.7738]],
[[1.0594]],
[[0.9624]],
[[1.0688]],
[[1.0669]],
[[0.7164]],
[[0.7334]],
[[1.0869]],
[[1.1716]],
[[0.9380]],
[[1.3534]],
[[0.7151]],
[[1.1314]],
[[1.6048]],
[[0.9612]],
[[1.6166]],
[[1.3462]],
[[0.7139]],
[[1.4814]],
[[1.1074]],
[[0.9419]],
[[1.5649]],
[[0.7610]],
[[1.2187]],
[[1.0604]],
[[1.5222]],
[[1.3047]],
[[1.4053]],
[[1.3417]],
[[1.0449]],
[[1.0599]],
[[0.7531]],
[[1.4360]],
[[1.5621]],
[[1.5072]],
[[0.8124]],
[[1.1874]],
[[0.8126]],
[[0.8899]],
[[0.8737]],
[[1.3360]],
[[0.8671]],
[[1.1542]],
[[1.1861]],
[[1.4874]],
[[0.7871]],
[[0.8218]],
[[0.8748]],
[[1.5151]],
[[0.9854]],
[[1.5868]],
[[1.3459]],
[[1.2284]],
[[1.1614]],
[[1.0663]],
[[1.2278]],
[[1.0509]],
[[1.1616]],
[[1.2289]],
[[0.7740]],
[[0.9030]],
[[1.5688]],
[[0.7593]],
[[1.1292]],
[[1.6597]],
[[1.3457]],
[[1.1793]],
[[0.7318]],
[[1.4128]],
[[0.8090]],
[[1.0232]],
[[0.9973]],
[[1.0911]],
[[1.1706]],
[[1.5775]],
[[1.2275]],
[[1.6129]],
[[1.4710]],
[[0.8490]],
[[1.3893]],
[[0.8116]],
[[0.9532]],
[[1.3122]]]]), compiled_model=tensor([[[[1.1614]],
[[1.5250]],
[[1.7245]],
[[0.9210]],
[[1.3506]],
[[1.6945]],
[[0.8003]],
[[2.0041]],
[[1.3812]],
[[1.5068]],
[[0.6095]],
[[0.6455]],
[[1.2008]],
[[1.9356]],
[[1.2990]],
[[1.6350]],
[[1.2477]],
[[1.5347]],
[[0.8691]],
[[0.4297]],
[[1.2928]],
[[1.7325]],
[[1.5078]],
[[2.4640]],
[[0.8934]],
[[1.9886]],
[[2.6152]],
[[1.1116]],
[[1.2592]],
[[1.4635]],
[[1.7133]],
[[1.7562]],
[[1.4653]],
[[0.9121]],
[[0.5602]],
[[1.0100]],
[[0.9547]],
[[1.8345]],
[[1.3018]],
[[1.6498]],
[[1.5868]],
[[1.5327]],
[[1.3922]],
[[0.7407]],
[[0.9190]],
[[0.9815]],
[[1.7915]],
[[1.4641]],
[[0.9672]],
[[1.7066]],
[[2.0952]],
[[1.9136]],
[[1.5808]],
[[1.3044]],
[[2.0042]],
[[0.9785]],
[[1.1542]],
[[0.9825]],
[[0.8566]],
[[0.8018]],
[[1.3843]],
[[1.1908]],
[[0.5914]],
[[0.8428]],
[[1.6203]],
[[0.5647]],
[[1.2239]],
[[0.9477]],
[[1.0690]],
[[1.5699]],
[[0.3550]],
[[0.9320]],
[[1.2947]],
[[1.0430]],
[[1.5184]],
[[1.6541]],
[[1.2974]],
[[0.5023]],
[[0.6361]],
[[0.6101]],
[[0.7077]],
[[1.2618]],
[[1.5700]],
[[0.8342]],
[[0.8325]],
[[0.9207]],
[[1.1245]],
[[0.8103]],
[[1.2340]],
[[0.7663]],
[[0.7902]],
[[1.2921]],
[[1.7042]],
[[1.6375]],
[[0.7990]],
[[0.7427]],
[[1.4832]],
[[0.8614]],
[[0.1956]],
[[0.2821]],
[[1.9692]],
[[1.6543]],
[[0.4364]],
[[1.0698]],
[[0.9013]],
[[1.4908]],
[[1.0607]],
[[1.0017]],
[[0.8578]],
[[0.8996]],
[[1.2058]],
[[1.5769]],
[[0.3948]],
[[1.9032]],
[[1.5842]],
[[0.8509]],
[[0.5622]],
[[1.1660]],
[[0.8977]],
[[1.5093]],
[[1.1062]],
[[1.2246]],
[[0.8448]],
[[1.4493]],
[[0.3505]],
[[0.9474]],
[[0.5256]],
[[0.8694]],
[[0.4868]],
[[0.0969]],
[[0.4921]],
[[1.5016]],
[[0.9448]],
[[1.4434]],
[[1.7585]],
[[0.5021]],
[[0.6727]],
[[0.8761]],
[[0.7765]],
[[1.6651]],
[[1.0956]],
[[1.0736]],
[[0.9917]],
[[0.6800]],
[[0.9621]],
[[1.1723]],
[[1.6984]],
[[1.3828]],
[[1.1961]],
[[0.4888]],
[[0.7521]],
[[1.0235]],
[[1.0640]],
[[1.7740]],
[[1.5297]],
[[0.8286]],
[[1.1369]],
[[0.8487]],
[[0.9517]],
[[0.6779]],
[[0.6218]],
[[0.6213]],
[[0.1666]],
[[0.9039]],
[[1.2772]],
[[0.9496]],
[[0.4835]],
[[1.5748]],
[[0.8697]],
[[1.0034]],
[[1.2036]],
[[1.2556]],
[[1.6560]],
[[0.8015]],
[[1.0273]],
[[1.6960]],
[[0.7601]],
[[1.0579]],
[[1.1157]],
[[0.5176]],
[[1.4782]],
[[0.9160]],
[[1.0034]],
[[0.9725]],
[[1.1045]],
[[1.0801]],
[[0.9276]],
[[1.3573]],
[[0.6147]],
[[0.9622]],
[[1.4226]],
[[1.5819]],
[[0.8953]],
[[0.5653]],
[[0.5581]],
[[0.3532]],
[[0.4953]],
[[1.3510]],
[[0.3470]],
[[1.1750]],
[[1.4454]],
[[1.3551]],
[[0.2888]],
[[0.4776]],
[[0.8189]],
[[0.9688]],
[[1.0687]],
[[0.9678]],
[[0.7002]],
[[0.5775]],
[[0.8948]],
[[1.2374]],
[[0.5895]],
[[1.3014]],
[[0.7965]],
[[1.2102]],
[[0.6317]],
[[0.2871]],
[[1.8184]],
[[0.8634]],
[[1.4611]],
[[1.7472]],
[[0.8506]],
[[1.4314]],
[[0.5936]],
[[1.4848]],
[[0.2429]],
[[0.7143]],
[[0.3413]],
[[0.7312]],
[[1.1133]],
[[1.0198]],
[[1.2218]],
[[1.7162]],
[[1.3755]],
[[0.3494]],
[[0.8366]],
[[0.4923]],
[[1.0076]],
[[1.6403]]]])
Raw output
forge_module_and_shapes_dtypes = (<class 'test.models_ops.test_add.Add1'>, [((1, 240, 1, 1), torch.float32)], {'model_name': ['pt_mobilenetv3_mobilenet_v3_small_img_cls_torchhub', 'pt_mobilnetv3_mobilenetv3_small_100_img_cls_timm']})
record_forge_property = <function record_property.<locals>.append_property at 0x7fb5e7edb910>
@pytest.mark.push
@pytest.mark.parametrize("forge_module_and_shapes_dtypes", forge_modules_and_shapes_dtypes_list, ids=ids_func)
def test_module(forge_module_and_shapes_dtypes, record_forge_property):
    record_forge_property("op_name", "Add")
    forge_module, operand_shapes_dtypes, metadata = forge_module_and_shapes_dtypes
    pcc = metadata.pop("pcc")
    for metadata_name, metadata_value in metadata.items():
        record_forge_property(metadata_name, metadata_value)
    max_int = 1000
    inputs = [
        Tensor.create_from_shape(operand_shape, operand_dtype, max_int=max_int)
        for operand_shape, operand_dtype in operand_shapes_dtypes
    ]
    framework_model = forge_module(forge_module.__name__)
    framework_model.process_framework_parameters()
    for name, parameter in framework_model._parameters.items():
        parameter_tensor = Tensor.create_torch_tensor(
            shape=parameter.shape.get_pytorch_shape(), dtype=parameter.pt_data_format, max_int=max_int
        )
        framework_model.set_parameter(name, parameter_tensor)
    for name, constant in framework_model._constants.items():
        constant_tensor = Tensor.create_torch_tensor(
            shape=constant.shape.get_pytorch_shape(), dtype=constant.pt_data_format, max_int=max_int
        )
        framework_model.set_constant(name, constant_tensor)
    compiled_model = compile(framework_model, sample_inputs=inputs)
>   verify(inputs, framework_model, compiled_model, VerifyConfig(value_checker=AutomaticValueChecker(pcc=pcc)))
forge/test/models_ops/test_add.py:18561:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
/opt/ttforge-toolchain/venv/lib/python3.10/site-packages/forge/verify/verify.py:333: in verify
verify_cfg.value_checker.check(fw, co)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <forge.verify.value_checkers.AutomaticValueChecker object at 0x7fb5c8ef97b0>
fw_out = tensor([[[[1.1614]],
[[1.4333]],
[[0.7536]],
[[0.7971]],
[[0.9725]],
...4710]],
[[0.8490]],
[[1.3893]],
[[0.8116]],
[[0.9532]],
[[1.3122]]]])
co_out = tensor([[[[1.1614]],
[[1.5250]],
[[1.7245]],
[[0.9210]],
[[1.3506]],
...3755]],
[[0.3494]],
[[0.8366]],
[[0.4923]],
[[1.0076]],
[[1.6403]]]])
    def check(self, fw_out, co_out):
        if not compare_with_golden(fw_out, co_out, self.pcc, self.rtol, self.atol, self.dissimilarity_threshold):
>           raise ValueError(
                f"Data mismatch -> AutomaticValueChecker (compare_with_golden): framework_model={fw_out}, compiled_model={co_out}"
            )
E       ValueError: Data mismatch -> AutomaticValueChecker (compare_with_golden): framework_model=tensor([...]), compiled_model=tensor([...])  (full tensor values are identical to those in the annotation summary above and are elided here)
/opt/ttforge-toolchain/venv/lib/python3.10/site-packages/forge/verify/value_checkers.py:38: ValueError
Check failure on line 18561 in forge/test/models_ops/test_add.py
github-actions / TT-Forge-FE Tests
test_add.test_module[Add1-[((1, 144, 1, 1), torch.float32)]]
ValueError: Data mismatch -> AutomaticValueChecker (compare_with_golden): framework_model=tensor([[[[0.5471]],
[[0.8191]],
[[0.1393]],
[[0.1829]],
[[0.3583]],
[[0.6849]],
[[0.5409]],
[[0.9473]],
[[0.5065]],
[[0.6831]],
[[0.3997]],
[[0.4526]],
[[0.0732]],
[[0.2197]],
[[0.3447]],
[[0.5694]],
[[0.7485]],
[[0.8509]],
[[0.2119]],
[[0.3331]],
[[0.7324]],
[[0.9660]],
[[0.4479]],
[[0.9250]],
[[0.4702]],
[[0.6037]],
[[1.0036]],
[[0.0870]],
[[0.2361]],
[[0.4243]],
[[0.3559]],
[[0.9828]],
[[0.2267]],
[[0.3207]],
[[0.2015]],
[[0.0826]],
[[0.2590]],
[[0.9806]],
[[0.7739]],
[[0.7932]],
[[0.5771]],
[[0.2945]],
[[0.6354]],
[[0.0840]],
[[0.1896]],
[[0.2931]],
[[0.8663]],
[[0.8440]],
[[0.3291]],
[[0.5328]],
[[0.8706]],
[[1.0479]],
[[0.7493]],
[[0.6184]],
[[0.8861]],
[[0.2564]],
[[0.6440]],
[[0.1632]],
[[0.2043]],
[[0.2925]],
[[0.7771]],
[[0.7519]],
[[0.2547]],
[[0.7019]],
[[0.8253]],
[[0.4877]],
[[0.5699]],
[[0.6667]],
[[0.8610]],
[[1.0309]],
[[0.1655]],
[[0.3676]],
[[0.7473]],
[[0.9651]],
[[0.9859]],
[[0.9920]],
[[0.6503]],
[[0.1160]],
[[0.5968]],
[[0.2380]],
[[0.0849]],
[[0.9951]],
[[0.9310]],
[[0.0521]],
[[0.6444]],
[[0.4666]],
[[0.4686]],
[[0.3220]],
[[0.7431]],
[[0.2547]],
[[0.7341]],
[[0.8037]],
[[0.9088]],
[[0.7378]],
[[0.0560]],
[[0.2265]],
[[0.8005]],
[[0.6555]],
[[0.1608]],
[[0.2629]],
[[1.0212]],
[[0.8877]],
[[0.3328]],
[[0.4250]],
[[0.0745]],
[[0.5419]],
[[0.1743]],
[[0.1652]],
[[0.5233]],
[[0.6259]],
[[0.3461]],
[[0.8475]],
[[0.2466]],
[[1.0045]],
[[0.8935]],
[[0.1292]],
[[0.4264]],
[[0.5734]],
[[0.6238]],
[[0.6694]],
[[0.7471]],
[[0.5808]],
[[0.3069]],
[[0.7874]],
[[0.0712]],
[[0.2545]],
[[0.4257]],
[[0.3073]],
[[0.3759]],
[[0.1410]],
[[0.4445]],
[[0.6577]],
[[0.2251]],
[[0.5252]],
[[0.9088]],
[[0.4994]],
[[0.5647]],
[[0.5077]],
[[0.6520]],
[[0.8688]],
[[1.0245]],
[[0.8684]],
[[1.0255]],
[[0.5147]]]]), compiled_model=tensor([[[[0.5471]],
[[1.0312]],
[[0.9289]],
[[0.6288]],
[[0.5589]],
[[0.7509]],
[[0.5222]],
[[0.9744]],
[[0.8542]],
[[1.4065]],
[[1.1192]],
[[0.4195]],
[[0.8342]],
[[0.2776]],
[[0.6882]],
[[0.8158]],
[[1.1014]],
[[1.2018]],
[[0.2124]],
[[0.3505]],
[[1.1034]],
[[1.4217]],
[[0.6700]],
[[1.5625]],
[[0.4694]],
[[1.0192]],
[[1.8924]],
[[0.3322]],
[[1.1367]],
[[1.0545]],
[[0.3539]],
[[1.7483]],
[[0.6182]],
[[0.5466]],
[[1.0505]],
[[0.1277]],
[[0.7618]],
[[1.3251]],
[[1.5802]],
[[1.3819]],
[[1.2665]],
[[0.9202]],
[[0.9644]],
[[0.4280]],
[[0.2267]],
[[1.0132]],
[[1.7125]],
[[1.6353]],
[[0.4256]],
[[1.0043]],
[[0.9673]],
[[1.2218]],
[[0.9071]],
[[1.2384]],
[[1.0373]],
[[0.6947]],
[[1.1142]],
[[0.9347]],
[[0.2755]],
[[0.3985]],
[[0.9359]],
[[1.5510]],
[[0.5241]],
[[1.5728]],
[[1.4553]],
[[1.0002]],
[[1.0154]],
[[1.0170]],
[[1.3729]],
[[1.3659]],
[[0.6112]],
[[0.8806]],
[[0.8054]],
[[1.1522]],
[[1.8389]],
[[1.0354]],
[[1.0636]],
[[1.0598]],
[[1.2266]],
[[0.7014]],
[[0.1007]],
[[1.6919]],
[[1.0240]],
[[0.3593]],
[[0.9258]],
[[0.8417]],
[[0.9232]],
[[1.1835]],
[[1.2547]],
[[1.1517]],
[[1.4892]],
[[0.9367]],
[[1.5822]],
[[0.8335]],
[[0.2932]],
[[0.8227]],
[[1.4148]],
[[1.3614]],
[[1.7460]],
[[1.0011]],
[[2.0135]],
[[1.8973]],
[[0.5922]],
[[1.4818]],
[[0.9493]],
[[1.3655]],
[[0.3841]],
[[0.3581]],
[[1.6510]],
[[2.3418]],
[[1.3003]],
[[1.9132]],
[[0.7457]],
[[1.6884]],
[[1.5507]],
[[0.2258]],
[[0.9867]],
[[1.3399]],
[[1.6837]],
[[2.2084]],
[[1.1702]],
[[1.9656]],
[[1.9185]],
[[1.8120]],
[[1.0944]],
[[1.2938]],
[[1.7830]],
[[1.0806]],
[[1.6145]],
[[0.7325]],
[[0.8031]],
[[1.5852]],
[[0.9209]],
[[1.3791]],
[[1.4366]],
[[1.3561]],
[[1.5744]],
[[1.7459]],
[[1.4088]],
[[1.5254]],
[[1.7539]],
[[1.5568]],
[[1.9508]],
[[1.1347]]]])
Raw output
forge_module_and_shapes_dtypes = (<class 'test.models_ops.test_add.Add1'>, [((1, 144, 1, 1), torch.float32)], {'model_name': ['pt_mobilenetv3_mobilenet_v3_small_img_cls_torchhub', 'pt_mobilnetv3_mobilenetv3_small_100_img_cls_timm']})
record_forge_property = <function record_property.<locals>.append_property at 0x7fb5d0db8040>
@pytest.mark.push
@pytest.mark.parametrize("forge_module_and_shapes_dtypes", forge_modules_and_shapes_dtypes_list, ids=ids_func)
def test_module(forge_module_and_shapes_dtypes, record_forge_property):
    record_forge_property("op_name", "Add")
    forge_module, operand_shapes_dtypes, metadata = forge_module_and_shapes_dtypes
    pcc = metadata.pop("pcc")
    for metadata_name, metadata_value in metadata.items():
        record_forge_property(metadata_name, metadata_value)
    max_int = 1000
    inputs = [
        Tensor.create_from_shape(operand_shape, operand_dtype, max_int=max_int)
        for operand_shape, operand_dtype in operand_shapes_dtypes
    ]
    framework_model = forge_module(forge_module.__name__)
    framework_model.process_framework_parameters()
    for name, parameter in framework_model._parameters.items():
        parameter_tensor = Tensor.create_torch_tensor(
            shape=parameter.shape.get_pytorch_shape(), dtype=parameter.pt_data_format, max_int=max_int
        )
        framework_model.set_parameter(name, parameter_tensor)
    for name, constant in framework_model._constants.items():
        constant_tensor = Tensor.create_torch_tensor(
            shape=constant.shape.get_pytorch_shape(), dtype=constant.pt_data_format, max_int=max_int
        )
        framework_model.set_constant(name, constant_tensor)
    compiled_model = compile(framework_model, sample_inputs=inputs)
>   verify(inputs, framework_model, compiled_model, VerifyConfig(value_checker=AutomaticValueChecker(pcc=pcc)))
forge/test/models_ops/test_add.py:18561:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
/opt/ttforge-toolchain/venv/lib/python3.10/site-packages/forge/verify/verify.py:333: in verify
verify_cfg.value_checker.check(fw, co)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <forge.verify.value_checkers.AutomaticValueChecker object at 0x7fb5e04795a0>
fw_out = tensor([[[[0.5471]],
[[0.8191]],
[[0.1393]],
[[0.1829]],
[[0.3583]],
...6520]],
[[0.8688]],
[[1.0245]],
[[0.8684]],
[[1.0255]],
[[0.5147]]]])
co_out = tensor([[[[0.5471]],
[[1.0312]],
[[0.9289]],
[[0.6288]],
[[0.5589]],
...4088]],
[[1.5254]],
[[1.7539]],
[[1.5568]],
[[1.9508]],
[[1.1347]]]])
    def check(self, fw_out, co_out):
        if not compare_with_golden(fw_out, co_out, self.pcc, self.rtol, self.atol, self.dissimilarity_threshold):
>           raise ValueError(
                f"Data mismatch -> AutomaticValueChecker (compare_with_golden): framework_model={fw_out}, compiled_model={co_out}"
            )
E       ValueError: Data mismatch -> AutomaticValueChecker (compare_with_golden): framework_model=tensor([...]), compiled_model=tensor([...])  (full tensor values are identical to those in the annotation summary above and are elided here)
/opt/ttforge-toolchain/venv/lib/python3.10/site-packages/forge/verify/value_checkers.py:38: ValueError
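Every failure in this group is raised from AutomaticValueChecker.check: compare_with_golden is called with the per-test pcc (plus rtol, atol and a dissimilarity threshold) and rejects the compiled output. The log does not show how compare_with_golden is implemented, so the snippet below is only a minimal sketch, assuming a plain Pearson correlation over the flattened tensors, of the kind of check that makes dumps like the one above fail even though their first element matches exactly.

# Minimal sketch of a PCC-style comparison. Assumption: plain Pearson
# correlation on flattened tensors; this is NOT the actual
# forge.verify compare_with_golden implementation.
import torch

def pcc(fw_out: torch.Tensor, co_out: torch.Tensor) -> float:
    fw = fw_out.flatten().double()
    co = co_out.flatten().double()
    fw = fw - fw.mean()
    co = co - co.mean()
    denom = fw.norm() * co.norm()
    if denom == 0:
        # Constant tensors: fall back to exact comparison.
        return 1.0 if torch.equal(fw, co) else 0.0
    return float((fw @ co) / denom)

def passes_pcc(fw_out: torch.Tensor, co_out: torch.Tensor, threshold: float) -> bool:
    # The generated tests pass `threshold` from the test metadata ("pcc");
    # the checker raises ValueError when this returns False.
    return pcc(fw_out, co_out) >= threshold

Once the compiled values drift the way they do above (matching first element, diverging tail), the correlation drops well below any threshold the generated tests use, and check raises the ValueError shown.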
Check failure on line 18561 in forge/test/models_ops/test_add.py
github-actions / TT-Forge-FE Tests
test_add.test_module[Add1-[((1, 288, 1, 1), torch.float32)]]
ValueError: Data mismatch -> AutomaticValueChecker (compare_with_golden): framework_model=tensor([[[[0.8819]],
[[1.1539]],
[[0.4741]],
...,
[[1.3005]],
[[0.7180]],
[[1.0329]]]]), compiled_model=tensor([[[[0.8819]],
[[1.2906]],
[[0.6118]],
...,
[[1.0165]],
[[1.0131]],
[[1.1399]]]])
Raw output
forge_module_and_shapes_dtypes = (<class 'test.models_ops.test_add.Add1'>, [((1, 288, 1, 1), torch.float32)], {'model_name': ['pt_mobilenetv3_mobilenet_v3_small_img_cls_torchhub', 'pt_mobilnetv3_mobilenetv3_small_100_img_cls_timm']})
record_forge_property = <function record_property.<locals>.append_property at 0x7fb6171cb490>
    @pytest.mark.push
    @pytest.mark.parametrize("forge_module_and_shapes_dtypes", forge_modules_and_shapes_dtypes_list, ids=ids_func)
    def test_module(forge_module_and_shapes_dtypes, record_forge_property):
        record_forge_property("op_name", "Add")
        forge_module, operand_shapes_dtypes, metadata = forge_module_and_shapes_dtypes
        pcc = metadata.pop("pcc")
        for metadata_name, metadata_value in metadata.items():
            record_forge_property(metadata_name, metadata_value)
        max_int = 1000
        inputs = [
            Tensor.create_from_shape(operand_shape, operand_dtype, max_int=max_int)
            for operand_shape, operand_dtype in operand_shapes_dtypes
        ]
        framework_model = forge_module(forge_module.__name__)
        framework_model.process_framework_parameters()
        for name, parameter in framework_model._parameters.items():
            parameter_tensor = Tensor.create_torch_tensor(
                shape=parameter.shape.get_pytorch_shape(), dtype=parameter.pt_data_format, max_int=max_int
            )
            framework_model.set_parameter(name, parameter_tensor)
        for name, constant in framework_model._constants.items():
            constant_tensor = Tensor.create_torch_tensor(
                shape=constant.shape.get_pytorch_shape(), dtype=constant.pt_data_format, max_int=max_int
            )
            framework_model.set_constant(name, constant_tensor)
        compiled_model = compile(framework_model, sample_inputs=inputs)
>       verify(inputs, framework_model, compiled_model, VerifyConfig(value_checker=AutomaticValueChecker(pcc=pcc)))
forge/test/models_ops/test_add.py:18561:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
/opt/ttforge-toolchain/venv/lib/python3.10/site-packages/forge/verify/verify.py:333: in verify
verify_cfg.value_checker.check(fw, co)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <forge.verify.value_checkers.AutomaticValueChecker object at 0x7fb5d0a6ed40>
fw_out = tensor([[[[0.8819]],
[[1.1539]],
[[0.4741]],
[[0.5177]],
[[0.6931]],
...4815]],
[[1.2842]],
[[0.9671]],
[[1.3005]],
[[0.7180]],
[[1.0329]]]])
co_out = tensor([[[[0.8819]],
[[1.2906]],
[[0.6118]],
[[0.2749]],
[[0.6144]],
...5960]],
[[0.9722]],
[[1.3726]],
[[1.0165]],
[[1.0131]],
[[1.1399]]]])
    def check(self, fw_out, co_out):
        if not compare_with_golden(fw_out, co_out, self.pcc, self.rtol, self.atol, self.dissimilarity_threshold):
>           raise ValueError(
                f"Data mismatch -> AutomaticValueChecker (compare_with_golden): framework_model={fw_out}, compiled_model={co_out}"
            )
E ValueError: Data mismatch -> AutomaticValueChecker (compare_with_golden): framework_model=tensor([[[[0.8819]],
E
E [[1.1539]],
E
E [[0.4741]],
E
E [[0.5177]],
E
E [[0.6931]],
E
E [[1.0197]],
E
E [[0.8758]],
E
E [[1.2821]],
E
E [[0.8413]],
E
E [[1.0180]],
E
E [[0.7346]],
E
E [[0.7874]],
E
E [[0.4080]],
E
E [[0.5545]],
E
E [[0.6796]],
E
E [[0.9042]],
E
E [[1.0833]],
E
E [[1.1857]],
E
E [[0.5467]],
E
E [[0.6679]],
E
E [[1.0673]],
E
E [[1.3009]],
E
E [[0.7828]],
E
E [[1.2598]],
E
E [[0.8051]],
E
E [[0.9386]],
E
E [[1.3384]],
E
E [[0.4218]],
E
E [[0.5709]],
E
E [[0.7591]],
E
E [[0.6908]],
E
E [[1.3177]],
E
E [[0.5616]],
E
E [[0.6555]],
E
E [[0.5363]],
E
E [[0.4174]],
E
E [[0.5938]],
E
E [[1.3155]],
E
E [[1.1088]],
E
E [[1.1280]],
E
E [[0.9120]],
E
E [[0.6293]],
E
E [[0.9703]],
E
E [[0.4188]],
E
E [[0.5244]],
E
E [[0.6279]],
E
E [[1.2011]],
E
E [[1.1788]],
E
E [[0.6639]],
E
E [[0.8676]],
E
E [[1.2054]],
E
E [[1.3827]],
E
E [[1.0841]],
E
E [[0.9532]],
E
E [[1.2209]],
E
E [[0.5913]],
E
E [[0.9788]],
E
E [[0.4980]],
E
E [[0.5391]],
E
E [[0.6274]],
E
E [[1.1119]],
E
E [[1.0867]],
E
E [[0.5895]],
E
E [[1.0367]],
E
E [[1.1601]],
E
E [[0.8226]],
E
E [[0.9048]],
E
E [[1.0015]],
E
E [[1.1959]],
E
E [[1.3658]],
E
E [[0.5004]],
E
E [[0.7024]],
E
E [[1.0822]],
E
E [[1.2999]],
E
E [[1.3208]],
E
E [[1.3268]],
E
E [[0.9852]],
E
E [[0.4509]],
E
E [[0.9317]],
E
E [[0.5729]],
E
E [[0.4197]],
E
E [[1.3299]],
E
E [[1.2658]],
E
E [[0.3869]],
E
E [[0.9792]],
E
E [[0.8014]],
E
E [[0.8034]],
E
E [[0.6568]],
E
E [[1.0779]],
E
E [[0.5895]],
E
E [[1.0690]],
E
E [[1.1385]],
E
E [[1.2436]],
E
E [[1.0726]],
E
E [[0.3908]],
E
E [[0.5613]],
E
E [[1.1353]],
E
E [[0.9903]],
E
E [[0.4956]],
E
E [[0.5978]],
E
E [[1.3560]],
E
E [[1.2226]],
E
E [[0.6676]],
E
E [[0.7598]],
E
E [[0.4094]],
E
E [[0.8767]],
E
E [[0.5091]],
E
E [[0.5000]],
E
E [[0.8581]],
E
E [[0.9607]],
E
E [[0.6809]],
E
E [[1.1824]],
E
E [[0.5814]],
E
E [[1.3393]],
E
E [[1.2283]],
E
E [[0.4640]],
E
E [[0.7612]],
E
E [[0.9082]],
E
E [[0.9586]],
E
E [[1.0042]],
E
E [[1.0819]],
E
E [[0.9156]],
E
E [[0.6417]],
E
E [[1.1223]],
E
E [[0.4060]],
E
E [[0.5893]],
E
E [[0.7605]],
E
E [[0.6421]],
E
E [[0.7107]],
E
E [[0.4759]],
E
E [[0.7793]],
E
E [[0.9925]],
E
E [[0.5599]],
E
E [[0.8600]],
E
E [[1.2436]],
E
E [[0.8343]],
E
E [[0.8996]],
E
E [[0.8425]],
E
E [[0.9869]],
E
E [[1.2036]],
E
E [[1.3593]],
E
E [[1.2032]],
E
E [[1.3604]],
E
E [[0.8495]],
E
E [[0.4365]],
E
E [[0.6486]],
E
E [[1.2261]],
E
E [[0.8824]],
E
E [[0.6371]],
E
E [[0.5025]],
E
E [[0.4177]],
E
E [[0.4637]],
E
E [[0.7842]],
E
E [[1.1599]],
E
E [[1.1560]],
E
E [[0.4034]],
E
E [[1.1976]],
E
E [[0.4944]],
E
E [[0.7800]],
E
E [[0.6829]],
E
E [[0.7894]],
E
E [[0.7875]],
E
E [[0.4370]],
E
E [[0.4539]],
E
E [[0.8074]],
E
E [[0.8921]],
E
E [[0.6585]],
E
E [[1.0740]],
E
E [[0.4356]],
E
E [[0.8519]],
E
E [[1.3254]],
E
E [[0.6817]],
E
E [[1.3372]],
E
E [[1.0667]],
E
E [[0.4344]],
E
E [[1.2020]],
E
E [[0.8280]],
E
E [[0.6625]],
E
E [[1.2855]],
E
E [[0.4816]],
E
E [[0.9393]],
E
E [[0.7810]],
E
E [[1.2427]],
E
E [[1.0252]],
E
E [[1.1259]],
E
E [[1.0622]],
E
E [[0.7654]],
E
E [[0.7805]],
E
E [[0.4736]],
E
E [[1.1566]],
E
E [[1.2827]],
E
E [[1.2278]],
E
E [[0.5330]],
E
E [[0.9080]],
E
E [[0.5332]],
E
E [[0.6104]],
E
E [[0.5943]],
E
E [[1.0565]],
E
E [[0.5877]],
E
E [[0.8748]],
E
E [[0.9067]],
E
E [[1.2080]],
E
E [[0.5077]],
E
E [[0.5424]],
E
E [[0.5953]],
E
E [[1.2356]],
E
E [[0.7059]],
E
E [[1.3074]],
E
E [[1.0665]],
E
E [[0.9490]],
E
E [[0.8819]],
E
E [[0.7868]],
E
E [[0.9484]],
E
E [[0.7715]],
E
E [[0.8821]],
E
E [[0.9495]],
E
E [[0.4946]],
E
E [[0.6236]],
E
E [[1.2894]],
E
E [[0.4799]],
E
E [[0.8498]],
E
E [[1.3803]],
E
E [[1.0663]],
E
E [[0.8998]],
E
E [[0.4524]],
E
E [[1.1334]],
E
E [[0.5295]],
E
E [[0.7437]],
E
E [[0.7179]],
E
E [[0.8116]],
E
E [[0.8911]],
E
E [[1.2981]],
E
E [[0.9481]],
E
E [[1.3335]],
E
E [[1.1915]],
E
E [[0.5696]],
E
E [[1.1099]],
E
E [[0.5322]],
E
E [[0.6737]],
E
E [[1.0327]],
E
E [[1.0508]],
E
E [[1.2608]],
E
E [[0.7247]],
E
E [[0.8865]],
E
E [[1.1431]],
E
E [[0.4021]],
E
E [[1.2472]],
E
E [[0.4722]],
E
E [[0.8926]],
E
E [[0.8007]],
E
E [[0.6223]],
E
E [[0.9517]],
E
E [[1.2991]],
E
E [[0.7395]],
E
E [[0.5888]],
E
E [[0.7007]],
E
E [[0.3901]],
E
E [[1.1114]],
E
E [[0.6455]],
E
E [[0.5520]],
E
E [[0.5976]],
E
E [[1.1731]],
E
E [[1.1504]],
E
E [[1.2694]],
E
E [[1.0670]],
E
E [[0.7187]],
E
E [[0.7459]],
E
E [[1.0334]],
E
E [[1.2967]],
E
E [[1.0216]],
E
E [[0.6491]],
E
E [[0.6506]],
E
E [[0.4129]],
E
E [[0.9937]],
E
E [[0.6051]],
E
E [[0.4399]],
E
E [[1.3241]],
E
E [[0.5610]],
E
E [[0.8288]],
E
E [[1.0289]],
E
E [[0.9016]],
E
E [[0.5492]],
E
E [[0.4815]],
E
E [[1.2842]],
E
E [[0.9671]],
E
E [[1.3005]],
E
E [[0.7180]],
E
E [[1.0329]]]]), compiled_model=tensor([[[[0.8819]],
E
E [[1.2906]],
E
E [[0.6118]],
E
E [[0.2749]],
E
E [[0.6144]],
E
E [[0.9873]],
E
E [[0.7578]],
E
E [[1.2064]],
E
E [[1.0141]],
E
E [[1.6125]],
E
E [[0.6422]],
E
E [[0.4687]],
E
E [[0.6595]],
E
E [[1.1170]],
E
E [[0.3022]],
E
E [[1.4577]],
E
E [[1.5588]],
E
E [[1.4526]],
E
E [[0.9559]],
E
E [[0.4654]],
E
E [[0.7262]],
E
E [[1.8695]],
E
E [[1.1443]],
E
E [[1.0461]],
E
E [[0.5686]],
E
E [[1.1868]],
E
E [[1.7802]],
E
E [[0.9739]],
E
E [[0.7482]],
E
E [[1.1661]],
E
E [[1.0003]],
E
E [[1.6088]],
E
E [[0.4256]],
E
E [[0.4077]],
E
E [[1.1362]],
E
E [[0.6355]],
E
E [[0.2586]],
E
E [[1.2449]],
E
E [[1.2727]],
E
E [[1.3262]],
E
E [[0.6524]],
E
E [[1.1805]],
E
E [[0.6235]],
E
E [[0.7781]],
E
E [[0.4177]],
E
E [[0.3883]],
E
E [[1.5366]],
E
E [[1.1733]],
E
E [[0.5795]],
E
E [[0.9295]],
E
E [[1.1412]],
E
E [[1.9626]],
E
E [[0.7275]],
E
E [[0.8961]],
E
E [[1.2745]],
E
E [[0.5666]],
E
E [[0.7864]],
E
E [[0.6786]],
E
E [[0.6635]],
E
E [[1.0594]],
E
E [[1.3904]],
E
E [[1.5434]],
E
E [[0.3507]],
E
E [[1.5682]],
E
E [[1.0653]],
E
E [[0.9305]],
E
E [[1.2032]],
E
E [[1.0125]],
E
E [[1.1776]],
E
E [[1.7073]],
E
E [[0.9749]],
E
E [[0.4916]],
E
E [[1.4845]],
E
E [[1.7454]],
E
E [[1.6429]],
E
E [[1.6019]],
E
E [[1.3679]],
E
E [[1.0388]],
E
E [[1.3856]],
E
E [[0.7031]],
E
E [[0.1927]],
E
E [[1.4320]],
E
E [[1.0929]],
E
E [[0.9422]],
E
E [[0.9451]],
E
E [[0.9518]],
E
E [[1.1292]],
E
E [[0.9450]],
E
E [[0.7822]],
E
E [[0.3811]],
E
E [[1.2687]],
E
E [[0.8856]],
E
E [[1.5526]],
E
E [[1.3195]],
E
E [[0.3105]],
E
E [[0.2802]],
E
E [[1.3263]],
E
E [[0.8255]],
E
E [[1.0898]],
E
E [[1.0350]],
E
E [[1.7904]],
E
E [[1.5708]],
E
E [[0.9769]],
E
E [[0.4988]],
E
E [[0.9998]],
E
E [[0.8888]],
E
E [[0.9132]],
E
E [[1.0734]],
E
E [[1.0686]],
E
E [[0.7128]],
E
E [[0.7715]],
E
E [[1.0055]],
E
E [[0.2886]],
E
E [[1.8109]],
E
E [[1.1538]],
E
E [[0.9800]],
E
E [[0.5849]],
E
E [[1.2297]],
E
E [[1.0393]],
E
E [[1.0629]],
E
E [[1.4850]],
E
E [[0.5772]],
E
E [[1.1739]],
E
E [[1.1341]],
E
E [[0.7462]],
E
E [[0.4069]],
E
E [[0.6140]],
E
E [[0.7448]],
E
E [[0.7054]],
E
E [[0.3869]],
E
E [[1.1642]],
E
E [[1.0523]],
E
E [[0.4691]],
E
E [[0.9172]],
E
E [[1.8058]],
E
E [[0.5387]],
E
E [[0.9307]],
E
E [[0.8787]],
E
E [[1.3446]],
E
E [[1.7301]],
E
E [[1.1597]],
E
E [[1.2249]],
E
E [[1.0094]],
E
E [[0.5941]],
E
E [[0.7383]],
E
E [[0.7085]],
E
E [[1.7106]],
E
E [[0.9492]],
E
E [[0.5232]],
E
E [[0.4519]],
E
E [[0.5850]],
E
E [[0.8983]],
E
E [[0.9395]],
E
E [[1.3882]],
E
E [[1.0947]],
E
E [[0.7077]],
E
E [[0.9744]],
E
E [[0.9194]],
E
E [[0.8728]],
E
E [[1.2173]],
E
E [[0.6492]],
E
E [[1.3920]],
E
E [[0.8690]],
E
E [[0.1645]],
E
E [[0.7734]],
E
E [[1.4431]],
E
E [[0.9427]],
E
E [[1.6042]],
E
E [[0.4451]],
E
E [[0.7527]],
E
E [[1.7713]],
E
E [[0.6460]],
E
E [[1.9049]],
E
E [[1.4731]],
E
E [[1.0465]],
E
E [[1.6934]],
E
E [[0.7938]],
E
E [[1.1985]],
E
E [[1.0119]],
E
E [[1.0852]],
E
E [[1.0523]],
E
E [[0.6674]],
E
E [[1.5031]],
E
E [[1.4079]],
E
E [[1.2637]],
E
E [[1.5320]],
E
E [[0.8442]],
E
E [[0.4035]],
E
E [[0.9104]],
E
E [[1.2754]],
E
E [[1.6776]],
E
E [[1.0952]],
E
E [[0.9740]],
E
E [[1.2101]],
E
E [[0.3570]],
E
E [[0.2600]],
E
E [[0.6920]],
E
E [[1.2641]],
E
E [[0.4005]],
E
E [[0.8563]],
E
E [[0.9133]],
E
E [[0.8862]],
E
E [[0.4125]],
E
E [[0.3939]],
E
E [[0.7333]],
E
E [[1.8460]],
E
E [[1.1237]],
E
E [[1.6750]],
E
E [[1.6145]],
E
E [[1.1031]],
E
E [[1.0992]],
E
E [[0.6235]],
E
E [[1.3412]],
E
E [[1.1457]],
E
E [[1.1195]],
E
E [[0.7569]],
E
E [[0.7361]],
E
E [[0.7995]],
E
E [[1.7021]],
E
E [[0.5470]],
E
E [[1.4125]],
E
E [[1.4078]],
E
E [[1.4750]],
E
E [[1.1136]],
E
E [[0.1784]],
E
E [[1.4374]],
E
E [[0.1957]],
E
E [[0.4324]],
E
E [[1.2143]],
E
E [[0.6173]],
E
E [[0.6785]],
E
E [[1.2819]],
E
E [[1.4744]],
E
E [[1.9414]],
E
E [[1.1064]],
E
E [[0.4062]],
E
E [[0.9223]],
E
E [[0.4108]],
E
E [[0.3110]],
E
E [[0.9191]],
E
E [[1.2791]],
E
E [[1.2254]],
E
E [[0.3572]],
E
E [[0.9373]],
E
E [[1.7143]],
E
E [[0.6263]],
E
E [[1.5295]],
E
E [[0.2024]],
E
E [[0.6213]],
E
E [[0.5846]],
E
E [[0.5907]],
E
E [[1.0082]],
E
E [[1.1991]],
E
E [[1.0248]],
E
E [[1.0223]],
E
E [[0.6894]],
E
E [[0.0441]],
E
E [[1.0225]],
E
E [[1.1920]],
E
E [[0.2592]],
E
E [[0.5326]],
E
E [[1.5008]],
E
E [[1.1492]],
E
E [[1.8549]],
E
E [[1.0293]],
E
E [[1.1905]],
E
E [[1.1300]],
E
E [[0.6930]],
E
E [[1.8925]],
E
E [[0.8223]],
E
E [[1.0685]],
E
E [[0.4496]],
E
E [[0.3425]],
E
E [[1.3118]],
E
E [[0.6297]],
E
E [[0.2179]],
E
E [[1.7841]],
E
E [[0.8223]],
E
E [[0.9522]],
E
E [[1.5563]],
E
E [[1.2103]],
E
E [[1.1195]],
E
E [[0.5960]],
E
E [[0.9722]],
E
E [[1.3726]],
E
E [[1.0165]],
E
E [[1.0131]],
E
E [[1.1399]]]])
/opt/ttforge-toolchain/venv/lib/python3.10/site-packages/forge/verify/value_checkers.py:38: ValueError
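The failing parametrization id in each annotation can be fed straight back to pytest to reproduce a single case without re-running the whole models_ops suite. This is ordinary pytest node-id selection, shown through pytest.main so it stays in Python; the id below is copied from the (1, 288, 1, 1) failure above and should be swapped for whichever case is being debugged.

# Re-run one generated case by node id (ordinary pytest selection, nothing
# repo-specific). The id is the Add1 (1, 288, 1, 1) case from this log.
import pytest

node_id = (
    "forge/test/models_ops/test_add.py::"
    "test_module[Add1-[((1, 288, 1, 1), torch.float32)]]"
)
pytest.main(["-q", node_id])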
Check failure on line 18561 in forge/test/models_ops/test_add.py
github-actions / TT-Forge-FE Tests
test_add.test_module[Add1-[((1, 1280, 1, 1), torch.float32)]]
ValueError: Data mismatch -> AutomaticValueChecker (compare_with_golden): framework_model=tensor([[[[0.6228]],
[[0.8947]],
[[0.2150]],
...,
[[0.8623]],
[[0.9484]],
[[0.9666]]]]), compiled_model=tensor([[[[0.6228]],
[[1.7609]],
[[0.6886]],
...,
[[1.4463]],
[[1.8873]],
[[1.5632]]]])
Raw output
forge_module_and_shapes_dtypes = (<class 'test.models_ops.test_add.Add1'>, [((1, 1280, 1, 1), torch.float32)], {'model_name': ['pt_mobilnetv3_mobilenetv3_large_100_img_cls_timm']})
record_forge_property = <function record_property.<locals>.append_property at 0x7fb5d0b0be20>
    @pytest.mark.push
    @pytest.mark.parametrize("forge_module_and_shapes_dtypes", forge_modules_and_shapes_dtypes_list, ids=ids_func)
    def test_module(forge_module_and_shapes_dtypes, record_forge_property):
        record_forge_property("op_name", "Add")
        forge_module, operand_shapes_dtypes, metadata = forge_module_and_shapes_dtypes
        pcc = metadata.pop("pcc")
        for metadata_name, metadata_value in metadata.items():
            record_forge_property(metadata_name, metadata_value)
        max_int = 1000
        inputs = [
            Tensor.create_from_shape(operand_shape, operand_dtype, max_int=max_int)
            for operand_shape, operand_dtype in operand_shapes_dtypes
        ]
        framework_model = forge_module(forge_module.__name__)
        framework_model.process_framework_parameters()
        for name, parameter in framework_model._parameters.items():
            parameter_tensor = Tensor.create_torch_tensor(
                shape=parameter.shape.get_pytorch_shape(), dtype=parameter.pt_data_format, max_int=max_int
            )
            framework_model.set_parameter(name, parameter_tensor)
        for name, constant in framework_model._constants.items():
            constant_tensor = Tensor.create_torch_tensor(
                shape=constant.shape.get_pytorch_shape(), dtype=constant.pt_data_format, max_int=max_int
            )
            framework_model.set_constant(name, constant_tensor)
        compiled_model = compile(framework_model, sample_inputs=inputs)
>       verify(inputs, framework_model, compiled_model, VerifyConfig(value_checker=AutomaticValueChecker(pcc=pcc)))
forge/test/models_ops/test_add.py:18561:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
/opt/ttforge-toolchain/venv/lib/python3.10/site-packages/forge/verify/verify.py:333: in verify
verify_cfg.value_checker.check(fw, co)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <forge.verify.value_checkers.AutomaticValueChecker object at 0x7fb5d0acea10>
fw_out = tensor([[[[0.6228]],
[[0.8947]],
[[0.2150]],
...,
[[0.8623]],
[[0.9484]],
[[0.9666]]]])
co_out = tensor([[[[0.6228]],
[[1.7609]],
[[0.6886]],
...,
[[1.4463]],
[[1.8873]],
[[1.5632]]]])
    def check(self, fw_out, co_out):
        if not compare_with_golden(fw_out, co_out, self.pcc, self.rtol, self.atol, self.dissimilarity_threshold):
>           raise ValueError(
                f"Data mismatch -> AutomaticValueChecker (compare_with_golden): framework_model={fw_out}, compiled_model={co_out}"
            )
E ValueError: Data mismatch -> AutomaticValueChecker (compare_with_golden): framework_model=tensor([[[[0.6228]],
E
E [[0.8947]],
E
E [[0.2150]],
E
E ...,
E
E [[0.8623]],
E
E [[0.9484]],
E
E [[0.9666]]]]), compiled_model=tensor([[[[0.6228]],
E
E [[1.7609]],
E
E [[0.6886]],
E
E ...,
E
E [[1.4463]],
E
E [[1.8873]],
E
E [[1.5632]]]])
/opt/ttforge-toolchain/venv/lib/python3.10/site-packages/forge/verify/value_checkers.py:38: ValueError
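In each traceback, record_forge_property resolves to record_property.<locals>.append_property, i.e. the generated tests attach their op_name / model_name metadata through pytest's built-in record_property fixture. The fixture below is a hypothetical reconstruction of that wiring (the project's actual conftest is not part of this log); it only illustrates how those key/value pairs end up attached to the test report.

# Hypothetical conftest.py wiring; the real Forge conftest is not shown in
# this log. record_property is pytest's built-in fixture for attaching
# key/value pairs to a test's report (e.g. JUnit XML properties).
import pytest

@pytest.fixture
def record_forge_property(record_property):
    def append_property(name, value):
        record_property(name, value)
    return append_property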
Check failure on line 18561 in forge/test/models_ops/test_add.py
github-actions / TT-Forge-FE Tests
test_add.test_module[Add1-[((1, 256, 1, 1), torch.float32)]]
ValueError: Data mismatch -> AutomaticValueChecker (compare_with_golden): framework_model=tensor([[[[0.5007]],
[[0.7726]],
[[0.0929]],
...,
[[0.3583]],
[[0.2076]],
[[0.3195]]]]), compiled_model=tensor([[[[0.5007]],
[[1.0833]],
[[0.1111]],
...,
[[0.7858]],
[[0.2723]],
[[0.9851]]]])
Raw output
forge_module_and_shapes_dtypes = (<class 'test.models_ops.test_add.Add1'>, [((1, 256, 1, 1), torch.float32)], {'model_name': ['pt_vovnet_ese_vovnet39b_obj_det_torchhub', 'pt_vovnet_ese_vovnet19b_dw_obj_det_torchhub', 'pt_vovnet_ese_vovnet99b_obj_det_torchhub']})
record_forge_property = <function record_property.<locals>.append_property at 0x7fb5d0be8ee0>
    @pytest.mark.push
    @pytest.mark.parametrize("forge_module_and_shapes_dtypes", forge_modules_and_shapes_dtypes_list, ids=ids_func)
    def test_module(forge_module_and_shapes_dtypes, record_forge_property):
        record_forge_property("op_name", "Add")
        forge_module, operand_shapes_dtypes, metadata = forge_module_and_shapes_dtypes
        pcc = metadata.pop("pcc")
        for metadata_name, metadata_value in metadata.items():
            record_forge_property(metadata_name, metadata_value)
        max_int = 1000
        inputs = [
            Tensor.create_from_shape(operand_shape, operand_dtype, max_int=max_int)
            for operand_shape, operand_dtype in operand_shapes_dtypes
        ]
        framework_model = forge_module(forge_module.__name__)
        framework_model.process_framework_parameters()
        for name, parameter in framework_model._parameters.items():
            parameter_tensor = Tensor.create_torch_tensor(
                shape=parameter.shape.get_pytorch_shape(), dtype=parameter.pt_data_format, max_int=max_int
            )
            framework_model.set_parameter(name, parameter_tensor)
        for name, constant in framework_model._constants.items():
            constant_tensor = Tensor.create_torch_tensor(
                shape=constant.shape.get_pytorch_shape(), dtype=constant.pt_data_format, max_int=max_int
            )
            framework_model.set_constant(name, constant_tensor)
        compiled_model = compile(framework_model, sample_inputs=inputs)
>       verify(inputs, framework_model, compiled_model, VerifyConfig(value_checker=AutomaticValueChecker(pcc=pcc)))
forge/test/models_ops/test_add.py:18561:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
/opt/ttforge-toolchain/venv/lib/python3.10/site-packages/forge/verify/verify.py:333: in verify
verify_cfg.value_checker.check(fw, co)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <forge.verify.value_checkers.AutomaticValueChecker object at 0x7fb5d0a217e0>
fw_out = tensor([[[[0.5007]],
[[0.7726]],
[[0.0929]],
[[0.1365]],
[[0.3118]],
...2411]],
[[0.5705]],
[[0.9179]],
[[0.3583]],
[[0.2076]],
[[0.3195]]]])
co_out = tensor([[[[0.5007]],
[[1.0833]],
[[0.1111]],
[[0.7676]],
[[0.5815]],
...9858]],
[[0.9751]],
[[1.5083]],
[[0.7858]],
[[0.2723]],
[[0.9851]]]])
    def check(self, fw_out, co_out):
        if not compare_with_golden(fw_out, co_out, self.pcc, self.rtol, self.atol, self.dissimilarity_threshold):
>           raise ValueError(
                f"Data mismatch -> AutomaticValueChecker (compare_with_golden): framework_model={fw_out}, compiled_model={co_out}"
            )
E ValueError: Data mismatch -> AutomaticValueChecker (compare_with_golden): framework_model=tensor([[[[0.5007]],
E
E [[0.7726]],
E
E [[0.0929]],
E
E [[0.1365]],
E
E [[0.3118]],
E
E [[0.6385]],
E
E [[0.4945]],
E
E [[0.9009]],
E
E [[0.4601]],
E
E [[0.6367]],
E
E [[0.3533]],
E
E [[0.4061]],
E
E [[0.0268]],
E
E [[0.1733]],
E
E [[0.2983]],
E
E [[0.5229]],
E
E [[0.7021]],
E
E [[0.8044]],
E
E [[0.1655]],
E
E [[0.2867]],
E
E [[0.6860]],
E
E [[0.9196]],
E
E [[0.4015]],
E
E [[0.8786]],
E
E [[0.4238]],
E
E [[0.5573]],
E
E [[0.9572]],
E
E [[0.0406]],
E
E [[0.1897]],
E
E [[0.3778]],
E
E [[0.3095]],
E
E [[0.9364]],
E
E [[0.1803]],
E
E [[0.2743]],
E
E [[0.1551]],
E
E [[0.0361]],
E
E [[0.2126]],
E
E [[0.9342]],
E
E [[0.7275]],
E
E [[0.7468]],
E
E [[0.5307]],
E
E [[0.2481]],
E
E [[0.5890]],
E
E [[0.0376]],
E
E [[0.1431]],
E
E [[0.2467]],
E
E [[0.8199]],
E
E [[0.7976]],
E
E [[0.2827]],
E
E [[0.4864]],
E
E [[0.8242]],
E
E [[1.0015]],
E
E [[0.7029]],
E
E [[0.5720]],
E
E [[0.8397]],
E
E [[0.2100]],
E
E [[0.5976]],
E
E [[0.1168]],
E
E [[0.1579]],
E
E [[0.2461]],
E
E [[0.7307]],
E
E [[0.7055]],
E
E [[0.2082]],
E
E [[0.6555]],
E
E [[0.7789]],
E
E [[0.4413]],
E
E [[0.5235]],
E
E [[0.6203]],
E
E [[0.8146]],
E
E [[0.9845]],
E
E [[0.1191]],
E
E [[0.3212]],
E
E [[0.7009]],
E
E [[0.9187]],
E
E [[0.9395]],
E
E [[0.9456]],
E
E [[0.6039]],
E
E [[0.0696]],
E
E [[0.5504]],
E
E [[0.1916]],
E
E [[0.0384]],
E
E [[0.9487]],
E
E [[0.8846]],
E
E [[0.0057]],
E
E [[0.5980]],
E
E [[0.4202]],
E
E [[0.4221]],
E
E [[0.2755]],
E
E [[0.6967]],
E
E [[0.2083]],
E
E [[0.6877]],
E
E [[0.7573]],
E
E [[0.8624]],
E
E [[0.6914]],
E
E [[0.0096]],
E
E [[0.1801]],
E
E [[0.7541]],
E
E [[0.6091]],
E
E [[0.1144]],
E
E [[0.2165]],
E
E [[0.9748]],
E
E [[0.8413]],
E
E [[0.2864]],
E
E [[0.3786]],
E
E [[0.0281]],
E
E [[0.4954]],
E
E [[0.1279]],
E
E [[0.1187]],
E
E [[0.4769]],
E
E [[0.5795]],
E
E [[0.2997]],
E
E [[0.8011]],
E
E [[0.2002]],
E
E [[0.9581]],
E
E [[0.8471]],
E
E [[0.0828]],
E
E [[0.3800]],
E
E [[0.5270]],
E
E [[0.5774]],
E
E [[0.6230]],
E
E [[0.7006]],
E
E [[0.5344]],
E
E [[0.2605]],
E
E [[0.7410]],
E
E [[0.0248]],
E
E [[0.2081]],
E
E [[0.3793]],
E
E [[0.2609]],
E
E [[0.3295]],
E
E [[0.0946]],
E
E [[0.3981]],
E
E [[0.6113]],
E
E [[0.1787]],
E
E [[0.4788]],
E
E [[0.8624]],
E
E [[0.4530]],
E
E [[0.5183]],
E
E [[0.4613]],
E
E [[0.6056]],
E
E [[0.8223]],
E
E [[0.9780]],
E
E [[0.8220]],
E
E [[0.9791]],
E
E [[0.4683]],
E
E [[0.0553]],
E
E [[0.2674]],
E
E [[0.8449]],
E
E [[0.5012]],
E
E [[0.2559]],
E
E [[0.1213]],
E
E [[0.0365]],
E
E [[0.0824]],
E
E [[0.4030]],
E
E [[0.7786]],
E
E [[0.7747]],
E
E [[0.0222]],
E
E [[0.8163]],
E
E [[0.1132]],
E
E [[0.3987]],
E
E [[0.3017]],
E
E [[0.4081]],
E
E [[0.4063]],
E
E [[0.0558]],
E
E [[0.0727]],
E
E [[0.4262]],
E
E [[0.5109]],
E
E [[0.2773]],
E
E [[0.6928]],
E
E [[0.0544]],
E
E [[0.4707]],
E
E [[0.9441]],
E
E [[0.3005]],
E
E [[0.9559]],
E
E [[0.6855]],
E
E [[0.0532]],
E
E [[0.8208]],
E
E [[0.4467]],
E
E [[0.2812]],
E
E [[0.9043]],
E
E [[0.1004]],
E
E [[0.5581]],
E
E [[0.3997]],
E
E [[0.8615]],
E
E [[0.6440]],
E
E [[0.7447]],
E
E [[0.6810]],
E
E [[0.3842]],
E
E [[0.3993]],
E
E [[0.0924]],
E
E [[0.7753]],
E
E [[0.9014]],
E
E [[0.8465]],
E
E [[0.1517]],
E
E [[0.5267]],
E
E [[0.1520]],
E
E [[0.2292]],
E
E [[0.2131]],
E
E [[0.6753]],
E
E [[0.2065]],
E
E [[0.4935]],
E
E [[0.5255]],
E
E [[0.8267]],
E
E [[0.1265]],
E
E [[0.1612]],
E
E [[0.2141]],
E
E [[0.8544]],
E
E [[0.3247]],
E
E [[0.9262]],
E
E [[0.6852]],
E
E [[0.5677]],
E
E [[0.5007]],
E
E [[0.4056]],
E
E [[0.5672]],
E
E [[0.3903]],
E
E [[0.5009]],
E
E [[0.5682]],
E
E [[0.1133]],
E
E [[0.2424]],
E
E [[0.9082]],
E
E [[0.0987]],
E
E [[0.4685]],
E
E [[0.9990]],
E
E [[0.6850]],
E
E [[0.5186]],
E
E [[0.0711]],
E
E [[0.7521]],
E
E [[0.1483]],
E
E [[0.3625]],
E
E [[0.3367]],
E
E [[0.4304]],
E
E [[0.5099]],
E
E [[0.9168]],
E
E [[0.5668]],
E
E [[0.9523]],
E
E [[0.8103]],
E
E [[0.1883]],
E
E [[0.7287]],
E
E [[0.1510]],
E
E [[0.2925]],
E
E [[0.6515]],
E
E [[0.6695]],
E
E [[0.8795]],
E
E [[0.3435]],
E
E [[0.5052]],
E
E [[0.7618]],
E
E [[0.0209]],
E
E [[0.8659]],
E
E [[0.0910]],
E
E [[0.5113]],
E
E [[0.4194]],
E
E [[0.2411]],
E
E [[0.5705]],
E
E [[0.9179]],
E
E [[0.3583]],
E
E [[0.2076]],
E
E [[0.3195]]]]), compiled_model=tensor([[[[0.5007]],
E
E [[1.0833]],
E
E [[0.1111]],
E
E [[0.7676]],
E
E [[0.5815]],
E
E [[1.3750]],
E
E [[0.9302]],
E
E [[1.7932]],
E
E [[1.3675]],
E
E [[1.3608]],
E
E [[0.7058]],
E
E [[0.7968]],
E
E [[0.0291]],
E
E [[0.6526]],
E
E [[1.0567]],
E
E [[0.7177]],
E
E [[1.5163]],
E
E [[0.8480]],
E
E [[1.0810]],
E
E [[0.2880]],
E
E [[0.8246]],
E
E [[1.6700]],
E
E [[0.4615]],
E
E [[1.3098]],
E
E [[0.8110]],
E
E [[1.1752]],
E
E [[1.9229]],
E
E [[1.0305]],
E
E [[0.9996]],
E
E [[1.1787]],
E
E [[1.1051]],
E
E [[1.5775]],
E
E [[0.4782]],
E
E [[1.2507]],
E
E [[1.0430]],
E
E [[0.3706]],
E
E [[0.2834]],
E
E [[1.5365]],
E
E [[1.4345]],
E
E [[1.0363]],
E
E [[0.5581]],
E
E [[1.1543]],
E
E [[0.6859]],
E
E [[0.6292]],
E
E [[1.1190]],
E
E [[0.7157]],
E
E [[0.9704]],
E
E [[1.6686]],
E
E [[0.3302]],
E
E [[1.4491]],
E
E [[1.2033]],
E
E [[1.7087]],
E
E [[1.6379]],
E
E [[0.8312]],
E
E [[0.9579]],
E
E [[1.0898]],
E
E [[0.7372]],
E
E [[0.5309]],
E
E [[0.8488]],
E
E [[1.1008]],
E
E [[1.1074]],
E
E [[1.3519]],
E
E [[0.6930]],
E
E [[0.7698]],
E
E [[1.6000]],
E
E [[0.6748]],
E
E [[1.4036]],
E
E [[0.7495]],
E
E [[0.8659]],
E
E [[1.5590]],
E
E [[0.7656]],
E
E [[0.5788]],
E
E [[0.7979]],
E
E [[1.6396]],
E
E [[1.9307]],
E
E [[1.5490]],
E
E [[0.6194]],
E
E [[0.2392]],
E
E [[1.4746]],
E
E [[0.3025]],
E
E [[0.5461]],
E
E [[1.5064]],
E
E [[1.0947]],
E
E [[0.2485]],
E
E [[0.8000]],
E
E [[0.8701]],
E
E [[1.1739]],
E
E [[0.3325]],
E
E [[1.2654]],
E
E [[1.1192]],
E
E [[1.0590]],
E
E [[1.4134]],
E
E [[1.4218]],
E
E [[1.6082]],
E
E [[0.4278]],
E
E [[0.7034]],
E
E [[1.5209]],
E
E [[1.0721]],
E
E [[0.1942]],
E
E [[0.8429]],
E
E [[1.7676]],
E
E [[1.3874]],
E
E [[1.1295]],
E
E [[0.8938]],
E
E [[0.3886]],
E
E [[1.0208]],
E
E [[0.1447]],
E
E [[0.1625]],
E
E [[1.4292]],
E
E [[1.1861]],
E
E [[1.1629]],
E
E [[1.4072]],
E
E [[1.1781]],
E
E [[1.2112]],
E
E [[1.2315]],
E
E [[0.7691]],
E
E [[1.1927]],
E
E [[1.2691]],
E
E [[1.3597]],
E
E [[0.6425]],
E
E [[1.3034]],
E
E [[0.6378]],
E
E [[0.9813]],
E
E [[1.1830]],
E
E [[0.9426]],
E
E [[0.3476]],
E
E [[1.2771]],
E
E [[0.5963]],
E
E [[0.7398]],
E
E [[0.1748]],
E
E [[0.7788]],
E
E [[0.7382]],
E
E [[1.0013]],
E
E [[0.7253]],
E
E [[1.1447]],
E
E [[1.3771]],
E
E [[1.4588]],
E
E [[0.5633]],
E
E [[1.3755]],
E
E [[1.0139]],
E
E [[1.3044]],
E
E [[1.7729]],
E
E [[1.5133]],
E
E [[0.5623]],
E
E [[0.4008]],
E
E [[1.2544]],
E
E [[1.1567]],
E
E [[1.2808]],
E
E [[0.7198]],
E
E [[0.3123]],
E
E [[0.1334]],
E
E [[0.1668]],
E
E [[1.3710]],
E
E [[1.5535]],
E
E [[1.1276]],
E
E [[0.6145]],
E
E [[0.8178]],
E
E [[0.4643]],
E
E [[0.8554]],
E
E [[1.0430]],
E
E [[0.4856]],
E
E [[1.2424]],
E
E [[0.9182]],
E
E [[0.2089]],
E
E [[0.6493]],
E
E [[0.7188]],
E
E [[0.4654]],
E
E [[1.0899]],
E
E [[0.5886]],
E
E [[1.0847]],
E
E [[1.4840]],
E
E [[1.1220]],
E
E [[1.5739]],
E
E [[1.4431]],
E
E [[0.9976]],
E
E [[1.1010]],
E
E [[0.6033]],
E
E [[0.3334]],
E
E [[0.9069]],
E
E [[0.2684]],
E
E [[1.2296]],
E
E [[0.7039]],
E
E [[1.5562]],
E
E [[0.8001]],
E
E [[1.2129]],
E
E [[0.6900]],
E
E [[0.7916]],
E
E [[0.9016]],
E
E [[0.9757]],
E
E [[1.7020]],
E
E [[1.1032]],
E
E [[1.4291]],
E
E [[0.8792]],
E
E [[0.9530]],
E
E [[0.6343]],
E
E [[1.0525]],
E
E [[0.3332]],
E
E [[0.7085]],
E
E [[0.9349]],
E
E [[1.1063]],
E
E [[0.6541]],
E
E [[1.4604]],
E
E [[1.0428]],
E
E [[1.0934]],
E
E [[0.6377]],
E
E [[1.1438]],
E
E [[1.1743]],
E
E [[1.0292]],
E
E [[1.1690]],
E
E [[1.3792]],
E
E [[1.1073]],
E
E [[0.8540]],
E
E [[0.9149]],
E
E [[0.6280]],
E
E [[1.3783]],
E
E [[0.5695]],
E
E [[0.7668]],
E
E [[0.7210]],
E
E [[1.0442]],
E
E [[1.0575]],
E
E [[1.0338]],
E
E [[1.4432]],
E
E [[1.0380]],
E
E [[1.3695]],
E
E [[0.5026]],
E
E [[1.0429]],
E
E [[0.5019]],
E
E [[1.0990]],
E
E [[0.6049]],
E
E [[0.5166]],
E
E [[0.9275]],
E
E [[1.1374]],
E
E [[1.1139]],
E
E [[1.7617]],
E
E [[1.3167]],
E
E [[0.7533]],
E
E [[1.0604]],
E
E [[0.7477]],
E
E [[0.9576]],
E
E [[1.1988]],
E
E [[1.6290]],
E
E [[1.7913]],
E
E [[1.3225]],
E
E [[1.0912]],
E
E [[1.2269]],
E
E [[0.5664]],
E
E [[1.7082]],
E
E [[0.3874]],
E
E [[1.3478]],
E
E [[0.5136]],
E
E [[0.9858]],
E
E [[0.9751]],
E
E [[1.5083]],
E
E [[0.7858]],
E
E [[0.2723]],
E
E [[0.9851]]]])
/opt/ttforge-toolchain/venv/lib/python3.10/site-packages/forge/verify/value_checkers.py:38: ValueError
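For triage it is usually more useful to know where the compiled output starts drifting than to scroll the full dumps: in every dump in this section the first element agrees exactly and the later values diverge. A small, generic helper for that kind of inspection is sketched below; it is a debugging aid only and not part of forge.verify.

# Generic debugging helper (not part of forge.verify): summarize how far the
# compiled output tracks the framework output before diverging.
import torch

def summarize_mismatch(fw_out: torch.Tensor, co_out: torch.Tensor,
                       rtol: float = 1e-2, atol: float = 1e-2) -> dict:
    close = torch.isclose(fw_out, co_out, rtol=rtol, atol=atol)
    diff = (fw_out - co_out).abs()
    mismatched = (~close).nonzero()
    return {
        "num_elements": fw_out.numel(),
        "num_mismatched": int(mismatched.shape[0]),
        "max_abs_diff": float(diff.max()),
        "first_mismatch_index": (
            tuple(int(i) for i in mismatched[0]) if mismatched.shape[0] else None
        ),
    }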
Check failure on line 18561 in forge/test/models_ops/test_add.py
github-actions / TT-Forge-FE Tests
test_add.test_module[Add1-[((1, 512, 1, 1), torch.float32)]]
ValueError: Data mismatch -> AutomaticValueChecker (compare_with_golden): framework_model=tensor([[[[0.8575]],
[[1.1295]],
[[0.4498]],
...,
[[1.0286]],
[[0.7174]],
[[1.1704]]]]), compiled_model=tensor([[[[0.8575]],
[[1.2002]],
[[0.1577]],
...,
[[1.1347]],
[[1.3529]],
[[1.2792]]]])
Raw output
forge_module_and_shapes_dtypes = (<class 'test.models_ops.test_add.Add1'>, [((1, 512, 1, 1), torch.float32)], {'model_name': ['pt_vovnet_ese_vovnet39b_obj_det_torchhub', 'pt_vovnet_ese_vovnet19b_dw_obj_det_torchhub', 'pt_vovnet_ese_vovnet99b_obj_det_torchhub']})
record_forge_property = <function record_property.<locals>.append_property at 0x7fb5d0bea830>
    @pytest.mark.push
    @pytest.mark.parametrize("forge_module_and_shapes_dtypes", forge_modules_and_shapes_dtypes_list, ids=ids_func)
    def test_module(forge_module_and_shapes_dtypes, record_forge_property):
        record_forge_property("op_name", "Add")
        forge_module, operand_shapes_dtypes, metadata = forge_module_and_shapes_dtypes
        pcc = metadata.pop("pcc")
        for metadata_name, metadata_value in metadata.items():
            record_forge_property(metadata_name, metadata_value)
        max_int = 1000
        inputs = [
            Tensor.create_from_shape(operand_shape, operand_dtype, max_int=max_int)
            for operand_shape, operand_dtype in operand_shapes_dtypes
        ]
        framework_model = forge_module(forge_module.__name__)
        framework_model.process_framework_parameters()
        for name, parameter in framework_model._parameters.items():
            parameter_tensor = Tensor.create_torch_tensor(
                shape=parameter.shape.get_pytorch_shape(), dtype=parameter.pt_data_format, max_int=max_int
            )
            framework_model.set_parameter(name, parameter_tensor)
        for name, constant in framework_model._constants.items():
            constant_tensor = Tensor.create_torch_tensor(
                shape=constant.shape.get_pytorch_shape(), dtype=constant.pt_data_format, max_int=max_int
            )
            framework_model.set_constant(name, constant_tensor)
        compiled_model = compile(framework_model, sample_inputs=inputs)
>       verify(inputs, framework_model, compiled_model, VerifyConfig(value_checker=AutomaticValueChecker(pcc=pcc)))
forge/test/models_ops/test_add.py:18561:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
/opt/ttforge-toolchain/venv/lib/python3.10/site-packages/forge/verify/verify.py:333: in verify
verify_cfg.value_checker.check(fw, co)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <forge.verify.value_checkers.AutomaticValueChecker object at 0x7fb5c8e9d540>
fw_out = tensor([[[[0.8575]],
[[1.1295]],
[[0.4498]],
[[0.4933]],
[[0.6687]],
...0192]],
[[1.0659]],
[[0.7135]],
[[1.0286]],
[[0.7174]],
[[1.1704]]]])
co_out = tensor([[[[0.8575]],
[[1.2002]],
[[0.1577]],
[[0.8021]],
[[0.4638]],
...0860]],
[[1.1686]],
[[1.2674]],
[[1.1347]],
[[1.3529]],
[[1.2792]]]])
def check(self, fw_out, co_out):
if not compare_with_golden(fw_out, co_out, self.pcc, self.rtol, self.atol, self.dissimilarity_threshold):
> raise ValueError(
f"Data mismatch -> AutomaticValueChecker (compare_with_golden): framework_model={fw_out}, compiled_model={co_out}"
)
E ValueError: Data mismatch -> AutomaticValueChecker (compare_with_golden): framework_model=tensor([[[[0.8575]],
E
E [[1.1295]],
E
E ...,
E
E [[0.7174]],
E
E [[1.1704]]]]), compiled_model=tensor([[[[0.8575]],
E
E [[1.2002]],
E
E ...,
E
E [[1.3529]],
E
E [[1.2792]]]])
/opt/ttforge-toolchain/venv/lib/python3.10/site-packages/forge/verify/value_checkers.py:38: ValueError
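For context on why these runs are reported as failures even though the first few values agree: the verifier rejects the compiled output when compare_with_golden drops below the configured PCC (together with the rtol/atol and dissimilarity thresholds). A minimal sketch of a PCC-style check under that assumption follows; the helper name pcc_check and its fallback behaviour are illustrative, not the actual Forge implementation.

# Minimal sketch of a PCC-style golden comparison (assumption: the real
# forge.verify.compare_with_golden also applies rtol/atol and a
# dissimilarity threshold on top of this correlation check).
import torch

def pcc_check(fw_out: torch.Tensor, co_out: torch.Tensor, pcc: float = 0.99) -> bool:
    # Flatten both tensors and compute the Pearson correlation coefficient.
    fw = fw_out.flatten().to(torch.float64)
    co = co_out.flatten().to(torch.float64)
    fw_c = fw - fw.mean()
    co_c = co - co.mean()
    denom = fw_c.norm() * co_c.norm()
    if denom == 0:
        # Degenerate case (constant tensors): fall back to exact equality.
        return bool(torch.equal(fw, co))
    corr = float((fw_c @ co_c) / denom)
    return corr >= pcc

In the Add1 failure above, the compiled output matches the framework output on the first element and then diverges, so the correlation falls below the threshold and the checker raises the data-mismatch error.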
Check failure on line 18561 in forge/test/models_ops/test_add.py
github-actions / TT-Forge-FE Tests
test_add.test_module[Add1-[((1, 768, 1, 1), torch.float32)]]
ValueError: Data mismatch -> AutomaticValueChecker (compare_with_golden): framework_model=tensor([[[[0.6126]],
[[0.8846]],
...,
[[0.5087]],
[[1.0502]]]]), compiled_model=tensor([[[[0.6126]],
[[1.2848]],
...,
[[1.4820]],
[[2.2083]]]])
Raw output
forge_module_and_shapes_dtypes = (<class 'test.models_ops.test_add.Add1'>, [((1, 768, 1, 1), torch.float32)], {'model_name': ['pt_vovnet_ese_vovnet39b_obj_det_torchhub', 'pt_vovnet_ese_vovnet19b_dw_obj_det_torchhub', 'pt_vovnet_ese_vovnet99b_obj_det_torchhub']})
record_forge_property = <function record_property.<locals>.append_property at 0x7fb5d0b789d0>
@pytest.mark.push
@pytest.mark.parametrize("forge_module_and_shapes_dtypes", forge_modules_and_shapes_dtypes_list, ids=ids_func)
def test_module(forge_module_and_shapes_dtypes, record_forge_property):
record_forge_property("op_name", "Add")
forge_module, operand_shapes_dtypes, metadata = forge_module_and_shapes_dtypes
pcc = metadata.pop("pcc")
for metadata_name, metadata_value in metadata.items():
record_forge_property(metadata_name, metadata_value)
max_int = 1000
inputs = [
Tensor.create_from_shape(operand_shape, operand_dtype, max_int=max_int)
for operand_shape, operand_dtype in operand_shapes_dtypes
]
framework_model = forge_module(forge_module.__name__)
framework_model.process_framework_parameters()
for name, parameter in framework_model._parameters.items():
parameter_tensor = Tensor.create_torch_tensor(
shape=parameter.shape.get_pytorch_shape(), dtype=parameter.pt_data_format, max_int=max_int
)
framework_model.set_parameter(name, parameter_tensor)
for name, constant in framework_model._constants.items():
constant_tensor = Tensor.create_torch_tensor(
shape=constant.shape.get_pytorch_shape(), dtype=constant.pt_data_format, max_int=max_int
)
framework_model.set_constant(name, constant_tensor)
compiled_model = compile(framework_model, sample_inputs=inputs)
> verify(inputs, framework_model, compiled_model, VerifyConfig(value_checker=AutomaticValueChecker(pcc=pcc)))
forge/test/models_ops/test_add.py:18561:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
/opt/ttforge-toolchain/venv/lib/python3.10/site-packages/forge/verify/verify.py:333: in verify
verify_cfg.value_checker.check(fw, co)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <forge.verify.value_checkers.AutomaticValueChecker object at 0x7fb5d0e2b0a0>
fw_out = tensor([[[[0.6126]],
[[0.8846]],
[[0.2049]],
[[0.2484]],
[[0.4238]],
...1656]],
[[1.0684]],
[[0.7962]],
[[0.2661]],
[[0.5087]],
[[1.0502]]]])
co_out = tensor([[[[0.6126]],
[[1.2848]],
[[1.0160]],
[[1.2694]],
[[1.4213]],
...6793]],
[[2.2943]],
[[1.2602]],
[[1.3874]],
[[1.4820]],
[[2.2083]]]])
def check(self, fw_out, co_out):
if not compare_with_golden(fw_out, co_out, self.pcc, self.rtol, self.atol, self.dissimilarity_threshold):
> raise ValueError(
f"Data mismatch -> AutomaticValueChecker (compare_with_golden): framework_model={fw_out}, compiled_model={co_out}"
)
E ValueError: Data mismatch -> AutomaticValueChecker (compare_with_golden): framework_model=tensor([[[[0.6126]],
E
E [[0.8846]],
E
E ...,
E
E [[0.5087]],
E
E [[1.0502]]]]), compiled_model=tensor([[[[0.6126]],
E
E [[1.2848]],
E
E ...,
E
E [[1.2747]],
E
E
Check failure on line 24 in forge/test/models_ops/test_advindex.py
github-actions / TT-Forge-FE Tests
test_advindex.test_module[Advindex0-[((448, 1280), torch.float32), ((1, 1), torch.int64)]]
IndexError: index 540 is out of bounds for dimension 0 with size 448
Raw output
forge_module_and_shapes_dtypes = (<class 'test.models_ops.test_advindex.Advindex0'>, [((448, 1280), torch.float32), ((1, 1), torch.int64)], {'model_name': ['pt_whisper_openai_whisper_large_speech_recognition_hf']})
record_forge_property = <function record_property.<locals>.append_property at 0x7fb5d0a0aef0>
@pytest.mark.push
@pytest.mark.parametrize("forge_module_and_shapes_dtypes", forge_modules_and_shapes_dtypes_list, ids=ids_func)
def test_module(forge_module_and_shapes_dtypes, record_forge_property):
record_forge_property("op_name", "AdvIndex")
forge_module, operand_shapes_dtypes, metadata = forge_module_and_shapes_dtypes
pcc = metadata.pop("pcc")
for metadata_name, metadata_value in metadata.items():
record_forge_property(metadata_name, metadata_value)
max_int = 1000
inputs = [
Tensor.create_from_shape(operand_shape, operand_dtype, max_int=max_int)
for operand_shape, operand_dtype in operand_shapes_dtypes
]
framework_model = forge_module(forge_module.__name__)
framework_model.process_framework_parameters()
for name, parameter in framework_model._parameters.items():
parameter_tensor = Tensor.create_torch_tensor(
shape=parameter.shape.get_pytorch_shape(), dtype=parameter.pt_data_format, max_int=max_int
)
framework_model.set_parameter(name, parameter_tensor)
for name, constant in framework_model._constants.items():
constant_tensor = Tensor.create_torch_tensor(
shape=constant.shape.get_pytorch_shape(), dtype=constant.pt_data_format, max_int=max_int
)
framework_model.set_constant(name, constant_tensor)
> compiled_model = compile(framework_model, sample_inputs=inputs)
forge/test/models_ops/test_advindex.py:228:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
/opt/ttforge-toolchain/venv/lib/python3.10/site-packages/forge/compile.py:251: in compile_main
return forge_compile_from_context(compile_context)
/opt/ttforge-toolchain/venv/lib/python3.10/site-packages/forge/compile.py:293: in forge_compile_from_context
next_stage = stage_to_func[current_stage](context)
/opt/ttforge-toolchain/venv/lib/python3.10/site-packages/forge/compile.py:683: in generate_initial_graph
context.graph, context.outputs, context.intermediate_tensors, context.inputs, _ = generate_graph(
/opt/ttforge-toolchain/venv/lib/python3.10/site-packages/forge/compile.py:1123: in generate_graph
outputs = module.forward(*outputs)
/opt/ttforge-toolchain/venv/lib/python3.10/site-packages/forge/module.py:633: in wrap_forward
return orig_forward(*args, **kwargs)
forge/test/models_ops/test_advindex.py:24: in forward
advindex_output_1 = forge.op.AdvIndex("", advindex_input_0, advindex_input_1)
/opt/ttforge-toolchain/venv/lib/python3.10/site-packages/forge/op/tm.py:162: in AdvIndex
return op("adv_index", name, operandA, operandB, attrs=(dim,)).get_tensor()
/opt/ttforge-toolchain/venv/lib/python3.10/site-packages/forge/op/common.py:82: in get_tensor
result.set_value(get_f_forge_eval(self.cpp_op_type)(values))
/opt/ttforge-toolchain/venv/lib/python3.10/site-packages/forge/op/eval/forge/__init__.py:226: in <lambda>
return lambda *inputs: module_or_class.eval(op_type.op, op_type.attr, *inputs)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
type = 'adv_index', attr = [0]
ops = [tensor([[0.4963, 0.7682, 0.0885, ..., 0.7358, 0.8219, 0.8401],
[0.1265, 0.4421, 0.5730, ..., 0.4247, 0.5047...8932, ..., 0.4350, 0.4742, 0.0554],
[0.8759, 0.2447, 0.5269, ..., 0.3116, 0.5214, 0.7686]]), tensor([[540]])]
def eval(type, attr, ops):
assert len(ops) == 1 or (
type == "adv_index" and len(ops) == 2
), f"Tensor manipulation ops should have one input {len(ops)} {attr}"
t_ops = to_torch_operands(*ops)
dtype = ops[0].dtype
if type == "transpose":
assert len(attr) == 3, "Transpose should have 3 attributes"
dim0, dim1, orig_size = attr
return torch.transpose(t_ops[0], dim0, dim1)
if type == "reshape":
return t_ops[0].reshape(attr)
if type == "select":
assert len(attr) == 4, "Select should have 4 attributes"
dim, begin, length, stride = attr
zero_shape = list(t_ops[0].shape)
zero_shape[dim] = 1
zero_slice = torch.zeros(zero_shape, dtype=dtype).squeeze(dim)
result = []
for offset in range(0, t_ops[0].shape[dim] - begin, stride):
for i in range(begin, begin + length):
if offset + i < t_ops[0].shape[dim] or stride == t_ops[0].shape[dim]:
result.append(t_ops[0].select(dim, offset + i))
else:
result.append(zero_slice)
return torch.stack(result, dim=dim)
if type == "gather":
assert len(attr) == 5, "Gather should have 5 attributes"
dim, begin, length, stride, orig_size = attr
x = t_ops[0]
result = []
zero_shape = list(x.shape)
if dim > 0:
dim -= 4
while len(zero_shape) <= abs(dim):
zero_shape = [1] + zero_shape
x = x.unsqueeze(0)
zero_shape[dim] = 1
zero_slice = torch.zeros(zero_shape, dtype=dtype).squeeze(dim)
offset = 0
for i in range(0, orig_size):
range_i = (i - begin) % stride
if i >= begin and range_i < length:
result.append(x.select(dim, offset))
offset += 1
else:
result.append(zero_slice)
return torch.stack(result, dim=dim)
if type == "index":
assert len(attr) == 4, "Index should have 4 attributes"
dim, start, stop, stride = attr
if dim >= 0:
dim -= len(ops[0].shape)
if dim == -5:
return t_ops[0][..., start:stop:stride, :, :, :, :]
elif dim == -4:
return t_ops[0][..., start:stop:stride, :, :, :]
elif dim == -3:
return t_ops[0][..., start:stop:stride, :, :]
elif dim == -2:
return t_ops[0][..., start:stop:stride, :]
elif dim == -1:
return t_ops[0][..., start:stop:stride]
else:
raise NotImplementedError(f"Dim={dim}")
if type == "adv_index":
assert len(attr) == 1, "AdvIndex should have 1 attributes"
dim = attr[0]
assert dim == 0, "Currently not supported"
if len(t_ops[1].shape) > 1:
if len(t_ops[0].shape) > len(t_ops[1].shape) and t_ops[0].shape[0] == 1:
# Padded
ret = torch.unsqueeze(t_ops[0][0][t_ops[1].numpy()], 0)
else:
> ret = torch.unsqueeze(t_ops[0][t_ops[1].numpy()], 0)
E IndexError: index 540 is out of bounds for dimension 0 with size 448
/opt/ttforge-toolchain/venv/lib/python3.10/site-packages/forge/op/eval/forge/tm.py:121: IndexError
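This and the following AdvIndex failures share a root cause: the harness draws integer inputs with max_int=1000, so the generated int64 index tensor can contain values (540, 967, 686, 933, 775, ...) that exceed the size of dimension 0 of the indexed operand (448, 32, 169, or 1). Below is a minimal PyTorch sketch of the failure mode and one possible mitigation; the clamping step is a hypothetical illustration, not the harness's actual fix.

import torch

# Minimal illustration of the failure mode above (not the Forge harness itself):
# a 448-row lookup table indexed with a randomly generated position id.
table = torch.rand(448, 1280)
bad_index = torch.tensor([[540]], dtype=torch.int64)   # max_int=1000 allows values >= 448

try:
    _ = table[bad_index]        # same advanced-indexing path as the adv_index eval
except IndexError as err:
    print(err)                  # index 540 is out of bounds for dimension 0 with size 448

# Hypothetical mitigation: clamp generated indices to the valid range of the
# indexed dimension before evaluating the op.
safe_index = bad_index.clamp(0, table.shape[0] - 1)
_ = table[safe_index]           # succeeds, shape (1, 1, 1280)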
Check failure on line 24 in forge/test/models_ops/test_advindex.py
github-actions / TT-Forge-FE Tests
test_advindex.test_module[Advindex0-[((448, 384), torch.float32), ((1, 1), torch.int64)]]
IndexError: index 967 is out of bounds for dimension 0 with size 448
Raw output
forge_module_and_shapes_dtypes = (<class 'test.models_ops.test_advindex.Advindex0'>, [((448, 384), torch.float32), ((1, 1), torch.int64)], {'model_name': ['pt_whisper_openai_whisper_tiny_speech_recognition_hf']})
record_forge_property = <function record_property.<locals>.append_property at 0x7fb5d0aadfc0>
@pytest.mark.push
@pytest.mark.parametrize("forge_module_and_shapes_dtypes", forge_modules_and_shapes_dtypes_list, ids=ids_func)
def test_module(forge_module_and_shapes_dtypes, record_forge_property):
record_forge_property("op_name", "AdvIndex")
forge_module, operand_shapes_dtypes, metadata = forge_module_and_shapes_dtypes
pcc = metadata.pop("pcc")
for metadata_name, metadata_value in metadata.items():
record_forge_property(metadata_name, metadata_value)
max_int = 1000
inputs = [
Tensor.create_from_shape(operand_shape, operand_dtype, max_int=max_int)
for operand_shape, operand_dtype in operand_shapes_dtypes
]
framework_model = forge_module(forge_module.__name__)
framework_model.process_framework_parameters()
for name, parameter in framework_model._parameters.items():
parameter_tensor = Tensor.create_torch_tensor(
shape=parameter.shape.get_pytorch_shape(), dtype=parameter.pt_data_format, max_int=max_int
)
framework_model.set_parameter(name, parameter_tensor)
for name, constant in framework_model._constants.items():
constant_tensor = Tensor.create_torch_tensor(
shape=constant.shape.get_pytorch_shape(), dtype=constant.pt_data_format, max_int=max_int
)
framework_model.set_constant(name, constant_tensor)
> compiled_model = compile(framework_model, sample_inputs=inputs)
forge/test/models_ops/test_advindex.py:228:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
/opt/ttforge-toolchain/venv/lib/python3.10/site-packages/forge/compile.py:251: in compile_main
return forge_compile_from_context(compile_context)
/opt/ttforge-toolchain/venv/lib/python3.10/site-packages/forge/compile.py:293: in forge_compile_from_context
next_stage = stage_to_func[current_stage](context)
/opt/ttforge-toolchain/venv/lib/python3.10/site-packages/forge/compile.py:683: in generate_initial_graph
context.graph, context.outputs, context.intermediate_tensors, context.inputs, _ = generate_graph(
/opt/ttforge-toolchain/venv/lib/python3.10/site-packages/forge/compile.py:1123: in generate_graph
outputs = module.forward(*outputs)
/opt/ttforge-toolchain/venv/lib/python3.10/site-packages/forge/module.py:633: in wrap_forward
return orig_forward(*args, **kwargs)
forge/test/models_ops/test_advindex.py:24: in forward
advindex_output_1 = forge.op.AdvIndex("", advindex_input_0, advindex_input_1)
/opt/ttforge-toolchain/venv/lib/python3.10/site-packages/forge/op/tm.py:162: in AdvIndex
return op("adv_index", name, operandA, operandB, attrs=(dim,)).get_tensor()
/opt/ttforge-toolchain/venv/lib/python3.10/site-packages/forge/op/common.py:82: in get_tensor
result.set_value(get_f_forge_eval(self.cpp_op_type)(values))
/opt/ttforge-toolchain/venv/lib/python3.10/site-packages/forge/op/eval/forge/__init__.py:226: in <lambda>
return lambda *inputs: module_or_class.eval(op_type.op, op_type.attr, *inputs)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
type = 'adv_index', attr = [0]
ops = [tensor([[0.4963, 0.7682, 0.0885, ..., 0.3720, 0.7200, 0.9455],
[0.6654, 0.9998, 0.7593, ..., 0.1497, 0.3923...7251, ..., 0.2890, 0.9094, 0.5943],
[0.4656, 0.9008, 0.5421, ..., 0.4441, 0.7666, 0.1319]]), tensor([[967]])]
def eval(type, attr, ops):
assert len(ops) == 1 or (
type == "adv_index" and len(ops) == 2
), f"Tensor manipulation ops should have one input {len(ops)} {attr}"
t_ops = to_torch_operands(*ops)
dtype = ops[0].dtype
if type == "transpose":
assert len(attr) == 3, "Transpose should have 3 attributes"
dim0, dim1, orig_size = attr
return torch.transpose(t_ops[0], dim0, dim1)
if type == "reshape":
return t_ops[0].reshape(attr)
if type == "select":
assert len(attr) == 4, "Select should have 4 attributes"
dim, begin, length, stride = attr
zero_shape = list(t_ops[0].shape)
zero_shape[dim] = 1
zero_slice = torch.zeros(zero_shape, dtype=dtype).squeeze(dim)
result = []
for offset in range(0, t_ops[0].shape[dim] - begin, stride):
for i in range(begin, begin + length):
if offset + i < t_ops[0].shape[dim] or stride == t_ops[0].shape[dim]:
result.append(t_ops[0].select(dim, offset + i))
else:
result.append(zero_slice)
return torch.stack(result, dim=dim)
if type == "gather":
assert len(attr) == 5, "Gather should have 5 attributes"
dim, begin, length, stride, orig_size = attr
x = t_ops[0]
result = []
zero_shape = list(x.shape)
if dim > 0:
dim -= 4
while len(zero_shape) <= abs(dim):
zero_shape = [1] + zero_shape
x = x.unsqueeze(0)
zero_shape[dim] = 1
zero_slice = torch.zeros(zero_shape, dtype=dtype).squeeze(dim)
offset = 0
for i in range(0, orig_size):
range_i = (i - begin) % stride
if i >= begin and range_i < length:
result.append(x.select(dim, offset))
offset += 1
else:
result.append(zero_slice)
return torch.stack(result, dim=dim)
if type == "index":
assert len(attr) == 4, "Index should have 4 attributes"
dim, start, stop, stride = attr
if dim >= 0:
dim -= len(ops[0].shape)
if dim == -5:
return t_ops[0][..., start:stop:stride, :, :, :, :]
elif dim == -4:
return t_ops[0][..., start:stop:stride, :, :, :]
elif dim == -3:
return t_ops[0][..., start:stop:stride, :, :]
elif dim == -2:
return t_ops[0][..., start:stop:stride, :]
elif dim == -1:
return t_ops[0][..., start:stop:stride]
else:
raise NotImplementedError(f"Dim={dim}")
if type == "adv_index":
assert len(attr) == 1, "AdvIndex should have 1 attributes"
dim = attr[0]
assert dim == 0, "Currently not supported"
if len(t_ops[1].shape) > 1:
if len(t_ops[0].shape) > len(t_ops[1].shape) and t_ops[0].shape[0] == 1:
# Padded
ret = torch.unsqueeze(t_ops[0][0][t_ops[1].numpy()], 0)
else:
> ret = torch.unsqueeze(t_ops[0][t_ops[1].numpy()], 0)
E IndexError: index 967 is out of bounds for dimension 0 with size 448
/opt/ttforge-toolchain/venv/lib/python3.10/site-packages/forge/op/eval/forge/tm.py:121: IndexError
Check failure on line 24 in forge/test/models_ops/test_advindex.py
github-actions / TT-Forge-FE Tests
test_advindex.test_module[Advindex0-[((448, 768), torch.float32), ((1, 1), torch.int64)]]
IndexError: index 686 is out of bounds for dimension 0 with size 448
Raw output
forge_module_and_shapes_dtypes = (<class 'test.models_ops.test_advindex.Advindex0'>, [((448, 768), torch.float32), ((1, 1), torch.int64)], {'model_name': ['pt_whisper_openai_whisper_small_speech_recognition_hf']})
record_forge_property = <function record_property.<locals>.append_property at 0x7fb5d0e83490>
@pytest.mark.push
@pytest.mark.parametrize("forge_module_and_shapes_dtypes", forge_modules_and_shapes_dtypes_list, ids=ids_func)
def test_module(forge_module_and_shapes_dtypes, record_forge_property):
record_forge_property("op_name", "AdvIndex")
forge_module, operand_shapes_dtypes, metadata = forge_module_and_shapes_dtypes
pcc = metadata.pop("pcc")
for metadata_name, metadata_value in metadata.items():
record_forge_property(metadata_name, metadata_value)
max_int = 1000
inputs = [
Tensor.create_from_shape(operand_shape, operand_dtype, max_int=max_int)
for operand_shape, operand_dtype in operand_shapes_dtypes
]
framework_model = forge_module(forge_module.__name__)
framework_model.process_framework_parameters()
for name, parameter in framework_model._parameters.items():
parameter_tensor = Tensor.create_torch_tensor(
shape=parameter.shape.get_pytorch_shape(), dtype=parameter.pt_data_format, max_int=max_int
)
framework_model.set_parameter(name, parameter_tensor)
for name, constant in framework_model._constants.items():
constant_tensor = Tensor.create_torch_tensor(
shape=constant.shape.get_pytorch_shape(), dtype=constant.pt_data_format, max_int=max_int
)
framework_model.set_constant(name, constant_tensor)
> compiled_model = compile(framework_model, sample_inputs=inputs)
forge/test/models_ops/test_advindex.py:228:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
/opt/ttforge-toolchain/venv/lib/python3.10/site-packages/forge/compile.py:251: in compile_main
return forge_compile_from_context(compile_context)
/opt/ttforge-toolchain/venv/lib/python3.10/site-packages/forge/compile.py:293: in forge_compile_from_context
next_stage = stage_to_func[current_stage](context)
/opt/ttforge-toolchain/venv/lib/python3.10/site-packages/forge/compile.py:683: in generate_initial_graph
context.graph, context.outputs, context.intermediate_tensors, context.inputs, _ = generate_graph(
/opt/ttforge-toolchain/venv/lib/python3.10/site-packages/forge/compile.py:1123: in generate_graph
outputs = module.forward(*outputs)
/opt/ttforge-toolchain/venv/lib/python3.10/site-packages/forge/module.py:633: in wrap_forward
return orig_forward(*args, **kwargs)
forge/test/models_ops/test_advindex.py:24: in forward
advindex_output_1 = forge.op.AdvIndex("", advindex_input_0, advindex_input_1)
/opt/ttforge-toolchain/venv/lib/python3.10/site-packages/forge/op/tm.py:162: in AdvIndex
return op("adv_index", name, operandA, operandB, attrs=(dim,)).get_tensor()
/opt/ttforge-toolchain/venv/lib/python3.10/site-packages/forge/op/common.py:82: in get_tensor
result.set_value(get_f_forge_eval(self.cpp_op_type)(values))
/opt/ttforge-toolchain/venv/lib/python3.10/site-packages/forge/op/eval/forge/__init__.py:226: in <lambda>
return lambda *inputs: module_or_class.eval(op_type.op, op_type.attr, *inputs)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
type = 'adv_index', attr = [0]
ops = [tensor([[0.4963, 0.7682, 0.0885, ..., 0.1497, 0.3923, 0.9338],
[0.1164, 0.3539, 0.6640, ..., 0.5025, 0.4458...9689, ..., 0.7033, 0.2813, 0.8834],
[0.0584, 0.6134, 0.5302, ..., 0.7396, 0.1252, 0.8320]]), tensor([[686]])]
def eval(type, attr, ops):
assert len(ops) == 1 or (
type == "adv_index" and len(ops) == 2
), f"Tensor manipulation ops should have one input {len(ops)} {attr}"
t_ops = to_torch_operands(*ops)
dtype = ops[0].dtype
if type == "transpose":
assert len(attr) == 3, "Transpose should have 3 attributes"
dim0, dim1, orig_size = attr
return torch.transpose(t_ops[0], dim0, dim1)
if type == "reshape":
return t_ops[0].reshape(attr)
if type == "select":
assert len(attr) == 4, "Select should have 4 attributes"
dim, begin, length, stride = attr
zero_shape = list(t_ops[0].shape)
zero_shape[dim] = 1
zero_slice = torch.zeros(zero_shape, dtype=dtype).squeeze(dim)
result = []
for offset in range(0, t_ops[0].shape[dim] - begin, stride):
for i in range(begin, begin + length):
if offset + i < t_ops[0].shape[dim] or stride == t_ops[0].shape[dim]:
result.append(t_ops[0].select(dim, offset + i))
else:
result.append(zero_slice)
return torch.stack(result, dim=dim)
if type == "gather":
assert len(attr) == 5, "Gather should have 5 attributes"
dim, begin, length, stride, orig_size = attr
x = t_ops[0]
result = []
zero_shape = list(x.shape)
if dim > 0:
dim -= 4
while len(zero_shape) <= abs(dim):
zero_shape = [1] + zero_shape
x = x.unsqueeze(0)
zero_shape[dim] = 1
zero_slice = torch.zeros(zero_shape, dtype=dtype).squeeze(dim)
offset = 0
for i in range(0, orig_size):
range_i = (i - begin) % stride
if i >= begin and range_i < length:
result.append(x.select(dim, offset))
offset += 1
else:
result.append(zero_slice)
return torch.stack(result, dim=dim)
if type == "index":
assert len(attr) == 4, "Index should have 4 attributes"
dim, start, stop, stride = attr
if dim >= 0:
dim -= len(ops[0].shape)
if dim == -5:
return t_ops[0][..., start:stop:stride, :, :, :, :]
elif dim == -4:
return t_ops[0][..., start:stop:stride, :, :, :]
elif dim == -3:
return t_ops[0][..., start:stop:stride, :, :]
elif dim == -2:
return t_ops[0][..., start:stop:stride, :]
elif dim == -1:
return t_ops[0][..., start:stop:stride]
else:
raise NotImplementedError(f"Dim={dim}")
if type == "adv_index":
assert len(attr) == 1, "AdvIndex should have 1 attributes"
dim = attr[0]
assert dim == 0, "Currently not supported"
if len(t_ops[1].shape) > 1:
if len(t_ops[0].shape) > len(t_ops[1].shape) and t_ops[0].shape[0] == 1:
# Padded
ret = torch.unsqueeze(t_ops[0][0][t_ops[1].numpy()], 0)
else:
> ret = torch.unsqueeze(t_ops[0][t_ops[1].numpy()], 0)
E IndexError: index 686 is out of bounds for dimension 0 with size 448
/opt/ttforge-toolchain/venv/lib/python3.10/site-packages/forge/op/eval/forge/tm.py:121: IndexError
Check failure on line 34 in forge/test/models_ops/test_advindex.py
github-actions / TT-Forge-FE Tests
test_advindex.test_module[Advindex1-[((1, 2), torch.float32)]]
IndexError: index 933 is out of bounds for dimension 0 with size 1
Raw output
forge_module_and_shapes_dtypes = (<class 'test.models_ops.test_advindex.Advindex1'>, [((1, 2), torch.float32)], {'model_name': ['pt_gptneo_eleutherai_g...uct_seq_cls_hf', 'pt_llama3_meta_llama_llama_3_1_8b_seq_cls_hf', 'pt_llama3_meta_llama_llama_3_2_1b_seq_cls_hf', ...]})
record_forge_property = <function record_property.<locals>.append_property at 0x7fb5d0a09090>
@pytest.mark.push
@pytest.mark.parametrize("forge_module_and_shapes_dtypes", forge_modules_and_shapes_dtypes_list, ids=ids_func)
def test_module(forge_module_and_shapes_dtypes, record_forge_property):
record_forge_property("op_name", "AdvIndex")
forge_module, operand_shapes_dtypes, metadata = forge_module_and_shapes_dtypes
pcc = metadata.pop("pcc")
for metadata_name, metadata_value in metadata.items():
record_forge_property(metadata_name, metadata_value)
max_int = 1000
inputs = [
Tensor.create_from_shape(operand_shape, operand_dtype, max_int=max_int)
for operand_shape, operand_dtype in operand_shapes_dtypes
]
framework_model = forge_module(forge_module.__name__)
framework_model.process_framework_parameters()
for name, parameter in framework_model._parameters.items():
parameter_tensor = Tensor.create_torch_tensor(
shape=parameter.shape.get_pytorch_shape(), dtype=parameter.pt_data_format, max_int=max_int
)
framework_model.set_parameter(name, parameter_tensor)
for name, constant in framework_model._constants.items():
constant_tensor = Tensor.create_torch_tensor(
shape=constant.shape.get_pytorch_shape(), dtype=constant.pt_data_format, max_int=max_int
)
framework_model.set_constant(name, constant_tensor)
> compiled_model = compile(framework_model, sample_inputs=inputs)
forge/test/models_ops/test_advindex.py:228:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
/opt/ttforge-toolchain/venv/lib/python3.10/site-packages/forge/compile.py:251: in compile_main
return forge_compile_from_context(compile_context)
/opt/ttforge-toolchain/venv/lib/python3.10/site-packages/forge/compile.py:293: in forge_compile_from_context
next_stage = stage_to_func[current_stage](context)
/opt/ttforge-toolchain/venv/lib/python3.10/site-packages/forge/compile.py:683: in generate_initial_graph
context.graph, context.outputs, context.intermediate_tensors, context.inputs, _ = generate_graph(
/opt/ttforge-toolchain/venv/lib/python3.10/site-packages/forge/compile.py:1123: in generate_graph
outputs = module.forward(*outputs)
/opt/ttforge-toolchain/venv/lib/python3.10/site-packages/forge/module.py:633: in wrap_forward
return orig_forward(*args, **kwargs)
forge/test/models_ops/test_advindex.py:34: in forward
advindex_output_1 = forge.op.AdvIndex("", advindex_input_0, self.get_constant("advindex1_const_1"))
/opt/ttforge-toolchain/venv/lib/python3.10/site-packages/forge/op/tm.py:162: in AdvIndex
return op("adv_index", name, operandA, operandB, attrs=(dim,)).get_tensor()
/opt/ttforge-toolchain/venv/lib/python3.10/site-packages/forge/op/common.py:82: in get_tensor
result.set_value(get_f_forge_eval(self.cpp_op_type)(values))
/opt/ttforge-toolchain/venv/lib/python3.10/site-packages/forge/op/eval/forge/__init__.py:226: in <lambda>
return lambda *inputs: module_or_class.eval(op_type.op, op_type.attr, *inputs)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
type = 'adv_index', attr = [0]
ops = [tensor([[0.4963, 0.7682]]), tensor([933])]
def eval(type, attr, ops):
assert len(ops) == 1 or (
type == "adv_index" and len(ops) == 2
), f"Tensor manipulation ops should have one input {len(ops)} {attr}"
t_ops = to_torch_operands(*ops)
dtype = ops[0].dtype
if type == "transpose":
assert len(attr) == 3, "Transpose should have 3 attributes"
dim0, dim1, orig_size = attr
return torch.transpose(t_ops[0], dim0, dim1)
if type == "reshape":
return t_ops[0].reshape(attr)
if type == "select":
assert len(attr) == 4, "Select should have 4 attributes"
dim, begin, length, stride = attr
zero_shape = list(t_ops[0].shape)
zero_shape[dim] = 1
zero_slice = torch.zeros(zero_shape, dtype=dtype).squeeze(dim)
result = []
for offset in range(0, t_ops[0].shape[dim] - begin, stride):
for i in range(begin, begin + length):
if offset + i < t_ops[0].shape[dim] or stride == t_ops[0].shape[dim]:
result.append(t_ops[0].select(dim, offset + i))
else:
result.append(zero_slice)
return torch.stack(result, dim=dim)
if type == "gather":
assert len(attr) == 5, "Gather should have 5 attributes"
dim, begin, length, stride, orig_size = attr
x = t_ops[0]
result = []
zero_shape = list(x.shape)
if dim > 0:
dim -= 4
while len(zero_shape) <= abs(dim):
zero_shape = [1] + zero_shape
x = x.unsqueeze(0)
zero_shape[dim] = 1
zero_slice = torch.zeros(zero_shape, dtype=dtype).squeeze(dim)
offset = 0
for i in range(0, orig_size):
range_i = (i - begin) % stride
if i >= begin and range_i < length:
result.append(x.select(dim, offset))
offset += 1
else:
result.append(zero_slice)
return torch.stack(result, dim=dim)
if type == "index":
assert len(attr) == 4, "Index should have 4 attributes"
dim, start, stop, stride = attr
if dim >= 0:
dim -= len(ops[0].shape)
if dim == -5:
return t_ops[0][..., start:stop:stride, :, :, :, :]
elif dim == -4:
return t_ops[0][..., start:stop:stride, :, :, :]
elif dim == -3:
return t_ops[0][..., start:stop:stride, :, :]
elif dim == -2:
return t_ops[0][..., start:stop:stride, :]
elif dim == -1:
return t_ops[0][..., start:stop:stride]
else:
raise NotImplementedError(f"Dim={dim}")
if type == "adv_index":
assert len(attr) == 1, "AdvIndex should have 1 attributes"
dim = attr[0]
assert dim == 0, "Currently not supported"
if len(t_ops[1].shape) > 1:
if len(t_ops[0].shape) > len(t_ops[1].shape) and t_ops[0].shape[0] == 1:
# Padded
ret = torch.unsqueeze(t_ops[0][0][t_ops[1].numpy()], 0)
else:
ret = torch.unsqueeze(t_ops[0][t_ops[1].numpy()], 0)
else:
> ret = t_ops[0][t_ops[1].numpy()]
E IndexError: index 933 is out of bounds for dimension 0 with size 1
/opt/ttforge-toolchain/venv/lib/python3.10/site-packages/forge/op/eval/forge/tm.py:123: IndexError
Check failure on line 24 in forge/test/models_ops/test_advindex.py
github-actions / TT-Forge-FE Tests
test_advindex.test_module[Advindex0-[((32, 2), torch.float32), ((1,), torch.int64)]]
IndexError: index 775 is out of bounds for dimension 0 with size 32
Raw output
forge_module_and_shapes_dtypes = (<class 'test.models_ops.test_advindex.Advindex0'>, [((32, 2), torch.float32), ((1,), torch.int64)], {'model_name': ['pt_opt_facebook_opt_1_3b_seq_cls_hf', 'pt_opt_facebook_opt_125m_seq_cls_hf', 'pt_opt_facebook_opt_350m_seq_cls_hf']})
record_forge_property = <function record_property.<locals>.append_property at 0x7fb5d0ae9360>
@pytest.mark.push
@pytest.mark.parametrize("forge_module_and_shapes_dtypes", forge_modules_and_shapes_dtypes_list, ids=ids_func)
def test_module(forge_module_and_shapes_dtypes, record_forge_property):
record_forge_property("op_name", "AdvIndex")
forge_module, operand_shapes_dtypes, metadata = forge_module_and_shapes_dtypes
pcc = metadata.pop("pcc")
for metadata_name, metadata_value in metadata.items():
record_forge_property(metadata_name, metadata_value)
max_int = 1000
inputs = [
Tensor.create_from_shape(operand_shape, operand_dtype, max_int=max_int)
for operand_shape, operand_dtype in operand_shapes_dtypes
]
framework_model = forge_module(forge_module.__name__)
framework_model.process_framework_parameters()
for name, parameter in framework_model._parameters.items():
parameter_tensor = Tensor.create_torch_tensor(
shape=parameter.shape.get_pytorch_shape(), dtype=parameter.pt_data_format, max_int=max_int
)
framework_model.set_parameter(name, parameter_tensor)
for name, constant in framework_model._constants.items():
constant_tensor = Tensor.create_torch_tensor(
shape=constant.shape.get_pytorch_shape(), dtype=constant.pt_data_format, max_int=max_int
)
framework_model.set_constant(name, constant_tensor)
> compiled_model = compile(framework_model, sample_inputs=inputs)
forge/test/models_ops/test_advindex.py:228:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
/opt/ttforge-toolchain/venv/lib/python3.10/site-packages/forge/compile.py:251: in compile_main
return forge_compile_from_context(compile_context)
/opt/ttforge-toolchain/venv/lib/python3.10/site-packages/forge/compile.py:293: in forge_compile_from_context
next_stage = stage_to_func[current_stage](context)
/opt/ttforge-toolchain/venv/lib/python3.10/site-packages/forge/compile.py:683: in generate_initial_graph
context.graph, context.outputs, context.intermediate_tensors, context.inputs, _ = generate_graph(
/opt/ttforge-toolchain/venv/lib/python3.10/site-packages/forge/compile.py:1123: in generate_graph
outputs = module.forward(*outputs)
/opt/ttforge-toolchain/venv/lib/python3.10/site-packages/forge/module.py:633: in wrap_forward
return orig_forward(*args, **kwargs)
forge/test/models_ops/test_advindex.py:24: in forward
advindex_output_1 = forge.op.AdvIndex("", advindex_input_0, advindex_input_1)
/opt/ttforge-toolchain/venv/lib/python3.10/site-packages/forge/op/tm.py:162: in AdvIndex
return op("adv_index", name, operandA, operandB, attrs=(dim,)).get_tensor()
/opt/ttforge-toolchain/venv/lib/python3.10/site-packages/forge/op/common.py:82: in get_tensor
result.set_value(get_f_forge_eval(self.cpp_op_type)(values))
/opt/ttforge-toolchain/venv/lib/python3.10/site-packages/forge/op/eval/forge/__init__.py:226: in <lambda>
return lambda *inputs: module_or_class.eval(op_type.op, op_type.attr, *inputs)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
type = 'adv_index', attr = [0]
ops = [tensor([[0.4963, 0.7682],
[0.0885, 0.1320],
[0.3074, 0.6341],
[0.4901, 0.8964],
[0.45... [0.5932, 0.1123],
[0.1535, 0.2417],
[0.7262, 0.7011],
[0.2038, 0.6511]]), tensor([775])]
def eval(type, attr, ops):
assert len(ops) == 1 or (
type == "adv_index" and len(ops) == 2
), f"Tensor manipulation ops should have one input {len(ops)} {attr}"
t_ops = to_torch_operands(*ops)
dtype = ops[0].dtype
if type == "transpose":
assert len(attr) == 3, "Transpose should have 3 attributes"
dim0, dim1, orig_size = attr
return torch.transpose(t_ops[0], dim0, dim1)
if type == "reshape":
return t_ops[0].reshape(attr)
if type == "select":
assert len(attr) == 4, "Select should have 4 attributes"
dim, begin, length, stride = attr
zero_shape = list(t_ops[0].shape)
zero_shape[dim] = 1
zero_slice = torch.zeros(zero_shape, dtype=dtype).squeeze(dim)
result = []
for offset in range(0, t_ops[0].shape[dim] - begin, stride):
for i in range(begin, begin + length):
if offset + i < t_ops[0].shape[dim] or stride == t_ops[0].shape[dim]:
result.append(t_ops[0].select(dim, offset + i))
else:
result.append(zero_slice)
return torch.stack(result, dim=dim)
if type == "gather":
assert len(attr) == 5, "Gather should have 5 attributes"
dim, begin, length, stride, orig_size = attr
x = t_ops[0]
result = []
zero_shape = list(x.shape)
if dim > 0:
dim -= 4
while len(zero_shape) <= abs(dim):
zero_shape = [1] + zero_shape
x = x.unsqueeze(0)
zero_shape[dim] = 1
zero_slice = torch.zeros(zero_shape, dtype=dtype).squeeze(dim)
offset = 0
for i in range(0, orig_size):
range_i = (i - begin) % stride
if i >= begin and range_i < length:
result.append(x.select(dim, offset))
offset += 1
else:
result.append(zero_slice)
return torch.stack(result, dim=dim)
if type == "index":
assert len(attr) == 4, "Index should have 4 attributes"
dim, start, stop, stride = attr
if dim >= 0:
dim -= len(ops[0].shape)
if dim == -5:
return t_ops[0][..., start:stop:stride, :, :, :, :]
elif dim == -4:
return t_ops[0][..., start:stop:stride, :, :, :]
elif dim == -3:
return t_ops[0][..., start:stop:stride, :, :]
elif dim == -2:
return t_ops[0][..., start:stop:stride, :]
elif dim == -1:
return t_ops[0][..., start:stop:stride]
else:
raise NotImplementedError(f"Dim={dim}")
if type == "adv_index":
assert len(attr) == 1, "AdvIndex should have 1 attributes"
dim = attr[0]
assert dim == 0, "Currently not supported"
if len(t_ops[1].shape) > 1:
if len(t_ops[0].shape) > len(t_ops[1].shape) and t_ops[0].shape[0] == 1:
# Padded
ret = torch.unsqueeze(t_ops[0][0][t_ops[1].numpy()], 0)
else:
ret = torch.unsqueeze(t_ops[0][t_ops[1].numpy()], 0)
else:
> ret = t_ops[0][t_ops[1].numpy()]
E IndexError: index 775 is out of bounds for dimension 0 with size 32
/opt/ttforge-toolchain/venv/lib/python3.10/site-packages/forge/op/eval/forge/tm.py:123: IndexError
Check failure on line 60 in forge/test/models_ops/test_advindex.py
github-actions / TT-Forge-FE Tests
test_advindex.test_module[Advindex3-[((2401,), torch.int64)]]
IndexError: index 659 is out of bounds for dimension 0 with size 169
Raw output
forge_module_and_shapes_dtypes = (<class 'test.models_ops.test_advindex.Advindex3'>, [((2401,), torch.int64)], {'model_name': ['pt_swin_microsoft_swin_tiny_patch4_window7_224_img_cls_hf']})
record_forge_property = <function record_property.<locals>.append_property at 0x7fb5d0aadf30>
@pytest.mark.push
@pytest.mark.parametrize("forge_module_and_shapes_dtypes", forge_modules_and_shapes_dtypes_list, ids=ids_func)
def test_module(forge_module_and_shapes_dtypes, record_forge_property):
record_forge_property("op_name", "AdvIndex")
forge_module, operand_shapes_dtypes, metadata = forge_module_and_shapes_dtypes
pcc = metadata.pop("pcc")
for metadata_name, metadata_value in metadata.items():
record_forge_property(metadata_name, metadata_value)
max_int = 1000
inputs = [
Tensor.create_from_shape(operand_shape, operand_dtype, max_int=max_int)
for operand_shape, operand_dtype in operand_shapes_dtypes
]
framework_model = forge_module(forge_module.__name__)
framework_model.process_framework_parameters()
for name, parameter in framework_model._parameters.items():
parameter_tensor = Tensor.create_torch_tensor(
shape=parameter.shape.get_pytorch_shape(), dtype=parameter.pt_data_format, max_int=max_int
)
framework_model.set_parameter(name, parameter_tensor)
for name, constant in framework_model._constants.items():
constant_tensor = Tensor.create_torch_tensor(
shape=constant.shape.get_pytorch_shape(), dtype=constant.pt_data_format, max_int=max_int
)
framework_model.set_constant(name, constant_tensor)
> compiled_model = compile(framework_model, sample_inputs=inputs)
forge/test/models_ops/test_advindex.py:228:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
/opt/ttforge-toolchain/venv/lib/python3.10/site-packages/forge/compile.py:251: in compile_main
return forge_compile_from_context(compile_context)
/opt/ttforge-toolchain/venv/lib/python3.10/site-packages/forge/compile.py:293: in forge_compile_from_context
next_stage = stage_to_func[current_stage](context)
/opt/ttforge-toolchain/venv/lib/python3.10/site-packages/forge/compile.py:683: in generate_initial_graph
context.graph, context.outputs, context.intermediate_tensors, context.inputs, _ = generate_graph(
/opt/ttforge-toolchain/venv/lib/python3.10/site-packages/forge/compile.py:1123: in generate_graph
outputs = module.forward(*outputs)
/opt/ttforge-toolchain/venv/lib/python3.10/site-packages/forge/module.py:633: in wrap_forward
return orig_forward(*args, **kwargs)
forge/test/models_ops/test_advindex.py:60: in forward
advindex_output_1 = forge.op.AdvIndex("", self.get_parameter("advindex3.weight_0"), advindex_input_1)
/opt/ttforge-toolchain/venv/lib/python3.10/site-packages/forge/op/tm.py:162: in AdvIndex
return op("adv_index", name, operandA, operandB, attrs=(dim,)).get_tensor()
/opt/ttforge-toolchain/venv/lib/python3.10/site-packages/forge/op/common.py:82: in get_tensor
result.set_value(get_f_forge_eval(self.cpp_op_type)(values))
/opt/ttforge-toolchain/venv/lib/python3.10/site-packages/forge/op/eval/forge/__init__.py:226: in <lambda>
return lambda *inputs: module_or_class.eval(op_type.op, op_type.attr, *inputs)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
type = 'adv_index', attr = [0]
ops = [tensor([[0.5021, 0.1993, 0.7877, 0.3959, 0.5343, 0.2395],
[0.0765, 0.1679, 0.4451, 0.1380, 0.9928, 0.5054],
... [0.6022, 0.9509, 0.7530, 0.4840, 0.9816, 0.2013]], requires_grad=True), tensor([ 44, 239, 933, ..., 86, 971, 475])]
def eval(type, attr, ops):
assert len(ops) == 1 or (
type == "adv_index" and len(ops) == 2
), f"Tensor manipulation ops should have one input {len(ops)} {attr}"
t_ops = to_torch_operands(*ops)
dtype = ops[0].dtype
if type == "transpose":
assert len(attr) == 3, "Transpose should have 3 attributes"
dim0, dim1, orig_size = attr
return torch.transpose(t_ops[0], dim0, dim1)
if type == "reshape":
return t_ops[0].reshape(attr)
if type == "select":
assert len(attr) == 4, "Select should have 4 attributes"
dim, begin, length, stride = attr
zero_shape = list(t_ops[0].shape)
zero_shape[dim] = 1
zero_slice = torch.zeros(zero_shape, dtype=dtype).squeeze(dim)
result = []
for offset in range(0, t_ops[0].shape[dim] - begin, stride):
for i in range(begin, begin + length):
if offset + i < t_ops[0].shape[dim] or stride == t_ops[0].shape[dim]:
result.append(t_ops[0].select(dim, offset + i))
else:
result.append(zero_slice)
return torch.stack(result, dim=dim)
if type == "gather":
assert len(attr) == 5, "Gather should have 5 attributes"
dim, begin, length, stride, orig_size = attr
x = t_ops[0]
result = []
zero_shape = list(x.shape)
if dim > 0:
dim -= 4
while len(zero_shape) <= abs(dim):
zero_shape = [1] + zero_shape
x = x.unsqueeze(0)
zero_shape[dim] = 1
zero_slice = torch.zeros(zero_shape, dtype=dtype).squeeze(dim)
offset = 0
for i in range(0, orig_size):
range_i = (i - begin) % stride
if i >= begin and range_i < length:
result.append(x.select(dim, offset))
offset += 1
else:
result.append(zero_slice)
return torch.stack(result, dim=dim)
if type == "index":
assert len(attr) == 4, "Index should have 4 attributes"
dim, start, stop, stride = attr
if dim >= 0:
dim -= len(ops[0].shape)
if dim == -5:
return t_ops[0][..., start:stop:stride, :, :, :, :]
elif dim == -4:
return t_ops[0][..., start:stop:stride, :, :, :]
elif dim == -3:
return t_ops[0][..., start:stop:stride, :, :]
elif dim == -2:
return t_ops[0][..., start:stop:stride, :]
elif dim == -1:
return t_ops[0][..., start:stop:stride]
else:
raise NotImplementedError(f"Dim={dim}")
if type == "adv_index":
assert len(attr) == 1, "AdvIndex should have 1 attributes"
dim = attr[0]
assert dim == 0, "Currently not supported"
if len(t_ops[1].shape) > 1:
if len(t_ops[0].shape) > len(t_ops[1].shape) and t_ops[0].shape[0] == 1:
# Padded
ret = torch.unsqueeze(t_ops[0][0][t_ops[1].numpy()], 0)
else:
ret = torch.unsqueeze(t_ops[0][t_ops[1].numpy()], 0)
else:
> ret = t_ops[0][t_ops[1].numpy()]
E IndexError: index 659 is out of bounds for dimension 0 with size 169
/opt/ttforge-toolchain/venv/lib/python3.10/site-packages/forge/op/eval/forge/tm.py:123: IndexError
Check failure on line 86 in forge/test/models_ops/test_advindex.py
github-actions / TT-Forge-FE Tests
test_advindex.test_module[Advindex5-[((2401,), torch.int64)]]
IndexError: index 616 is out of bounds for dimension 0 with size 169
Raw output
forge_module_and_shapes_dtypes = (<class 'test.models_ops.test_advindex.Advindex5'>, [((2401,), torch.int64)], {'model_name': ['pt_swin_microsoft_swin_tiny_patch4_window7_224_img_cls_hf']})
record_forge_property = <function record_property.<locals>.append_property at 0x7fb5d0e83010>
@pytest.mark.push
@pytest.mark.parametrize("forge_module_and_shapes_dtypes", forge_modules_and_shapes_dtypes_list, ids=ids_func)
def test_module(forge_module_and_shapes_dtypes, record_forge_property):
record_forge_property("op_name", "AdvIndex")
forge_module, operand_shapes_dtypes, metadata = forge_module_and_shapes_dtypes
pcc = metadata.pop("pcc")
for metadata_name, metadata_value in metadata.items():
record_forge_property(metadata_name, metadata_value)
max_int = 1000
inputs = [
Tensor.create_from_shape(operand_shape, operand_dtype, max_int=max_int)
for operand_shape, operand_dtype in operand_shapes_dtypes
]
framework_model = forge_module(forge_module.__name__)
framework_model.process_framework_parameters()
for name, parameter in framework_model._parameters.items():
parameter_tensor = Tensor.create_torch_tensor(
shape=parameter.shape.get_pytorch_shape(), dtype=parameter.pt_data_format, max_int=max_int
)
framework_model.set_parameter(name, parameter_tensor)
for name, constant in framework_model._constants.items():
constant_tensor = Tensor.create_torch_tensor(
shape=constant.shape.get_pytorch_shape(), dtype=constant.pt_data_format, max_int=max_int
)
framework_model.set_constant(name, constant_tensor)
> compiled_model = compile(framework_model, sample_inputs=inputs)
forge/test/models_ops/test_advindex.py:228:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
/opt/ttforge-toolchain/venv/lib/python3.10/site-packages/forge/compile.py:251: in compile_main
return forge_compile_from_context(compile_context)
/opt/ttforge-toolchain/venv/lib/python3.10/site-packages/forge/compile.py:293: in forge_compile_from_context
next_stage = stage_to_func[current_stage](context)
/opt/ttforge-toolchain/venv/lib/python3.10/site-packages/forge/compile.py:683: in generate_initial_graph
context.graph, context.outputs, context.intermediate_tensors, context.inputs, _ = generate_graph(
/opt/ttforge-toolchain/venv/lib/python3.10/site-packages/forge/compile.py:1123: in generate_graph
outputs = module.forward(*outputs)
/opt/ttforge-toolchain/venv/lib/python3.10/site-packages/forge/module.py:633: in wrap_forward
return orig_forward(*args, **kwargs)
forge/test/models_ops/test_advindex.py:86: in forward
advindex_output_1 = forge.op.AdvIndex("", self.get_parameter("advindex5.weight_0"), advindex_input_1)
/opt/ttforge-toolchain/venv/lib/python3.10/site-packages/forge/op/tm.py:162: in AdvIndex
return op("adv_index", name, operandA, operandB, attrs=(dim,)).get_tensor()
/opt/ttforge-toolchain/venv/lib/python3.10/site-packages/forge/op/common.py:82: in get_tensor
result.set_value(get_f_forge_eval(self.cpp_op_type)(values))
/opt/ttforge-toolchain/venv/lib/python3.10/site-packages/forge/op/eval/forge/__init__.py:226: in <lambda>
return lambda *inputs: module_or_class.eval(op_type.op, op_type.attr, *inputs)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
type = 'adv_index', attr = [0]
ops = [tensor([[0.5021, 0.1993, 0.7877, ..., 0.0038, 0.6580, 0.7363],
[0.4000, 0.0579, 0.2118, ..., 0.3656, 0.4324...1380, 0.8632, ..., 0.9592, 0.1974, 0.6280]],
requires_grad=True), tensor([ 44, 239, 933, ..., 86, 971, 475])]
def eval(type, attr, ops):
assert len(ops) == 1 or (
type == "adv_index" and len(ops) == 2
), f"Tensor manipulation ops should have one input {len(ops)} {attr}"
t_ops = to_torch_operands(*ops)
dtype = ops[0].dtype
if type == "transpose":
assert len(attr) == 3, "Transpose should have 3 attributes"
dim0, dim1, orig_size = attr
return torch.transpose(t_ops[0], dim0, dim1)
if type == "reshape":
return t_ops[0].reshape(attr)
if type == "select":
assert len(attr) == 4, "Select should have 4 attributes"
dim, begin, length, stride = attr
zero_shape = list(t_ops[0].shape)
zero_shape[dim] = 1
zero_slice = torch.zeros(zero_shape, dtype=dtype).squeeze(dim)
result = []
for offset in range(0, t_ops[0].shape[dim] - begin, stride):
for i in range(begin, begin + length):
if offset + i < t_ops[0].shape[dim] or stride == t_ops[0].shape[dim]:
result.append(t_ops[0].select(dim, offset + i))
else:
result.append(zero_slice)
return torch.stack(result, dim=dim)
if type == "gather":
assert len(attr) == 5, "Gather should have 5 attributes"
dim, begin, length, stride, orig_size = attr
x = t_ops[0]
result = []
zero_shape = list(x.shape)
if dim > 0:
dim -= 4
while len(zero_shape) <= abs(dim):
zero_shape = [1] + zero_shape
x = x.unsqueeze(0)
zero_shape[dim] = 1
zero_slice = torch.zeros(zero_shape, dtype=dtype).squeeze(dim)
offset = 0
for i in range(0, orig_size):
range_i = (i - begin) % stride
if i >= begin and range_i < length:
result.append(x.select(dim, offset))
offset += 1
else:
result.append(zero_slice)
return torch.stack(result, dim=dim)
if type == "index":
assert len(attr) == 4, "Index should have 4 attributes"
dim, start, stop, stride = attr
if dim >= 0:
dim -= len(ops[0].shape)
if dim == -5:
return t_ops[0][..., start:stop:stride, :, :, :, :]
elif dim == -4:
return t_ops[0][..., start:stop:stride, :, :, :]
elif dim == -3:
return t_ops[0][..., start:stop:stride, :, :]
elif dim == -2:
return t_ops[0][..., start:stop:stride, :]
elif dim == -1:
return t_ops[0][..., start:stop:stride]
else:
raise NotImplementedError(f"Dim={dim}")
if type == "adv_index":
assert len(attr) == 1, "AdvIndex should have 1 attributes"
dim = attr[0]
assert dim == 0, "Currently not supported"
if len(t_ops[1].shape) > 1:
if len(t_ops[0].shape) > len(t_ops[1].shape) and t_ops[0].shape[0] == 1:
# Padded
ret = torch.unsqueeze(t_ops[0][0][t_ops[1].numpy()], 0)
else:
ret = torch.unsqueeze(t_ops[0][t_ops[1].numpy()], 0)
else:
> ret = t_ops[0][t_ops[1].numpy()]
E IndexError: index 616 is out of bounds for dimension 0 with size 169
/opt/ttforge-toolchain/venv/lib/python3.10/site-packages/forge/op/eval/forge/tm.py:123: IndexError
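The two Swin failures (Advindex3 and Advindex5) follow the same pattern, but the shapes are instructive: for a 7x7 attention window the relative-position-bias table has (2*7-1)**2 = 169 rows, and the relative-position index has 49*49 = 2401 entries whose valid values lie in [0, 168]. A short sketch under that assumption (the table width and variable names here are illustrative, not taken from the model):

import torch

window = 7
bias_table = torch.rand((2 * window - 1) ** 2, 3)     # 169 rows; real models use num_heads columns
valid_index = torch.randint(0, bias_table.shape[0], ((window * window) ** 2,), dtype=torch.int64)
_ = bias_table[valid_index]                           # shape (2401, 3), no IndexError

# Indices drawn uniformly up to max_int=1000, as in the harness, can reach 659 or 616
# and overflow the 169-row table, producing the IndexError reported above.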
Check failure on line 1472 in forge/test/models_ops/test_avgpool2d.py
github-actions / TT-Forge-FE Tests
test_avgpool2d.test_module[Avgpool2D9-[((1, 1536, 8, 8), torch.float32)]]
RuntimeError: Tensor 1 - stride mismatch: expected [64, 1], got [0, 0]
Raw output
forge_module_and_shapes_dtypes = (<class 'test.models_ops.test_avgpool2d.Avgpool2D9'>, [((1, 1536, 8, 8), torch.float32)], {'model_name': ['pt_inceptio... 'op_params': {'ceil_mode': 'False', 'channel_last': '0', 'count_include_pad': 'False', 'kernel_size': '[3, 3]', ...}})
record_forge_property = <function record_property.<locals>.append_property at 0x7fb5d0aaee60>
@pytest.mark.push
@pytest.mark.parametrize("forge_module_and_shapes_dtypes", forge_modules_and_shapes_dtypes_list, ids=ids_func)
def test_module(forge_module_and_shapes_dtypes, record_forge_property):
record_forge_property("op_name", "AvgPool2d")
forge_module, operand_shapes_dtypes, metadata = forge_module_and_shapes_dtypes
pcc = metadata.pop("pcc")
for metadata_name, metadata_value in metadata.items():
record_forge_property(metadata_name, metadata_value)
max_int = 1000
inputs = [
Tensor.create_from_shape(operand_shape, operand_dtype, max_int=max_int)
for operand_shape, operand_dtype in operand_shapes_dtypes
]
framework_model = forge_module(forge_module.__name__)
framework_model.process_framework_parameters()
for name, parameter in framework_model._parameters.items():
parameter_tensor = Tensor.create_torch_tensor(
shape=parameter.shape.get_pytorch_shape(), dtype=parameter.pt_data_format, max_int=max_int
)
framework_model.set_parameter(name, parameter_tensor)
for name, constant in framework_model._constants.items():
constant_tensor = Tensor.create_torch_tensor(
shape=constant.shape.get_pytorch_shape(), dtype=constant.pt_data_format, max_int=max_int
)
framework_model.set_constant(name, constant_tensor)
compiled_model = compile(framework_model, sample_inputs=inputs)
> verify(inputs, framework_model, compiled_model, VerifyConfig(value_checker=AutomaticValueChecker(pcc=pcc)))
forge/test/models_ops/test_avgpool2d.py:1472:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
/opt/ttforge-toolchain/venv/lib/python3.10/site-packages/forge/verify/verify.py:302: in verify
co_out = compiled_model(*inputs)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <forge.compiled_graph_state.CompiledModel object at 0x7fb5c8d0ab60>
inputs = (Forge Tensor: tensor([[[[0.4963, 0.7682, 0.0885, ..., 0.6341, 0.4901, 0.8964],
[0.4556, 0.6323, 0.3489, ......, 0.0106, 0.4972, 0.0324],
[0.6137, 0.5355, 0.7051, ..., 0.0196, 0.1848, 0.0096]]]]), DataFormat.Float32,)
inputs_and_parameters = [tensor([[[[0.4963, 0.7682, 0.0885, ..., 0.6341, 0.4901, 0.8964],
[0.4556, 0.6323, 0.3489, ..., 0.1689, 0....1]]],
[[[0.1111, 0.1111, 0.1111],
[0.1111, 0.1111, 0.1111],
[0.1111, 0.1111, 0.1111]]]])]
def __call__(self, *inputs: AnyTensor) -> List[torch.Tensor]:
"""
Run inference on the compiled model.
Parameters
----------
inputs: [Tensor, ...]
Input tensors
Returns
-------
List[Tensor]
Output tensors
"""
self.inputs = [*to_pt_tensors(inputs)]
inputs_and_parameters = [
*self.inputs,
*self.fwd_compiled_graph_state.get_ordered_constant_tensors(),
*self.fwd_compiled_graph_state.get_ordered_parameter_tensors(),
]
assert all(
[isinstance(t, torch.Tensor) for t in inputs_and_parameters]
), "All inputs should be torch tensors by now."
if self.training() and isinstance(self.framework_module, PyTorchModule):
for name, param in self.framework_module.module.named_parameters():
if param.requires_grad:
our_tensor = self.fwd_compiled_graph_state.get_parameter_tensor(name)
# NOTE: for parameters that require gradients, we want to share the same tensor with the PyTorch
# module. This is because we want to be able to optimize the parameters both on the device
# (through our runtime) and via the torch optimizers. So this ensures that whichever side updates
# the parameter value, the other side can see the change.
#
# This could change in the future, but for now ensure that our premise is correct.
assert param is our_tensor
logger.info(
f"Running model {self.framework_module.get_name()} {self.fwd_compiled_graph_state.graph.get_name()} on device..."
)
> all_outputs = run_binary(self.compiled_binary, int(ProgramId.FORWARD), inputs_and_parameters)
E RuntimeError: Tensor 1 - stride mismatch: expected [64, 1], got [0, 0]
/opt/ttforge-toolchain/venv/lib/python3.10/site-packages/forge/compiled_graph_state.py:254: RuntimeError
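The AvgPool2d failure is different in kind: the runtime rejects an operand whose strides are [0, 0], which is the signature of a broadcast (expanded) view rather than a dense tensor; the 0.1111 constant visible in inputs_and_parameters looks like a 3x3 averaging kernel produced that way. The sketch below shows how zero strides arise and one plausible host-side mitigation (materializing the constant before it reaches run_binary); this is an illustration, not the actual fix in Forge.

import torch

kernel = torch.tensor(1.0 / 9).expand(3, 3)   # broadcast view of a scalar, the 0.1111 values in the log
print(kernel.stride())                        # (0, 0) -- no real per-element storage

dense = kernel.contiguous()                   # materialize a dense copy
print(dense.stride())                         # (3, 1) -- strides a dense-layout runtime expects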
Check failure on line 170 in forge/test/models_ops/test_broadcast.py
github-actions / TT-Forge-FE Tests
test_broadcast.test_module[Broadcast0-[((1, 1, 1, 128), torch.bool)]]
RuntimeError: Generated MLIR module failed verification.
Raw output
forge_module_and_shapes_dtypes = (<class 'test.models_ops.test_broadcast.Broadcast0'>, [((1, 1, 1, 128), torch.bool)], {'model_name': ['pt_distilbert_d...sed_ner_hrl_token_cls_hf', 'pt_distilbert_distilbert_base_uncased_mlm_hf'], 'op_params': {'dim': '-3', 'shape': '12'}})
record_forge_property = <function record_property.<locals>.append_property at 0x7fb5d0a09750>
@pytest.mark.push
@pytest.mark.parametrize("forge_module_and_shapes_dtypes", forge_modules_and_shapes_dtypes_list, ids=ids_func)
def test_module(forge_module_and_shapes_dtypes, record_forge_property):
record_forge_property("op_name", "Broadcast")
forge_module, operand_shapes_dtypes, metadata = forge_module_and_shapes_dtypes
pcc = metadata.pop("pcc")
for metadata_name, metadata_value in metadata.items():
record_forge_property(metadata_name, metadata_value)
max_int = 1000
inputs = [
Tensor.create_from_shape(operand_shape, operand_dtype, max_int=max_int)
for operand_shape, operand_dtype in operand_shapes_dtypes
]
framework_model = forge_module(forge_module.__name__)
framework_model.process_framework_parameters()
for name, parameter in framework_model._parameters.items():
parameter_tensor = Tensor.create_torch_tensor(
shape=parameter.shape.get_pytorch_shape(), dtype=parameter.pt_data_format, max_int=max_int
)
framework_model.set_parameter(name, parameter_tensor)
for name, constant in framework_model._constants.items():
constant_tensor = Tensor.create_torch_tensor(
shape=constant.shape.get_pytorch_shape(), dtype=constant.pt_data_format, max_int=max_int
)
framework_model.set_constant(name, constant_tensor)
> compiled_model = compile(framework_model, sample_inputs=inputs)
forge/test/models_ops/test_broadcast.py:170:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
/opt/ttforge-toolchain/venv/lib/python3.10/site-packages/forge/compile.py:251: in compile_main
return forge_compile_from_context(compile_context)
/opt/ttforge-toolchain/venv/lib/python3.10/site-packages/forge/compile.py:293: in forge_compile_from_context
next_stage = stage_to_func[current_stage](context)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
context = CompileContext(modules=[Module Broadcast0], graph_name='Broadcast0', compiler_cfg=CompilerConfig(enable_training=False...cles_offset=0, forge_module=<forge._C.ForgeGraphModule object at 0x7fb619d233b0>, compiled_binary=None, attach_to=None)
def run_mlir_compiler(context: CompileContext) -> CompileDepth:
assert context.forge_module is not None
> context.compiled_binary = forge._C.run_mlir_compiler(context.forge_module)
E RuntimeError: Generated MLIR module failed verification.
/opt/ttforge-toolchain/venv/lib/python3.10/site-packages/forge/compile.py:985: RuntimeError
Check failure on line 172 in forge/test/models_ops/test_broadcast.py
github-actions / TT-Forge-FE Tests
test_broadcast.test_module[Broadcast1-[((1, 1, 1, 128), torch.bool)]]
RuntimeError: Tensor 0 - data type mismatch: expected UInt8, got Float32
Raw output
forge_module_and_shapes_dtypes = (<class 'test.models_ops.test_broadcast.Broadcast1'>, [((1, 1, 1, 128), torch.bool)], {'model_name': ['pt_distilbert_d...ased_ner_hrl_token_cls_hf', 'pt_distilbert_distilbert_base_uncased_mlm_hf'], 'op_params': {'dim': '-4', 'shape': '1'}})
record_forge_property = <function record_property.<locals>.append_property at 0x7fb5d0ae93f0>
@pytest.mark.push
@pytest.mark.parametrize("forge_module_and_shapes_dtypes", forge_modules_and_shapes_dtypes_list, ids=ids_func)
def test_module(forge_module_and_shapes_dtypes, record_forge_property):
record_forge_property("op_name", "Broadcast")
forge_module, operand_shapes_dtypes, metadata = forge_module_and_shapes_dtypes
pcc = metadata.pop("pcc")
for metadata_name, metadata_value in metadata.items():
record_forge_property(metadata_name, metadata_value)
max_int = 1000
inputs = [
Tensor.create_from_shape(operand_shape, operand_dtype, max_int=max_int)
for operand_shape, operand_dtype in operand_shapes_dtypes
]
framework_model = forge_module(forge_module.__name__)
framework_model.process_framework_parameters()
for name, parameter in framework_model._parameters.items():
parameter_tensor = Tensor.create_torch_tensor(
shape=parameter.shape.get_pytorch_shape(), dtype=parameter.pt_data_format, max_int=max_int
)
framework_model.set_parameter(name, parameter_tensor)
for name, constant in framework_model._constants.items():
constant_tensor = Tensor.create_torch_tensor(
shape=constant.shape.get_pytorch_shape(), dtype=constant.pt_data_format, max_int=max_int
)
framework_model.set_constant(name, constant_tensor)
compiled_model = compile(framework_model, sample_inputs=inputs)
> verify(inputs, framework_model, compiled_model, VerifyConfig(value_checker=AutomaticValueChecker(pcc=pcc)))
forge/test/models_ops/test_broadcast.py:172:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
/opt/ttforge-toolchain/venv/lib/python3.10/site-packages/forge/verify/verify.py:302: in verify
co_out = compiled_model(*inputs)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <forge.compiled_graph_state.CompiledModel object at 0x7fb5c8d85330>
inputs = (Forge Tensor: tensor([[[[0.4963, 0.7682, 0.0885, 0.1320, 0.3074, 0.6341, 0.4901, 0.8964,
0.4556, 0.6323, 0...5226, 0.5730, 0.6186,
0.6962, 0.5300, 0.2560, 0.7366, 0.0204, 0.2036, 0.3748, 0.2564]]]]), DataFormat.Int8,)
inputs_and_parameters = [tensor([[[[0.4963, 0.7682, 0.0885, 0.1320, 0.3074, 0.6341, 0.4901, 0.8964,
0.4556, 0.6323, 0.3489, 0.4017,...0.0784, 0.3756, 0.5226, 0.5730, 0.6186,
0.6962, 0.5300, 0.2560, 0.7366, 0.0204, 0.2036, 0.3748, 0.2564]]]])]
def __call__(self, *inputs: AnyTensor) -> List[torch.Tensor]:
"""
Run inference on the compiled model.
Parameters
----------
inputs: [Tensor, ...]
Input tensors
Returns
-------
List[Tensor]
Output tensors
"""
self.inputs = [*to_pt_tensors(inputs)]
inputs_and_parameters = [
*self.inputs,
*self.fwd_compiled_graph_state.get_ordered_constant_tensors(),
*self.fwd_compiled_graph_state.get_ordered_parameter_tensors(),
]
assert all(
[isinstance(t, torch.Tensor) for t in inputs_and_parameters]
), "All inputs should be torch tensors by now."
if self.training() and isinstance(self.framework_module, PyTorchModule):
for name, param in self.framework_module.module.named_parameters():
if param.requires_grad:
our_tensor = self.fwd_compiled_graph_state.get_parameter_tensor(name)
# NOTE: for parameters that require gradients, we want to share the same tensor with the PyTorch
# module. This is because we want to be able to optimize the parameters both on the device
# (through our runtime) and via the torch optimizers. So this ensures that whichever side updates
# the parameter value, the other side can see the change.
#
# This could change in the future, but for now ensure that our premise is correct.
assert param is our_tensor
logger.info(
f"Running model {self.framework_module.get_name()} {self.fwd_compiled_graph_state.graph.get_name()} on device..."
)
> all_outputs = run_binary(self.compiled_binary, int(ProgramId.FORWARD), inputs_and_parameters)
E RuntimeError: Tensor 0 - data type mismatch: expected UInt8, got Float32
/opt/ttforge-toolchain/venv/lib/python3.10/site-packages/forge/compiled_graph_state.py:254: RuntimeError
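Note: in the Broadcast1 case the module compiles, but run_binary rejects the first operand: the compiled graph expects UInt8, which is how a torch.bool operand is commonly represented on device, while the host-side tensor handed to the runtime is Float32 (the repr above shows float values for the (1, 1, 1, 128) torch.bool input). A minimal sketch of that dtype relationship in plain PyTorch, assuming a randomly generated mask; the cast shown is illustrative only, not the harness's actual fix.

import torch

mask = torch.rand(1, 1, 1, 128) > 0.5   # torch.bool, stored one byte per element
as_float = mask.to(torch.float32)       # what the runtime appears to receive in this failure
as_uint8 = mask.to(torch.uint8)         # matches the dtype the compiled binary reports expecting

print(as_float.dtype, as_uint8.dtype)   # torch.float32 torch.uint8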
Check failure on line 170 in forge/test/models_ops/test_broadcast.py
github-actions / TT-Forge-FE Tests
test_broadcast.test_module[Broadcast2-[((1, 12, 1, 128), torch.bool)]]
RuntimeError: Generated MLIR module failed verification.
Raw output
forge_module_and_shapes_dtypes = (<class 'test.models_ops.test_broadcast.Broadcast2'>, [((1, 12, 1, 128), torch.bool)], {'model_name': ['pt_distilbert_...ed_ner_hrl_token_cls_hf', 'pt_distilbert_distilbert_base_uncased_mlm_hf'], 'op_params': {'dim': '-2', 'shape': '128'}})
record_forge_property = <function record_property.<locals>.append_property at 0x7fb5d0a0ac20>
@pytest.mark.push
@pytest.mark.parametrize("forge_module_and_shapes_dtypes", forge_modules_and_shapes_dtypes_list, ids=ids_func)
def test_module(forge_module_and_shapes_dtypes, record_forge_property):
record_forge_property("op_name", "Broadcast")
forge_module, operand_shapes_dtypes, metadata = forge_module_and_shapes_dtypes
pcc = metadata.pop("pcc")
for metadata_name, metadata_value in metadata.items():
record_forge_property(metadata_name, metadata_value)
max_int = 1000
inputs = [
Tensor.create_from_shape(operand_shape, operand_dtype, max_int=max_int)
for operand_shape, operand_dtype in operand_shapes_dtypes
]
framework_model = forge_module(forge_module.__name__)
framework_model.process_framework_parameters()
for name, parameter in framework_model._parameters.items():
parameter_tensor = Tensor.create_torch_tensor(
shape=parameter.shape.get_pytorch_shape(), dtype=parameter.pt_data_format, max_int=max_int
)
framework_model.set_parameter(name, parameter_tensor)
for name, constant in framework_model._constants.items():
constant_tensor = Tensor.create_torch_tensor(
shape=constant.shape.get_pytorch_shape(), dtype=constant.pt_data_format, max_int=max_int
)
framework_model.set_constant(name, constant_tensor)
> compiled_model = compile(framework_model, sample_inputs=inputs)
forge/test/models_ops/test_broadcast.py:170:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
/opt/ttforge-toolchain/venv/lib/python3.10/site-packages/forge/compile.py:251: in compile_main
return forge_compile_from_context(compile_context)
/opt/ttforge-toolchain/venv/lib/python3.10/site-packages/forge/compile.py:293: in forge_compile_from_context
next_stage = stage_to_func[current_stage](context)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
context = CompileContext(modules=[Module Broadcast2], graph_name='Broadcast2', compiler_cfg=CompilerConfig(enable_training=False...cles_offset=0, forge_module=<forge._C.ForgeGraphModule object at 0x7fb5d0ad49f0>, compiled_binary=None, attach_to=None)
def run_mlir_compiler(context: CompileContext) -> CompileDepth:
assert context.forge_module is not None
> context.compiled_binary = forge._C.run_mlir_compiler(context.forge_module)
E RuntimeError: Generated MLIR module failed verification.
/opt/ttforge-toolchain/venv/lib/python3.10/site-packages/forge/compile.py:985: RuntimeError
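Note: Broadcast2 hits the same MLIR verification failure as Broadcast0, here for op_params dim=-2, shape=128 on a (1, 12, 1, 128) torch.bool operand. At the framework level the equivalent broadcast is well defined; a minimal PyTorch sketch of the intended semantics, using a random mask as a stand-in for the real input:

import torch

mask = torch.rand(1, 12, 1, 128) > 0.5                  # (1, 12, 1, 128) torch.bool
expanded = torch.broadcast_to(mask, (1, 12, 128, 128))  # broadcast dim -2 from 1 to 128
print(expanded.shape, expanded.dtype)                   # torch.Size([1, 12, 128, 128]) torch.bool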
Check failure on line 502 in forge/test/models_ops/test_cast.py
github-actions / TT-Forge-FE Tests
test_cast.test_module[Cast0-[((2, 13, 1), torch.int64)]]
ValueError: Data mismatch -> AutomaticValueChecker (compare_with_golden): framework_model=tensor([[[ 44.],
[239.],
[933.],
[760.],
[963.],
[379.],
[427.],
[503.],
[497.],
[683.],
[101.],
[866.],
[756.]],
[[399.],
[878.],
[376.],
[ 56.],
[868.],
[794.],
[ 33.],
[126.],
[119.],
[391.],
[254.],
[824.],
[841.]]]), compiled_model=tensor([[[ 44.],
[ 0.],
[239.],
[ 0.],
[933.],
[ 0.],
[760.],
[ 0.],
[963.],
[ 0.],
[379.],
[ 0.],
[427.]],
[[ 0.],
[503.],
[ 0.],
[497.],
[ 0.],
[683.],
[ 0.],
[101.],
[ 0.],
[866.],
[ 0.],
[756.],
[ 0.]]])
Raw output
forge_module_and_shapes_dtypes = (<class 'test.models_ops.test_cast.Cast0'>, [((2, 13, 1), torch.int64)], {'model_name': ['pt_stereo_facebook_musicgen_...sic_generation_hf', 'pt_stereo_facebook_musicgen_small_music_generation_hf'], 'op_params': {'dtype': 'torch.float32'}})
record_forge_property = <function record_property.<locals>.append_property at 0x7fb5d86bfd00>
@pytest.mark.push
@pytest.mark.parametrize("forge_module_and_shapes_dtypes", forge_modules_and_shapes_dtypes_list, ids=ids_func)
def test_module(forge_module_and_shapes_dtypes, record_forge_property):
record_forge_property("op_name", "Cast")
forge_module, operand_shapes_dtypes, metadata = forge_module_and_shapes_dtypes
pcc = metadata.pop("pcc")
for metadata_name, metadata_value in metadata.items():
record_forge_property(metadata_name, metadata_value)
max_int = 1000
inputs = [
Tensor.create_from_shape(operand_shape, operand_dtype, max_int=max_int)
for operand_shape, operand_dtype in operand_shapes_dtypes
]
framework_model = forge_module(forge_module.__name__)
framework_model.process_framework_parameters()
for name, parameter in framework_model._parameters.items():
parameter_tensor = Tensor.create_torch_tensor(
shape=parameter.shape.get_pytorch_shape(), dtype=parameter.pt_data_format, max_int=max_int
)
framework_model.set_parameter(name, parameter_tensor)
for name, constant in framework_model._constants.items():
constant_tensor = Tensor.create_torch_tensor(
shape=constant.shape.get_pytorch_shape(), dtype=constant.pt_data_format, max_int=max_int
)
framework_model.set_constant(name, constant_tensor)
compiled_model = compile(framework_model, sample_inputs=inputs)
> verify(inputs, framework_model, compiled_model, VerifyConfig(value_checker=AutomaticValueChecker(pcc=pcc)))
forge/test/models_ops/test_cast.py:502:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
/opt/ttforge-toolchain/venv/lib/python3.10/site-packages/forge/verify/verify.py:333: in verify
verify_cfg.value_checker.check(fw, co)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <forge.verify.value_checkers.AutomaticValueChecker object at 0x7fb5d81266b0>
fw_out = tensor([[[ 44.],
[239.],
[933.],
[760.],
[963.],
[379.],
[427.],... [ 33.],
[126.],
[119.],
[391.],
[254.],
[824.],
[841.]]])
co_out = tensor([[[ 44.],
[ 0.],
[239.],
[ 0.],
[933.],
[ 0.],
[760.],... [ 0.],
[101.],
[ 0.],
[866.],
[ 0.],
[756.],
[ 0.]]])
def check(self, fw_out, co_out):
if not compare_with_golden(fw_out, co_out, self.pcc, self.rtol, self.atol, self.dissimilarity_threshold):
> raise ValueError(
f"Data mismatch -> AutomaticValueChecker (compare_with_golden): framework_model={fw_out}, compiled_model={co_out}"
)
E ValueError: Data mismatch -> AutomaticValueChecker (compare_with_golden): framework_model=tensor([[[ 44.],
E [239.],
E [933.],
E [760.],
E [963.],
E [379.],
E [427.],
E [503.],
E [497.],
E [683.],
E [101.],
E [866.],
E [756.]],
E
E [[399.],
E [878.],
E [376.],
E [ 56.],
E [868.],
E [794.],
E [ 33.],
E [126.],
E [119.],
E [391.],
E [254.],
E [824.],
E [841.]]]), compiled_model=tensor([[[ 44.],
E [ 0.],
E [239.],
E [ 0.],
E [933.],
E [ 0.],
E [760.],
E [ 0.],
E [963.],
E [ 0.],
E [379.],
E [ 0.],
E [427.]],
E
E [[ 0.],
E [503.],
E [ 0.],
E [497.],
E [ 0.],
E [683.],
E [ 0.],
E [101.],
E [ 0.],
E [866.],
E [ 0.],
E [756.],
E [ 0.]]])
/opt/ttforge-toolchain/venv/lib/python3.10/site-packages/forge/verify/value_checkers.py:38: ValueError
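Note: the Cast0 mismatch has a recognizable pattern. The compiled output contains the framework values interleaved with zeros and spilled across rows, which is consistent with the int64 input buffer being read as 32-bit words (on a little-endian host each small int64 splits into its value followed by a zero high word) before the cast to float32. The diagnosis is an inference from the printed tensors, not something this log states; a minimal sketch that reproduces the interleaving with torch alone:

import torch

# First row of the framework output above, before the cast to float32.
vals = torch.tensor([44, 239, 933, 760, 963, 379, 427], dtype=torch.int64)
as_int32 = vals.view(torch.int32)  # reinterpret the same bytes as 32-bit words
print(as_int32)                    # -> 44, 0, 239, 0, 933, 0, 760, 0, ... (matches the compiled_model row pattern above)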