Skip to content

Commit

Permalink
Switch tests to use allclose vs almost
Browse files Browse the repository at this point in the history
  • Loading branch information
loadams committed Jan 3, 2025
1 parent 59ef06a commit 68d30cd
Show file tree
Hide file tree
Showing 2 changed files with 4 additions and 4 deletions.
4 changes: 2 additions & 2 deletions tests/unit/ops/transformer/inference/test_attention.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@
import torch
import deepspeed
from deepspeed.accelerator import get_accelerator
from .inference_test_utils import assert_almost_equal
from .inference_test_utils import allclose


# reference implementation
Expand Down Expand Up @@ -88,4 +88,4 @@ def test_attention(BATCH, H, N_CTX, D_HEAD, causal, use_flash, dtype=torch.float
use_triton_flash=False,
use_ds_attention=False)
tri_out = tri_out.reshape((BATCH, N_CTX, H, D_HEAD)).permute(0, 2, 1, 3)
assert_almost_equal(ref_out, tri_out)
assert (allclose(ref_out, tri_out))
4 changes: 2 additions & 2 deletions tests/unit/ops/transformer/inference/test_layer_norm.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@
from deepspeed.accelerator import get_accelerator
from deepspeed.ops.op_builder import InferenceBuilder
from deepspeed.ops.transformer.inference.op_binding.layer_norm import LayerNormOp
from .inference_test_utils import allclose, get_dtypes, assert_almost_equal
from .inference_test_utils import allclose, get_dtypes
try:
import triton # noqa: F401 # type: ignore
from deepspeed.ops.transformer.inference.triton import (
Expand Down Expand Up @@ -188,4 +188,4 @@ def test_triton_layer_norm(M, N, dtype, residual, input_bias, eps=1e-5, device='
y_ref = torch.nn.functional.layer_norm(x + res + (x_bias if input_bias else 0), w_shape, weight, bias,
eps).to(dtype)
# compare
assert_almost_equal(y_tri, y_ref)
assert (allclose(y_tri, y_ref))

0 comments on commit 68d30cd

Please sign in to comment.