Skip to content

Commit

Permalink
Add SOFT_ASSERT to gracefully recover from invariant violations (pyto…
Browse files Browse the repository at this point in the history
…rch#82689)

Summary: Implement SOFT_ASSERT, which fails in debug mode but only triggers a warning log in release mode. This allows us to gracefully handle invariant violations when processing traces that don't necessarily need to crash the entire program.

Test Plan: Added SOFT_ASSERT test in containers.cpp

Differential Revision: D38327334

Pull Request resolved: pytorch#82689
Approved by: https://github.com/robieta
  • Loading branch information
davidchencsl authored and pytorchmergebot committed Aug 10, 2022
1 parent cda210e commit 90821aa
Show file tree
Hide file tree
Showing 4 changed files with 52 additions and 0 deletions.
15 changes: 15 additions & 0 deletions test/cpp/profiler/containers.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -77,3 +77,18 @@ TEST(ProfilerTest, clock_converter) {
EXPECT_LT(std::abs(deltas[n / 2]), 200);
EXPECT_LT(deltas[n * 3 / 4] - deltas[n / 4], 50);
}

// Verifies SOFT_ASSERT's three modes: forced raise, forced warn-only, and the
// build-type default restored via c10::nullopt.
TEST(ProfilerTest, soft_assert) {
// A passing condition yields true regardless of configuration.
EXPECT_TRUE(SOFT_ASSERT(true));
// Force-enable raising: a failing SOFT_ASSERT must throw.
torch::profiler::impl::setSoftAssertRaises(true);
EXPECT_ANY_THROW(SOFT_ASSERT(false));
// Force-disable raising: a failing SOFT_ASSERT only warns, never throws.
torch::profiler::impl::setSoftAssertRaises(false);
EXPECT_NO_THROW(SOFT_ASSERT(false));
// Reset soft assert behavior to default
torch::profiler::impl::setSoftAssertRaises(c10::nullopt);
// Default depends on build type: release (NDEBUG) warns, debug raises.
#ifdef NDEBUG
EXPECT_NO_THROW(SOFT_ASSERT(false));
#else
EXPECT_ANY_THROW(SOFT_ASSERT(false));
#endif
}
2 changes: 2 additions & 0 deletions torch/csrc/autograd/init.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -336,6 +336,8 @@ PyObject* THPAutograd_initExtension(PyObject* _unused, PyObject* unused) {
.def_property_readonly("duration_time_ns", [](const Result& r) {
return r.endTimeNS() - r.start_time_ns_;
});

m.def("_soft_assert_raises", &setSoftAssertRaises);
}

py::class_<ProfilerResult>(m, "_ProfilerResult")
Expand Down
18 changes: 18 additions & 0 deletions torch/csrc/profiler/util.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -85,6 +85,24 @@ std::function<time_t(approx_time_t)> ApproximateClockToUnixTimeConverter::
};
}

namespace {
// File-local override for SOFT_ASSERT failure behavior. c10::nullopt means
// "use the build-type default" (see softAssertRaises()).
// NOTE(review): plain global with no synchronization — presumably only set
// from tests / single-threaded setup; confirm if called concurrently.
c10::optional<bool> soft_assert_raises_;
} // namespace

// Sets (or, when passed c10::nullopt, clears) the override controlling
// whether a failed SOFT_ASSERT raises instead of only warning.
void setSoftAssertRaises(c10::optional<bool> value) {
  soft_assert_raises_ = value;
}

// Returns whether a failed SOFT_ASSERT should raise (throw through
// TORCH_INTERNAL_ASSERT) rather than merely log a warning. An explicit
// override installed via setSoftAssertRaises() takes precedence; otherwise
// the build type decides: raise in debug builds, warn in release (NDEBUG).
bool softAssertRaises() {
#ifdef NDEBUG
  constexpr bool kRaiseByDefault = false;
#else
  constexpr bool kRaiseByDefault = true;
#endif
  return soft_assert_raises_.value_or(kRaiseByDefault);
}

// ----------------------------------------------------------------------------
// -- NVTX --------------------------------------------------------------------
// ----------------------------------------------------------------------------
Expand Down
17 changes: 17 additions & 0 deletions torch/csrc/profiler/util.h
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@

#include <ATen/record_function.h>
#include <c10/macros/Macros.h>
#include <c10/util/Optional.h>
#include <torch/csrc/Export.h>
#include <torch/csrc/jit/frontend/source_range.h>

Expand All @@ -35,9 +36,25 @@
#endif
#endif

// TODO: replace with pytorch/rfcs#43 when it is ready.
// SOFT_ASSERT(cond, ...): checks `cond`; on failure it either raises via
// TORCH_INTERNAL_ASSERT (debug-build default, or when forced through
// setSoftAssertRaises(true)) or emits TORCH_WARN (release default). Expands
// to a bool — true when the condition held — so callers can bail out:
//   if (!SOFT_ASSERT(ptr != nullptr)) return;
// Fix: `cond` is now evaluated exactly once. The previous version
// re-evaluated it inside TORCH_INTERNAL_ASSERT on the failure path, which
// duplicated side effects and could skip the raise entirely if a racy or
// stateful expression turned true on the second evaluation. (Trade-off: the
// assert message stringifies the captured flag, not the original
// expression.) Note: no message text is prepended before __VA_ARGS__ on
// purpose — with an empty variadic part that would leave an invalid trailing
// comma inside the underlying c10::str(...) call.
#define SOFT_ASSERT(cond, ...)                              \
  [&]() -> bool {                                           \
    const bool soft_assert_ok = static_cast<bool>(cond);    \
    if (C10_UNLIKELY(!soft_assert_ok)) {                    \
      if (torch::profiler::impl::softAssertRaises()) {      \
        TORCH_INTERNAL_ASSERT(soft_assert_ok, __VA_ARGS__); \
      } else {                                              \
        TORCH_WARN(__VA_ARGS__);                            \
      }                                                     \
      return false;                                         \
    }                                                       \
    return true;                                            \
  }()

namespace torch {
namespace profiler {
namespace impl {
// Whether a failed SOFT_ASSERT raises (TORCH_INTERNAL_ASSERT) instead of
// only warning; returns the override if one was set, else the build default.
TORCH_API bool softAssertRaises();
// Overrides SOFT_ASSERT failure behavior: true => raise, false => warn only,
// c10::nullopt => restore the build-type default.
TORCH_API void setSoftAssertRaises(c10::optional<bool> value);

using time_t = int64_t;
using steady_clock_t = std::conditional<
Expand Down

0 comments on commit 90821aa

Please sign in to comment.