Trim libshm deps, move tempfile.h to c10 (pytorch#17019)
Summary:
libshm_manager doesn't need to depend on all of libtorch. It only uses the tiny tempfile.h, which can be moved to c10. I could just duplicate the file instead, but it's not worth it since c10 is small enough.
Pull Request resolved: pytorch#17019

Differential Revision: D14052688

Pulled By: dzhulgakov

fbshipit-source-id: 8797d15f8c7c49c49d40b7ab2f43aa3bf6becb0c
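
In practice, the change means call sites include the header from c10 and use the same helpers under the c10 namespace. A minimal usage sketch, with the signatures inferred from the diffs below (the sketch itself is not part of the commit):

#include <c10/util/tempfile.h>

int main() {
  // make_tempfile() creates a temporary file; the returned c10::TempFile
  // exposes the generated path via its .name member.
  c10::TempFile file = c10::make_tempfile(/*name_prefix=*/"torch-file-");

  // try_make_tempfile() is the non-throwing variant used by libshm_manager;
  // it returns a c10::optional<c10::TempFile> that is empty on failure.
  auto maybe_file = c10::try_make_tempfile(/*name_prefix=*/"torch-shm-file-");
  if (!maybe_file.has_value()) {
    return 1;  // could not create a temporary file
  }
  return 0;
}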
Dmytro Dzhulgakov authored and facebook-github-bot committed Feb 14, 2019
1 parent d25fee3 commit 46503a7
Showing 6 changed files with 21 additions and 24 deletions.
c10/test/util/tempfile_test.cpp (9 additions, 0 deletions)
@@ -0,0 +1,9 @@
+#include <c10/util/tempfile.h>
+#include <gtest/gtest.h>
+
+#if !defined(_WIN32)
+TEST(TempFileTest, MatchesExpectedPattern) {
+  c10::TempFile pattern = c10::make_tempfile("test-pattern-");
+  ASSERT_NE(pattern.name.find("test-pattern-"), std::string::npos);
+}
+#endif // !defined(_WIN32)
torch/csrc/utils/tempfile.h → c10/util/tempfile.h (2 additions, 5 deletions)
@@ -2,7 +2,6 @@

 #include <c10/util/Exception.h>
 #include <c10/util/Optional.h>
-#include <torch/csrc/WindowsTorchApiMacro.h>

 #include <cerrno>
 #include <cstdio>
@@ -16,8 +15,7 @@
 #include <unistd.h>
 #endif

-namespace torch {
-namespace utils {
+namespace c10 {
 namespace detail {
 // Creates the filename pattern passed to and completed by `mkstemp`.
 // Returns std::vector<char> because `mkstemp` needs a (non-const) `char*` and
@@ -102,5 +100,4 @@ inline TempFile make_tempfile(std::string name_prefix = "torch-file-") {
   }
   AT_ERROR("Error generating temporary file: ", std::strerror(errno));
 }
-} // namespace utils
-} // namespace torch
+} // namespace c10
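
For context on the header being moved: the comment in the hunk above describes building a filename pattern that `mkstemp` completes. The following is a rough, self-contained sketch of that POSIX mechanism, written for illustration only; it is not the actual c10 implementation (which, among other things, likely resolves the temporary directory more carefully than the hard-coded /tmp used here).

#include <stdlib.h>   // mkstemp (POSIX)
#include <unistd.h>   // close
#include <string>
#include <vector>

struct SketchTempFile {
  std::string name;  // path of the created temporary file; empty on failure
};

// Build a mutable "prefixXXXXXX" pattern, let mkstemp fill in the unique
// suffix and create the file, then return the resulting path.
inline SketchTempFile sketch_make_tempfile(const std::string& prefix = "torch-file-") {
  std::string pattern = "/tmp/" + prefix + "XXXXXX";
  // mkstemp rewrites its argument in place, so it needs a non-const char*.
  std::vector<char> filename(pattern.begin(), pattern.end());
  filename.push_back('\0');
  const int fd = mkstemp(filename.data());
  if (fd == -1) {
    return SketchTempFile{};  // caller can check for an empty name
  }
  close(fd);  // this sketch keeps only the path; the real header may keep the fd open
  return SketchTempFile{std::string(filename.data())};
}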
test/cpp/api/misc.cpp (0 additions, 8 deletions)
@@ -1,6 +1,5 @@
 #include <gtest/gtest.h>

-#include <torch/csrc/utils/tempfile.h>
 #include <torch/nn/init.h>
 #include <torch/nn/modules/linear.h>
 #include <torch/types.h>
@@ -52,10 +51,3 @@ TEST(NNInitTest, CanInitializeTensorThatRequiresGrad) {
       "has been used in an in-place operation");
   ASSERT_EQ(torch::nn::init::ones_(tensor).sum().item<int32_t>(), 12);
 }
-
-#if !defined(_WIN32)
-TEST(TempFileTest, MatchesExpectedPattern) {
-  torch::utils::TempFile pattern = torch::utils::make_tempfile("test-pattern-");
-  ASSERT_NE(pattern.name.find("test-pattern-"), std::string::npos);
-}
-#endif // !defined(_WIN32)
test/cpp/api/serialize.cpp (8 additions, 6 deletions)
@@ -1,5 +1,7 @@
 #include <gtest/gtest.h>

+#include <c10/util/tempfile.h>
+
 #include <torch/nn/modules/functional.h>
 #include <torch/nn/modules/linear.h>
 #include <torch/nn/modules/sequential.h>
@@ -54,7 +56,7 @@ TEST(SerializeTest, BasicToFile) {

   auto x = torch::randn({5, 5});

-  auto tempfile = torch::utils::make_tempfile();
+  auto tempfile = c10::make_tempfile();
   torch::save(x, tempfile.name);

   torch::Tensor y;
@@ -135,7 +137,7 @@ TEST(SerializeTest, XOR) {
     epoch++;
   }

-  auto tempfile = torch::utils::make_tempfile();
+  auto tempfile = c10::make_tempfile();
   torch::save(model, tempfile.name);
   torch::load(model2, tempfile.name);

@@ -149,7 +151,7 @@ TEST(SerializeTest, Optim) {
   auto model3 = Linear(5, 2);

   // Models 1, 2, 3 will have the same parameters.
-  auto model_tempfile = torch::utils::make_tempfile();
+  auto model_tempfile = c10::make_tempfile();
   torch::save(model1, model_tempfile.name);
   torch::load(model2, model_tempfile.name);
   torch::load(model3, model_tempfile.name);
@@ -194,7 +196,7 @@ TEST(SerializeTest, Optim) {
   // Do 2 steps of model 3 while saving the optimizer
   step(optim3, model3);

-  auto optim_tempfile = torch::utils::make_tempfile();
+  auto optim_tempfile = c10::make_tempfile();
   torch::save(optim3, optim_tempfile.name);
   torch::load(optim3_2, optim_tempfile.name);
   step(optim3_2, model3);
@@ -253,7 +255,7 @@ TEST(SerializeTest, XOR_CUDA) {
     epoch++;
   }

-  auto tempfile = torch::utils::make_tempfile();
+  auto tempfile = c10::make_tempfile();
   torch::save(model, tempfile.name);
   torch::load(model2, tempfile.name);

@@ -264,7 +266,7 @@ TEST(SerializeTest, XOR_CUDA) {
   loss = getLoss(model2, 100, true);
   ASSERT_LT(loss.item<float>(), 0.1);

-  auto tempfile2 = torch::utils::make_tempfile();
+  auto tempfile2 = c10::make_tempfile();
   torch::save(model2, tempfile2.name);
   torch::load(model3, tempfile2.name);

test/cpp/common/support.h (0 additions, 2 deletions)
@@ -1,7 +1,5 @@
 #pragma once

-#include <torch/csrc/utils/tempfile.h>
-
 #include <c10/util/Exception.h>

 #include <gtest/gtest.h>
torch/lib/libshm/manager.cpp (2 additions, 3 deletions)
@@ -9,8 +9,7 @@
 #include <memory>
 #include <unordered_map>

-#include <torch/csrc/utils/tempfile.h>
-#include <c10/util/Optional.h>
+#include <c10/util/tempfile.h>

 #include <libshm/err.h>
 #include <libshm/socket.h>
@@ -87,7 +86,7 @@ int main(int argc, char *argv[]) {

   std::unique_ptr<ManagerServerSocket> srv_socket;
   const auto tempfile =
-      torch::utils::try_make_tempfile(/*name_prefix=*/"torch-shm-file-");
+      c10::try_make_tempfile(/*name_prefix=*/"torch-shm-file-");
   try {
     if (!tempfile.has_value()) {
       throw std::runtime_error(
