diff --git a/CMakeLists.txt b/CMakeLists.txt
index 0896e0f0c..4e03a2fdb 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -182,7 +182,7 @@ endif()
 
 if(WITH_BOOST_FS)
     message(STATUS "With boost filesystem")
-    find_package(Boost 1.63.0 COMPONENTS system filesystem serialization REQUIRED)
+    find_package(Boost 1.63.0 COMPONENTS system filesystem REQUIRED)
     add_definitions(-DWITH_BOOST_FS)
     SET(FILESYSTEM_LIBRARIES "${Boost_FILESYSTEM_LIBRARY};${Boost_SYSTEM_LIBRARY}")
 else()
diff --git a/include/nifty/skeletons/evaluation.hxx b/include/nifty/skeletons/evaluation.hxx
index d098a77e7..173c2cabd 100644
--- a/include/nifty/skeletons/evaluation.hxx
+++ b/include/nifty/skeletons/evaluation.hxx
@@ -2,8 +2,9 @@
 #include "boost/geometry/index/rtree.hpp"
 #include "boost/serialization/map.hpp"
 #include "boost/serialization/unordered_map.hpp"
-#include "boost/archive/binary_iarchive.hpp"
-#include "boost/archive/binary_oarchive.hpp"
+// can't build with boost serialization
+//#include "boost/archive/binary_iarchive.hpp"
+//#include "boost/archive/binary_oarchive.hpp"
 
 #include "nifty/z5/z5.hxx"
 #include "nifty/parallel/threadpool.hxx"
@@ -70,6 +71,8 @@ namespace skeletons {
     // API
     public:
 
+        // needs boost serialization
+        /*
         // constructor from serialization
         SkeletonMetrics(const std::string & segmentationPath,
                         const std::string & skeletonTopFolder,
@@ -79,6 +82,7 @@
                         skeletonIds_(skeletonIds){
            deserialize(dictSerialization);
        }
+        */
 
        // constructor from data
        SkeletonMetrics(const std::string & segmentationPath,
@@ -135,6 +139,9 @@
        // the label id(s) that contain the merge
        void getNodesInFalseMergeLabels(std::map> &, const int) const;
 
+        // we can't build with boost::serialization right now
+        // best would be to reimplement this
+        /*
        // serialize and deserialize node dictionary with boost::serialization
        void serialize(const std::string & path) const {
            std::ofstream os(path.c_str(), std::ofstream::out | std::ofstream::binary);
@@ -147,6 +154,7 @@
            boost::archive::binary_iarchive iarch(is);
            iarch >> skeletonDict_;
        }
+        */
 
        // group skeleton to blocks (= chunks of the segmentation)
        void groupSkeletonBlocks(SkeletonBlockStorage &, std::vector &, parallel::ThreadPool &);
diff --git a/src/python/lib/skeletons/CMakeLists.txt b/src/python/lib/skeletons/CMakeLists.txt
index da1c67bc2..0f432813d 100644
--- a/src/python/lib/skeletons/CMakeLists.txt
+++ b/src/python/lib/skeletons/CMakeLists.txt
@@ -8,6 +8,5 @@ addPythonModule(
     LIBRRARIES
         ${Z5_COMPRESSION_LIBRARIES}
         ${FILESYSTEM_LIBRARIES}
-        ${Boost_SERIALIZATION_LIBRARY}
         Threads::Threads
 )
diff --git a/src/python/lib/skeletons/evaluation.cxx b/src/python/lib/skeletons/evaluation.cxx
index 390775348..cc31a06af 100644
--- a/src/python/lib/skeletons/evaluation.cxx
+++ b/src/python/lib/skeletons/evaluation.cxx
@@ -19,8 +19,9 @@ namespace skeletons {
 
        py::class_(module, "SkeletonMetrics")
            .def(py::init &, const int>())
-            .def(py::init &, const std::string &>())
+            // can't build with boost serialization
+            //.def(py::init &, const std::string &>())
            //
            .def("getNodeAssignments", [](const SelfType & self){return self.getNodeAssignments();})
            .def("computeSplitScores", [](const SelfType & self, const int numberOfThreads){
@@ -86,10 +87,11 @@
                return out;
            }, py::arg("resolution"), py::arg("numberOfThreads")=-1)
            //
-            .def("serialize", [](const SelfType & self,
-                                 const std::string & serializationPath){
-                self.serialize(serializationPath);
-            }, py::arg("serializationPath"))
+            // can't build with boost serialization
+            //.def("serialize", [](const SelfType & self,
+            //                     const std::string & serializationPath){
+            //    self.serialize(serializationPath);
+            //}, py::arg("serializationPath"))
            //
            .def("mergeFalseSplitNodes", [](const SelfType & self, const int numberOfThreads){
                std::map>> out;
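
Note on the disabled serialization: the patch comments out the boost::archive based serialize/deserialize of the node dictionary ("best would be to reimplement this"). A boost-free replacement could be a plain binary dump of the nested map, roughly as sketched below. This sketch is not part of the patch: the type alias NodeDict is an assumed stand-in for the real skeletonDict_ layout, and the helper names serializeDict / deserializeDict are hypothetical.

// Hypothetical sketch: boost-free binary (de)serialization of a nested map.
// NodeDict is an assumed stand-in type, NOT the actual skeletonDict_ layout.
#include <cstdint>
#include <fstream>
#include <map>
#include <string>
#include <unordered_map>

typedef std::unordered_map<uint64_t, std::map<uint64_t, uint64_t>> NodeDict;

inline void serializeDict(const NodeDict & dict, const std::string & path) {
    std::ofstream os(path.c_str(), std::ofstream::out | std::ofstream::binary);
    const uint64_t nSkeletons = dict.size();
    os.write(reinterpret_cast<const char *>(&nSkeletons), sizeof(nSkeletons));
    for(const auto & skel : dict) {
        // write skeleton id, number of nodes, then (node id, value) pairs
        os.write(reinterpret_cast<const char *>(&skel.first), sizeof(skel.first));
        const uint64_t nNodes = skel.second.size();
        os.write(reinterpret_cast<const char *>(&nNodes), sizeof(nNodes));
        for(const auto & node : skel.second) {
            os.write(reinterpret_cast<const char *>(&node.first), sizeof(node.first));
            os.write(reinterpret_cast<const char *>(&node.second), sizeof(node.second));
        }
    }
}

inline void deserializeDict(NodeDict & dict, const std::string & path) {
    std::ifstream is(path.c_str(), std::ifstream::in | std::ifstream::binary);
    uint64_t nSkeletons;
    is.read(reinterpret_cast<char *>(&nSkeletons), sizeof(nSkeletons));
    for(uint64_t i = 0; i < nSkeletons; ++i) {
        uint64_t skelId, nNodes;
        is.read(reinterpret_cast<char *>(&skelId), sizeof(skelId));
        is.read(reinterpret_cast<char *>(&nNodes), sizeof(nNodes));
        auto & nodes = dict[skelId];
        for(uint64_t j = 0; j < nNodes; ++j) {
            uint64_t nodeId, value;
            is.read(reinterpret_cast<char *>(&nodeId), sizeof(nodeId));
            is.read(reinterpret_cast<char *>(&value), sizeof(value));
            nodes[nodeId] = value;
        }
    }
}

Such a hand-rolled format would avoid the Boost.Serialization link dependency entirely (so Boost_SERIALIZATION_LIBRARY can stay removed from the CMake files), at the cost of fixing the on-disk layout by hand; it is only a starting point, since the real skeletonDict_ value type would need its own read/write logic.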