Skip to content

Commit

Permalink
Include and use the InferenceEngines library from another GitHub project
Browse files Browse the repository at this point in the history
  • Loading branch information
olibartfast committed Jul 3, 2024
1 parent ece715d commit f7caa2b
Show file tree
Hide file tree
Showing 28 changed files with 61 additions and 1,098 deletions.
58 changes: 21 additions & 37 deletions CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -8,76 +8,60 @@ find_package(spdlog REQUIRED)

# NOTE(review): this region is a rendered GitHub diff hunk of CMakeLists.txt,
# not the literal file — removed (old) and added (new) lines appear adjacent
# with no +/- markers. Comments below flag each old/new pair; every original
# line is preserved byte-for-byte.
message(STATUS "Home path: $ENV{HOME}")


# Define the default backend if not set from the command line
if(NOT DEFINED DEFAULT_BACKEND)
unset(DEFAULT_BACKEND CACHE)
set(DEFAULT_BACKEND "LIBTORCH" CACHE STRING "Default backend for inference" FORCE)
endif()

# Define the supported backends
set(SUPPORTED_BACKENDS "ONNX_RUNTIME" "LIBTORCH" "LIBTENSORFLOW" "OPENCV_DNN" "TENSORRT" "OPENVINO")

# Check if the specified backend is supported
# Falls back to OPENCV_DNN when DEFAULT_BACKEND is not in the supported list.
list(FIND SUPPORTED_BACKENDS ${DEFAULT_BACKEND} SUPPORTED_BACKEND_INDEX)
if (SUPPORTED_BACKEND_INDEX EQUAL -1)
message(STATUS "Unsupported default backend: ${DEFAULT_BACKEND}")
set(DEFAULT_BACKEND "OPENCV_DNN" CACHE STRING "Default backend for inference" FORCE)
endif()

message(STATUS "Default backend: ${DEFAULT_BACKEND}")

# Fetch the VideoCapture project from GitHub
include(FetchContent)
FetchContent_Declare(
# NOTE(review): diff pair — old content name "VideoCaptureLib" (removed),
# new content name "VideoCapture" (added).
VideoCaptureLib
VideoCapture
GIT_REPOSITORY https://github.com/olibartfast/VideoCapture.git
GIT_TAG master # or the specific tag/branch you want to use
)

# NOTE(review): diff pair — old call (VideoCaptureLib, removed) followed by
# the new call (VideoCapture, added).
FetchContent_MakeAvailable(VideoCaptureLib)
FetchContent_MakeAvailable(VideoCapture)
message(STATUS "VideoCapture_SOURCE_DIR module path: ${VideoCapture_SOURCE_DIR}")


# NOTE(review): removed line from the old revision (VideoCaptureLib naming).
message(STATUS "VideoCaptureLib_SOURCE_DIR module path: ${VideoCaptureLib_SOURCE_DIR}")
# Fetch the InferenceEngines project from GitHub
include(FetchContent)
FetchContent_Declare(
InferenceEngines
GIT_REPOSITORY https://github.com/olibartfast/inference-engines.git
GIT_TAG master # or the specific tag/branch you want to use
)
FetchContent_MakeAvailable(InferenceEngines)
message(STATUS "InferenceEngines_SOURCE_DIR module path: ${InferenceEngines_SOURCE_DIR}")

set(DETECTORS_ROOT ${CMAKE_CURRENT_LIST_DIR}/components/detectors)
# NOTE(review): removed line — the in-tree components/inference-engines
# directory was replaced by the fetched InferenceEngines project.
set(INFER_ROOT ${CMAKE_CURRENT_LIST_DIR}/components/inference-engines)

list(APPEND CMAKE_MODULE_PATH ${CMAKE_CURRENT_LIST_DIR}/cmake)
message(STATUS "Cmake module path: ${CMAKE_MODULE_PATH}")

# Add subdirectories
add_subdirectory(${DETECTORS_ROOT})
# NOTE(review): removed line — ${INFER_ROOT} no longer built as a subdirectory.
add_subdirectory(${INFER_ROOT})

include(AddCompileDefinitions)

# Main executable
set(SOURCES main.cpp)



# NOTE(review): removed line — SelectBackend.cmake is deleted by this commit
# (see "cmake/SelectBackend.cmake — This file was deleted" below).
include(SelectBackend)

add_executable(${PROJECT_NAME} ${SOURCES})

# Include directories and link libraries
# NOTE(review): the VideoCaptureLib_* and components/inference-engines entries
# are the removed lines; the VideoCapture_* / InferenceEngines_* entries are
# their replacements.
target_include_directories(${PROJECT_NAME} PRIVATE
inc
${OpenCV_INCLUDE_DIRS}
${spdlog_INCLUDE_DIRS}
${VideoCaptureLib_SOURCE_DIR}/include
${VideoCaptureLib_SOURCE_DIR}/src
components/inference-engines/src
${VideoCapture_SOURCE_DIR}/include
${VideoCapture_SOURCE_DIR}/src
${InferenceEngines_SOURCE_DIR}/include
${InferenceEngines_SOURCE_DIR}/src
components/detectors/src
)

# NOTE(review): InferenceEnginesLib/VideoCaptureLib are the removed target
# names; InferenceEngines/VideoCapture are the new fetched-project targets.
target_link_libraries(${PROJECT_NAME} PRIVATE
spdlog::spdlog_header_only
${OpenCV_LIBS}
DetectorsLib
InferenceEnginesLib
VideoCaptureLib
InferenceEngines
VideoCapture
)


include(LinkBackend)
# NOTE(review): duplicated line is the diff's old/new pair for a
# newline-at-EOF fix; the real file includes SetCompilerFlags once.
include(SetCompilerFlags)
include(SetCompilerFlags)
33 changes: 33 additions & 0 deletions cmake/AddCompileDefinitions.cmake
Original file line number Diff line number Diff line change
@@ -0,0 +1,33 @@
# Configure the compile definition (USE_<BACKEND>) and any backend-specific
# packages/paths for the inference backend selected via DEFAULT_BACKEND.
# Expects DEFAULT_BACKEND to be one of:
#   OPENCV_DNN | ONNX_RUNTIME | LIBTORCH | TENSORRT | LIBTENSORFLOW | OPENVINO
# Comparisons are quoted so a value that happens to name another variable is
# not double-dereferenced by if() (CMP0054).
if("${DEFAULT_BACKEND}" STREQUAL "OPENCV_DNN")
    add_compile_definitions(USE_OPENCV_DNN)
elseif("${DEFAULT_BACKEND}" STREQUAL "ONNX_RUNTIME")
    set(ORT_VERSION "1.15.1" CACHE STRING "Onnx runtime version") # modify accordingly
    set(ONNX_RUNTIME_DIR $ENV{HOME}/onnxruntime-linux-x64-gpu-${ORT_VERSION} CACHE PATH "Path to onnxruntime")
    message(STATUS "Onnx runtime version: ${ORT_VERSION}")
    message(STATUS "Onnx runtime directory: ${ONNX_RUNTIME_DIR}")
    # CUDA is optional here: warn and fall back to CPU execution when absent.
    find_package(CUDA)
    if(CUDA_FOUND)
        message(STATUS "Found CUDA")
        set(CUDA_TOOLKIT_ROOT_DIR /usr/local/cuda)
    else()
        message(WARNING "CUDA not found. GPU support will be disabled.")
    endif()
    add_compile_definitions(USE_ONNX_RUNTIME)
elseif("${DEFAULT_BACKEND}" STREQUAL "LIBTORCH")
    set(Torch_DIR $ENV{HOME}/libtorch/share/cmake/Torch/ CACHE PATH "Path to libtorch")
    find_package(Torch REQUIRED)
    add_compile_definitions(USE_LIBTORCH)
elseif("${DEFAULT_BACKEND}" STREQUAL "TENSORRT")
    set(TRT_VERSION "8.6.1.6" CACHE STRING "Tensorrt version") # modify accordingly
    # CACHE PATH (like ONNX_RUNTIME_DIR/Torch_DIR above) so the install
    # location can be overridden from the command line.
    set(TENSORRT_DIR $ENV{HOME}/TensorRT-${TRT_VERSION}/ CACHE PATH "Path to TensorRT")
    message(STATUS "TENSORRT_DIR: ${TENSORRT_DIR}")
    find_package(CUDA REQUIRED)
    include(QueryGpu) # queries the GPU compute capability via nvidia-smi
    add_compile_definitions(USE_TENSORRT)
elseif("${DEFAULT_BACKEND}" STREQUAL "LIBTENSORFLOW")
    find_package(TensorFlow REQUIRED)
    add_compile_definitions(USE_LIBTENSORFLOW)
elseif("${DEFAULT_BACKEND}" STREQUAL "OPENVINO")
    find_package(OpenVINO REQUIRED)
    add_compile_definitions(USE_OPENVINO)
else()
    # Fail loudly instead of silently building with no USE_* definition.
    message(FATAL_ERROR "Unsupported DEFAULT_BACKEND: ${DEFAULT_BACKEND}")
endif()
16 changes: 0 additions & 16 deletions cmake/LibTensorFlow.cmake

This file was deleted.

20 changes: 0 additions & 20 deletions cmake/LibTorch.cmake

This file was deleted.

12 changes: 6 additions & 6 deletions cmake/LinkBackend.cmake
Original file line number Diff line number Diff line change
@@ -1,21 +1,21 @@
# Include framework-specific source files and libraries
# NOTE(review): this region is a rendered diff hunk — in each backend branch
# the ${INFER_ROOT}/src/... line is the removed (old) form and the adjacent
# ${InferenceEngines_SOURCE_DIR}/src/... line is its added replacement. All
# original lines are preserved byte-for-byte.
if (DEFAULT_BACKEND STREQUAL "OPENCV_DNN")
# old/new pair:
target_include_directories(${PROJECT_NAME} PRIVATE ${INFER_ROOT}/src/opencv-dnn)
target_include_directories(${PROJECT_NAME} PRIVATE ${InferenceEngines_SOURCE_DIR}/src/opencv-dnn)
elseif (DEFAULT_BACKEND STREQUAL "ONNX_RUNTIME")
# old/new pair:
target_include_directories(${PROJECT_NAME} PRIVATE ${ONNX_RUNTIME_DIR}/include ${INFER_ROOT}/src/onnx-runtime)
target_include_directories(${PROJECT_NAME} PRIVATE ${ONNX_RUNTIME_DIR}/include ${InferenceEngines_SOURCE_DIR}/src/onnx-runtime)
target_link_directories(${PROJECT_NAME} PRIVATE ${ONNX_RUNTIME_DIR}/lib)
target_link_libraries(${PROJECT_NAME} PRIVATE ${ONNX_RUNTIME_DIR}/lib/libonnxruntime.so)
elseif (DEFAULT_BACKEND STREQUAL "LIBTORCH")
# old/new pair:
target_include_directories(${PROJECT_NAME} PRIVATE ${INFER_ROOT}/src/libtorch)
target_include_directories(${PROJECT_NAME} PRIVATE ${InferenceEngines_SOURCE_DIR}/src/libtorch)
target_link_libraries(${PROJECT_NAME} PRIVATE ${TORCH_LIBRARIES})
elseif (DEFAULT_BACKEND STREQUAL "TENSORRT")
# old/new pair:
target_include_directories(${PROJECT_NAME} PRIVATE /usr/local/cuda/include ${TENSORRT_DIR}/include ${INFER_ROOT}/src/tensorrt)
target_include_directories(${PROJECT_NAME} PRIVATE /usr/local/cuda/include ${TENSORRT_DIR}/include ${InferenceEngines_SOURCE_DIR}/src/tensorrt)
target_link_directories(${PROJECT_NAME} PRIVATE /usr/local/cuda/lib64 ${TENSORRT_DIR}/lib)
target_link_libraries(${PROJECT_NAME} PRIVATE nvinfer nvonnxparser cudart)
elseif(DEFAULT_BACKEND STREQUAL "LIBTENSORFLOW" )
# old/new pair:
target_include_directories(${PROJECT_NAME} PRIVATE ${TensorFlow_INCLUDE_DIRS} ${INFER_ROOT}/src/libtensorflow)
target_include_directories(${PROJECT_NAME} PRIVATE ${TensorFlow_INCLUDE_DIRS} ${InferenceEngines_SOURCE_DIR}/src/libtensorflow)
target_link_libraries(${PROJECT_NAME} PRIVATE ${TensorFlow_LIBRARIES})
elseif(DEFAULT_BACKEND STREQUAL "OPENVINO")
# old/new pair:
target_include_directories(${PROJECT_NAME} PRIVATE ${InferenceEngine_INCLUDE_DIRS} ${INFER_ROOT}/src/openvino)
target_include_directories(${PROJECT_NAME} PRIVATE ${InferenceEngine_INCLUDE_DIRS} ${InferenceEngines_SOURCE_DIR}/src/openvino)
target_link_libraries(${PROJECT_NAME} PRIVATE openvino::runtime )
endif()
30 changes: 0 additions & 30 deletions cmake/ONNXRuntime.cmake

This file was deleted.

8 changes: 0 additions & 8 deletions cmake/OpenCVdnn.cmake

This file was deleted.

11 changes: 0 additions & 11 deletions cmake/OpenVino.cmake

This file was deleted.

22 changes: 0 additions & 22 deletions cmake/TensorRT.cmake → cmake/QueryGpu.cmake
Original file line number Diff line number Diff line change
@@ -1,13 +1,3 @@
# TensorRT Configuration

set(TRT_VERSION "8.6.1.6" CACHE STRING "Tensorrt version") # modify accordingly
# Set TensorRT directory (modify accordingly)
set(TENSORRT_DIR $ENV{HOME}/TensorRT-${TRT_VERSION}/)

message(STATUS "TENSORRT_DIR: ${TENSORRT_DIR}")

# Find CUDA
find_package(CUDA REQUIRED)
execute_process(
COMMAND nvidia-smi --query-gpu=compute_cap --format=csv,noheader
OUTPUT_VARIABLE GPU_COMPUTE_CAP
Expand Down Expand Up @@ -44,15 +34,3 @@ if (GPU_COMPUTE_CAP_RESULT EQUAL 0)
else()
message("Failed to query GPU compute capability.")
endif()




set(TENSORRT_SOURCES
${INFER_ROOT}/src/tensorrt/TRTInfer.cpp
# Add more TensorRT source files here if needed
)
list(APPEND SOURCES ${TENSORRT_SOURCES})

# Add compile definition to indicate TensorRT usage
add_compile_definitions(USE_TENSORRT)
20 changes: 0 additions & 20 deletions cmake/SelectBackend.cmake

This file was deleted.

2 changes: 1 addition & 1 deletion cmake/SetCompilerFlags.cmake
Original file line number Diff line number Diff line change
Expand Up @@ -27,4 +27,4 @@ set(CMAKE_CUDA_FLAGS "${CMAKE_CUDA_FLAGS} ${CUDA_ARCH_FLAG}")

message("CMake CXX Flags Debug: ${CMAKE_CXX_FLAGS_DEBUG}")
message("CMake CXX Flags: ${CMAKE_CXX_FLAGS}")
# NOTE(review): diff view — the two identical lines below are the old/new
# pair of a newline-at-EOF fix; the real file contains this line once.
message("CMake CUDA Flags: ${CMAKE_CUDA_FLAGS}")
message("CMake CUDA Flags: ${CMAKE_CUDA_FLAGS}")
18 changes: 0 additions & 18 deletions components/inference-engines/CMakeLists.txt

This file was deleted.

25 changes: 0 additions & 25 deletions components/inference-engines/src/InferenceInterface.cpp

This file was deleted.

29 changes: 0 additions & 29 deletions components/inference-engines/src/InferenceInterface.hpp

This file was deleted.

Loading

0 comments on commit f7caa2b

Please sign in to comment.