[Server] Support encrypt & auth for FD Server (#2018)
* [Server] Support GPU encrypt & auth for FD Triton Server

* fix dockerfile proxy env error

* update build scripts

* remove some logs

---------

Co-authored-by: root <[email protected]>
Co-authored-by: qiuyanjun <[email protected]>
3 people authored Jun 13, 2023
1 parent 73f42e3 commit ff631b7
Showing 11 changed files with 228 additions and 69 deletions.
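The new runtime build flow is driven by scripts/linux/build_linux_x86_64_cpp_gpu_encrypt_runtime.sh (added below). A minimal sketch of the expected invocation, assuming a custom Paddle Inference package that bundles the fdmodel encrypt/auth libs (all paths here are placeholders, not part of the commit):

# Sketch: invoke the encrypt/auth runtime build added by this commit.
# PADDLEINFERENCE_DIRECTORY must point to a Paddle Inference package that
# ships third_party/install/fdmodel/lib (see cmake/paddle_inference.cmake).
export PADDLEINFERENCE_DIRECTORY=/path/to/paddle_inference   # placeholder
export PADDLEINFERENCE_VERSION=2.4.2                         # placeholder
export CUDA_DIRECTORY=/usr/local/cuda
export TRT_DIRECTORY=/path/to/TensorRT-8.5.2.2               # placeholder
./scripts/linux/build_linux_x86_64_cpp_gpu_encrypt_runtime.sh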
5 changes: 4 additions & 1 deletion .gitignore
@@ -47,4 +47,7 @@ python/fastdeploy/code_version.py
*.pdmodel
*.pdiparams
*.pdiparams.info
log.txt
log.txt
serving/build
serving/build.encrypt
serving/build.encrypt.auth
13 changes: 7 additions & 6 deletions CMakeLists.txt
@@ -392,7 +392,11 @@ if(ENABLE_TRT_BACKEND)
if(EXISTS "${CMAKE_CURRENT_BINARY_DIR}/third_libs/install/tensorrt/lib")
file(REMOVE_RECURSE "${CMAKE_CURRENT_BINARY_DIR}/third_libs/install/tensorrt/lib")
endif()
find_package(Python COMPONENTS Interpreter Development REQUIRED)

if (NOT Python_EXECUTABLE)
find_package(Python COMPONENTS Interpreter Development REQUIRED)
endif()

message(STATUS "Copying ${TRT_DIRECTORY}/lib to ${CMAKE_CURRENT_BINARY_DIR}/third_libs/install/tensorrt/lib ...")
execute_process(COMMAND ${Python_EXECUTABLE} ${PROJECT_SOURCE_DIR}/scripts/copy_directory.py ${TRT_DIRECTORY}/lib ${CMAKE_CURRENT_BINARY_DIR}/third_libs/install/tensorrt/lib)
file(GLOB_RECURSE TRT_STATIC_LIBS ${CMAKE_CURRENT_BINARY_DIR}/third_libs/install/tensorrt/lib/*.a)
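With the new guard, a caller that pre-seeds the interpreter (as the build script added below does) skips the find_package(Python ...) lookup entirely. A minimal sketch, where the TensorRT path is a placeholder:

# Sketch: pre-seeding Python_EXECUTABLE bypasses find_package(Python ...)
cmake -DENABLE_TRT_BACKEND=ON \
      -DTRT_DIRECTORY=/path/to/TensorRT \
      -DPython_EXECUTABLE=/usr/bin/python3 ..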
@@ -515,12 +519,9 @@ if(MSVC)
endif()

target_link_libraries(${LIBRARY_NAME} ${DEPEND_LIBS})
# Note(qiuyanjun): Currently, we need to manually link the whole
# leveldb static lib into the fastdeploy lib if PADDLEINFERENCE_WITH_ENCRYPT_AUTH
# is 'ON'. This policy will be removed once the bug in the paddle inference
# lib with auth & encrypt is fixed.

if(ENABLE_PADDLE_BACKEND)
enable_paddle_encrypt_auth_link_policy(${LIBRARY_NAME})
set_paddle_encrypt_auth_link_policy(${LIBRARY_NAME})
endif()

if(ANDROID)
27 changes: 17 additions & 10 deletions FastDeploy.cmake.in
@@ -63,7 +63,8 @@ set(WITH_ANDROID_JAVA @WITH_ANDROID_JAVA@)
set(WITH_ANDROID_TENSOR_FUNCS @WITH_ANDROID_TENSOR_FUNCS@)

# encryption and auth
set(PADDLEINFERENCE_WITH_ENCRYPT_AUTH @PADDLEINFERENCE_WITH_ENCRYPT_AUTH@)
set(PADDLEINFERENCE_WITH_ENCRYPT @PADDLEINFERENCE_WITH_ENCRYPT@)
set(PADDLEINFERENCE_WITH_AUTH @PADDLEINFERENCE_WITH_AUTH@)

set(FASTDEPLOY_LIBS "")
set(FASTDEPLOY_INCS "")
@@ -152,15 +153,21 @@ if(ENABLE_PADDLE_BACKEND)
else()
set(DNNL_LIB "${CMAKE_CURRENT_LIST_DIR}/third_libs/install/paddle_inference/third_party/install/mkldnn/lib/libmkldnn.so.0")
set(IOMP_LIB "${CMAKE_CURRENT_LIST_DIR}/third_libs/install/paddle_inference/third_party/install/mklml/lib/libiomp5.so")
if(PADDLEINFERENCE_WITH_ENCRYPT_AUTH)
set(FDMODEL_LIB "${CMAKE_CURRENT_LIST_DIR}/third_libs/install/paddle_inference/third_party/install/fdmodel/lib/libfastdeploy_wenxin.so")
set(FDMODEL_AUTH_LIB "${CMAKE_CURRENT_LIST_DIR}/third_libs/install/paddle_inference/third_party/install/fdmodel/lib/libfastdeploy_auth.so")
set(FDMODEL_MODEL_LIB "${CMAKE_CURRENT_LIST_DIR}/third_libs/install/paddle_inference/third_party/install/fdmodel/lib/libfastdeploy_model.so.2.0.0")
set(LEVELDB_LIB_DIR "${CMAKE_CURRENT_LIST_DIR}/third_libs/install/paddle_inference/third_party/install/leveldb/lib/")
list(APPEND FASTDEPLOY_LIBS ${FDMODEL_LIB} ${FDMODEL_AUTH_LIB} ${FDMODEL_MODEL_LIB})
# link_directories(LEVELDB_LIB_DIR)
# list(APPEND FASTDEPLOY_LIBS -lssl -lcrypto -lleveldb)
list(APPEND FASTDEPLOY_LIBS -lssl -lcrypto)
set(FDMODEL_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/fdmodel/lib/libfastdeploy_wenxin.so")
set(FDMODEL_MODEL_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/fdmodel/lib/libfastdeploy_model.so.2.0.0")
set(FDMODEL_AUTH_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/fdmodel/lib/libfastdeploy_auth.so")
if((EXISTS ${FDMODEL_LIB}) AND (EXISTS ${FDMODEL_MODEL_LIB}))
set(PADDLEINFERENCE_WITH_ENCRYPT ON CACHE BOOL "" FORCE)
list(APPEND FASTDEPLOY_LIBS ${FDMODEL_LIB} ${FDMODEL_MODEL_LIB})
endif()
if((EXISTS ${FDMODEL_LIB}) AND (EXISTS ${FDMODEL_AUTH_LIB}))
set(PADDLEINFERENCE_WITH_AUTH ON CACHE BOOL "" FORCE)
list(APPEND FASTDEPLOY_LIBS ${FDMODEL_AUTH_LIB})
endif()
if(PADDLEINFERENCE_WITH_ENCRYPT OR PADDLEINFERENCE_WITH_AUTH)
if(WITH_KUNLUNXIN)
list(APPEND FASTDEPLOY_LIBS -lssl -lcrypto)
endif()
endif()
endif()
list(APPEND FASTDEPLOY_LIBS ${PADDLE_LIB})
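For SDK consumers, FastDeploy.cmake(.in) re-runs this file-based detection against the installed package, so the encrypt/auth libs are appended to FASTDEPLOY_LIBS automatically when present and no extra flags are needed. A sketch of a typical downstream configure, assuming the usual FastDeploy example convention and a placeholder install path:

# Sketch: downstream project picking up the installed SDK; encrypt/auth
# libs are detected and linked automatically when shipped with the package.
cmake -DFASTDEPLOY_INSTALL_DIR=/path/to/fastdeploy_install ..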
120 changes: 72 additions & 48 deletions cmake/paddle_inference.cmake
@@ -22,7 +22,6 @@ endif()

# Custom options for Paddle Inference backend
option(PADDLEINFERENCE_DIRECTORY "Directory of custom Paddle Inference library" OFF)
option(PADDLEINFERENCE_WITH_ENCRYPT_AUTH "Whether the Paddle Inference is built with FD encryption and auth" OFF)

set(PADDLEINFERENCE_PROJECT "extern_paddle_inference")
set(PADDLEINFERENCE_PREFIX_DIR ${THIRD_PARTY_PATH}/paddle_inference)
@@ -43,44 +42,15 @@ if(PADDLEINFERENCE_DIRECTORY)
endif()

include_directories(${PADDLEINFERENCE_INC_DIR})
if(WIN32)
set(PADDLEINFERENCE_COMPILE_LIB
"${PADDLEINFERENCE_INSTALL_DIR}/paddle/lib/paddle_inference.lib"
CACHE FILEPATH "paddle_inference compile library." FORCE)
set(DNNL_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/mkldnn/lib/mkldnn.lib")
set(OMP_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/mklml/lib/libiomp5md.lib")
set(P2O_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/paddle2onnx/lib/paddle2onnx.lib")
set(ORT_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/onnxruntime/lib/onnxruntime.lib")
elseif(APPLE)
set(PADDLEINFERENCE_COMPILE_LIB
"${PADDLEINFERENCE_INSTALL_DIR}/paddle/lib/libpaddle_inference.dylib"
CACHE FILEPATH "paddle_inference compile library." FORCE)
set(DNNL_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/mkldnn/lib/libdnnl.so.2")
set(OMP_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/mklml/lib/libiomp5.so")
set(P2O_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/paddle2onnx/lib/libpaddle2onnx.dylib")
set(ORT_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/onnxruntime/lib/libonnxruntime.dylib")
else()
set(PADDLEINFERENCE_COMPILE_LIB
"${PADDLEINFERENCE_INSTALL_DIR}/paddle/lib/libpaddle_inference.so"
CACHE FILEPATH "paddle_inference compile library." FORCE)
set(DNNL_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/mkldnn/lib/libdnnl.so.2")
set(OMP_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/mklml/lib/libiomp5.so")
set(P2O_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/paddle2onnx/lib/libpaddle2onnx.so")
set(ORT_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/onnxruntime/lib/libonnxruntime.so")
if(PADDLEINFERENCE_WITH_ENCRYPT_AUTH)
set(FDMODEL_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/fdmodel/lib/libfastdeploy_wenxin.so")
set(FDMODEL_AUTH_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/fdmodel/lib/libfastdeploy_auth.so")
set(FDMODEL_MODEL_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/fdmodel/lib/libfastdeploy_model.so.2.0.0")
set(LEVELDB_LIB_DIR "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/leveldb/lib")
endif()
endif(WIN32)

if(PADDLEINFERENCE_DIRECTORY)
# Use custom Paddle Inference libs.
if(EXISTS "${THIRD_PARTY_PATH}/install/paddle_inference")
file(REMOVE_RECURSE "${THIRD_PARTY_PATH}/install/paddle_inference")
endif()
find_package(Python COMPONENTS Interpreter Development REQUIRED)
if(NOT Python_EXECUTABLE)
find_package(Python COMPONENTS Interpreter Development REQUIRED)
endif()
message(STATUS "Copying ${PADDLEINFERENCE_DIRECTORY} to ${THIRD_PARTY_PATH}/install/paddle_inference ...")
if(WIN32)
execute_process(COMMAND mkdir -p ${THIRD_PARTY_PATH}/install)
@@ -166,6 +136,51 @@ else()

endif(PADDLEINFERENCE_DIRECTORY)

# check libs
set(PADDLEINFERENCE_WITH_AUTH OFF)
set(PADDLEINFERENCE_WITH_ENCRYPT OFF)
if(WIN32)
set(PADDLEINFERENCE_COMPILE_LIB
"${PADDLEINFERENCE_INSTALL_DIR}/paddle/lib/paddle_inference.lib"
CACHE FILEPATH "paddle_inference compile library." FORCE)
set(DNNL_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/mkldnn/lib/mkldnn.lib")
set(OMP_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/mklml/lib/libiomp5md.lib")
set(P2O_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/paddle2onnx/lib/paddle2onnx.lib")
set(ORT_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/onnxruntime/lib/onnxruntime.lib")
elseif(APPLE)
set(PADDLEINFERENCE_COMPILE_LIB
"${PADDLEINFERENCE_INSTALL_DIR}/paddle/lib/libpaddle_inference.dylib"
CACHE FILEPATH "paddle_inference compile library." FORCE)
set(DNNL_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/mkldnn/lib/libdnnl.so.2")
set(OMP_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/mklml/lib/libiomp5.so")
set(P2O_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/paddle2onnx/lib/libpaddle2onnx.dylib")
set(ORT_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/onnxruntime/lib/libonnxruntime.dylib")
else()
set(PADDLEINFERENCE_COMPILE_LIB
"${PADDLEINFERENCE_INSTALL_DIR}/paddle/lib/libpaddle_inference.so"
CACHE FILEPATH "paddle_inference compile library." FORCE)
set(DNNL_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/mkldnn/lib/libdnnl.so.2")
set(OMP_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/mklml/lib/libiomp5.so")
set(P2O_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/paddle2onnx/lib/libpaddle2onnx.so")
set(ORT_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/onnxruntime/lib/libonnxruntime.so")
# Check whether the encrypt and auth libs exist. Only supported with PADDLEINFERENCE_DIRECTORY for now.
if(PADDLEINFERENCE_DIRECTORY)
set(FDMODEL_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/fdmodel/lib/libfastdeploy_wenxin.so")
set(FDMODEL_MODEL_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/fdmodel/lib/libfastdeploy_model.so.2.0.0")
set(FDMODEL_AUTH_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/fdmodel/lib/libfastdeploy_auth.so")
set(FDMODEL_LEVELDB_LIB_DIR "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/leveldb")
set(FDMODEL_LEVELDB_LIB_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/leveldb/lib/libleveldb.a")
if((EXISTS ${FDMODEL_LIB}) AND (EXISTS ${FDMODEL_MODEL_LIB}))
set(PADDLEINFERENCE_WITH_ENCRYPT ON CACHE BOOL "" FORCE)
message(STATUS "Detected ${FDMODEL_LIB} and ${FDMODEL_MODEL_LIB} exists, fource PADDLEINFERENCE_WITH_ENCRYPT=${PADDLEINFERENCE_WITH_ENCRYPT}")
endif()
if((EXISTS ${FDMODEL_LIB}) AND (EXISTS ${FDMODEL_AUTH_LIB}))
set(PADDLEINFERENCE_WITH_AUTH ON CACHE BOOL "" FORCE)
message(STATUS "Detected ${FDMODEL_LIB} and ${FDMODEL_AUTH_LIB} exists, fource PADDLEINFERENCE_WITH_AUTH=${PADDLEINFERENCE_WITH_AUTH}")
endif()
endif()
endif(WIN32)
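Since the detection is purely file-based, a candidate Paddle Inference package can be sanity-checked before configuring, using the same paths probed above (a sketch):

# Sketch: encrypt needs libfastdeploy_wenxin.so + libfastdeploy_model.so.2.0.0,
# auth additionally needs libfastdeploy_auth.so.
ls "${PADDLEINFERENCE_DIRECTORY}/third_party/install/fdmodel/lib/"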

# Patch Paddle Inference ELF lib file
if(UNIX AND (NOT APPLE) AND (NOT ANDROID))
add_custom_target(patchelf_paddle_inference ALL COMMAND bash -c "PATCHELF_EXE=${PATCHELF_EXE} python ${PROJECT_SOURCE_DIR}/scripts/patch_paddle_inference.py ${PADDLEINFERENCE_INSTALL_DIR}/paddle/lib/libpaddle_inference.so" DEPENDS ${LIBRARY_NAME})
@@ -198,28 +213,37 @@ set_property(TARGET external_omp PROPERTY IMPORTED_LOCATION
add_dependencies(external_omp ${PADDLEINFERENCE_PROJECT})

set(ENCRYPT_AUTH_LIBS )
if(PADDLEINFERENCE_WITH_ENCRYPT_AUTH)
if(PADDLEINFERENCE_WITH_ENCRYPT)
add_library(external_fdmodel STATIC IMPORTED GLOBAL)
set_property(TARGET external_fdmodel PROPERTY IMPORTED_LOCATION
${FDMODEL_LIB})
add_library(external_fdmodel_auth STATIC IMPORTED GLOBAL)
set_property(TARGET external_fdmodel_auth PROPERTY IMPORTED_LOCATION
${FDMODEL_AUTH_LIB})

add_library(external_fdmodel_model STATIC IMPORTED GLOBAL)
set_property(TARGET external_fdmodel_model PROPERTY IMPORTED_LOCATION
${FDMODEL_MODEL_LIB})
add_dependencies(external_fdmodel ${PADDLEINFERENCE_PROJECT})
add_dependencies(external_fdmodel_auth ${PADDLEINFERENCE_PROJECT})
add_dependencies(external_fdmodel_model ${PADDLEINFERENCE_PROJECT})
list(APPEND ENCRYPT_AUTH_LIBS external_fdmodel external_fdmodel_auth external_fdmodel_model)
list(APPEND ENCRYPT_AUTH_LIBS external_fdmodel external_fdmodel_model)
endif()

if(PADDLEINFERENCE_WITH_AUTH)
add_library(external_fdmodel_auth STATIC IMPORTED GLOBAL)
set_property(TARGET external_fdmodel_auth PROPERTY IMPORTED_LOCATION
${FDMODEL_AUTH_LIB})
list(APPEND ENCRYPT_AUTH_LIBS external_fdmodel_auth)
endif()

function(enable_paddle_encrypt_auth_link_policy LIBRARY_NAME)
if(ENABLE_PADDLE_BACKEND AND PADDLEINFERENCE_WITH_ENCRYPT_AUTH)
link_directories(${LEVELDB_LIB_DIR})
target_link_libraries(${LIBRARY_NAME} ${ENCRYPT_AUTH_LIBS} -lssl -lcrypto)
target_link_libraries(${LIBRARY_NAME} ${LEVELDB_LIB_DIR}/libleveldb.a)
set_target_properties(${LIBRARY_NAME} PROPERTIES LINK_FLAGS
"-Wl,--whole-archive ${LEVELDB_LIB_DIR}/libleveldb.a -Wl,-no-whole-archive")
function(set_paddle_encrypt_auth_link_policy LIBRARY_NAME)
if(ENABLE_PADDLE_BACKEND AND (PADDLEINFERENCE_WITH_ENCRYPT OR PADDLEINFERENCE_WITH_AUTH))
target_link_libraries(${LIBRARY_NAME} ${ENCRYPT_AUTH_LIBS})
# Note(qiuyanjun): Currently, for XPU, we need to manually link the whole
# leveldb static lib into the fastdeploy lib if PADDLEINFERENCE_WITH_ENCRYPT
# or PADDLEINFERENCE_WITH_AUTH is 'ON'. This policy will be removed once
# the bug in the paddle inference lib with auth & encrypt is fixed.
if((EXISTS ${FDMODEL_LEVELDB_LIB_LIB}) AND WITH_KUNLUNXIN)
target_link_libraries(${LIBRARY_NAME} -lssl -lcrypto)
link_directories(${FDMODEL_LEVELDB_LIB_DIR})
target_link_libraries(${LIBRARY_NAME} ${FDMODEL_LEVELDB_LIB_LIB})
set_target_properties(${LIBRARY_NAME} PROPERTIES LINK_FLAGS
"-Wl,--whole-archive ${FDMODEL_LEVELDB_LIB_LIB} -Wl,-no-whole-archive")
endif()
endif()
endfunction()
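The whole-archive leveldb policy only kicks in for Kunlunxin (XPU) builds against a custom Paddle Inference package that ships the leveldb static lib. A configure that would exercise this branch might look like the following sketch (WITH_KUNLUNXIN is an existing FastDeploy option; paths are placeholders):

# Sketch: XPU build that triggers set_paddle_encrypt_auth_link_policy's
# -Wl,--whole-archive leveldb linking.
cmake -DENABLE_PADDLE_BACKEND=ON -DWITH_KUNLUNXIN=ON \
      -DPADDLEINFERENCE_DIRECTORY=/path/to/paddle_inference ..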
2 changes: 2 additions & 0 deletions cmake/summary.cmake
@@ -62,6 +62,8 @@ function(fastdeploy_summary)
endif()
if(ENABLE_PADDLE_BACKEND)
message(STATUS " Paddle Inference version : ${PADDLEINFERENCE_VERSION}")
message(STATUS " PADDLE_WITH_ENCRYPT : ${PADDLEINFERENCE_WITH_ENCRYPT}")
message(STATUS " PADDLE_WITH_AUTH : ${PADDLEINFERENCE_WITH_AUTH}")
endif()
if(ENABLE_POROS_BACKEND)
message(STATUS " Poros version : ${POROS_VERSION}")
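With the two new summary lines, the effective encrypt/auth state can be read straight off the configure log, e.g. (a sketch):

# Sketch: check the detected encrypt/auth flags at configure time.
cmake .. 2>&1 | grep -E 'PADDLE_WITH_(ENCRYPT|AUTH)'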
106 changes: 106 additions & 0 deletions scripts/linux/build_linux_x86_64_cpp_gpu_encrypt_runtime.sh
@@ -0,0 +1,106 @@
#!/usr/bin/env bash
# Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

set -e
set +x

# -------------------------------------------------------------------------------
# readonly global variables
# -------------------------------------------------------------------------------
readonly ROOT_PATH=$(pwd)
readonly BUILD_ROOT=build/Linux
readonly BUILD_DIR="${BUILD_ROOT}/x86_64_gpu"

# -------------------------------------------------------------------------------
# tasks
# -------------------------------------------------------------------------------
__make_build_dir() {
if [ ! -d "${BUILD_DIR}" ]; then
echo "-- [INFO] BUILD_DIR: ${BUILD_DIR} not exists, setup manually ..."
if [ ! -d "${BUILD_ROOT}" ]; then
mkdir -p "${BUILD_ROOT}" && echo "-- [INFO] Created ${BUILD_ROOT} !"
fi
mkdir -p "${BUILD_DIR}" && echo "-- [INFO] Created ${BUILD_DIR} !"
else
echo "-- [INFO] Found BUILD_DIR: ${BUILD_DIR}"
fi
}

__check_cxx_envs() {
if [ $LDFLAGS ]; then
echo "-- [INFO] Found LDFLAGS: ${LDFLAGS}, \c"
echo "unset it before crossing compiling ${BUILD_DIR}"
unset LDFLAGS
fi
if [ $CPPFLAGS ]; then
echo "-- [INFO] Found CPPFLAGS: ${CPPFLAGS}, \c"
echo "unset it before crossing compiling ${BUILD_DIR}"
unset CPPFLAGS
fi
if [ $CPLUS_INCLUDE_PATH ]; then
echo "-- [INFO] Found CPLUS_INCLUDE_PATH: ${CPLUS_INCLUDE_PATH}, \c"
echo "unset it before crossing compiling ${BUILD_DIR}"
unset CPLUS_INCLUDE_PATH
fi
if [ $C_INCLUDE_PATH ]; then
echo "-- [INFO] Found C_INCLUDE_PATH: ${C_INCLUDE_PATH}, \c"
echo "unset it before crossing compiling ${BUILD_DIR}"
unset C_INCLUDE_PATH
fi
}

__build_fastdeploy_linux_x86_64_gpu_shared_custom_paddle() {

local FASTDEPLOY_INSTALL_DIR="${ROOT_PATH}/${BUILD_DIR}/fastdeploy_install"
cd "${BUILD_DIR}" && echo "-- [INFO] Working Dir: ${PWD}"

cmake -DCMAKE_BUILD_TYPE=Release \
-DWITH_GPU=ON \
-DTRT_DIRECTORY=${TRT_DIRECTORY} \
-DCUDA_DIRECTORY=${CUDA_DIRECTORY} \
-DENABLE_ORT_BACKEND=ON \
-DENABLE_TRT_BACKEND=ON \
-DENABLE_PADDLE_BACKEND=ON \
-DPADDLEINFERENCE_DIRECTORY=${PADDLEINFERENCE_DIRECTORY} \
-DPADDLEINFERENCE_VERSION=${PADDLEINFERENCE_VERSION} \
-DENABLE_OPENVINO_BACKEND=ON \
-DENABLE_PADDLE2ONNX=ON \
-DENABLE_VISION=OFF \
-DENABLE_BENCHMARK=OFF \
-DBUILD_EXAMPLES=OFF \
-DPython_EXECUTABLE=/usr/bin/python3 \
-DCMAKE_INSTALL_PREFIX=${FASTDEPLOY_INSTALL_DIR} \
-DLIBRARY_NAME=fastdeploy_runtime \
-Wno-dev ../../.. && make -j8 && make install

echo "-- [INFO][built][x86_64_gpu}][${FASDEPLOY_INSTALL_DIR}]"
echo "-- [INFO][${PADDLEINFERENCE_DIRECTORY}][${PADDLEINFERENCE_VERSION}]"
}

main() {
__make_build_dir
__check_cxx_envs
__build_fastdeploy_linux_x86_64_gpu_shared_custom_paddle
exit 0
}

main

# Usage:
# export PADDLEINFERENCE_DIRECTORY=xxx
# export PADDLEINFERENCE_VERSION=xxx
# export CUDA_DIRECTORY=/usr/local/cuda
# export TRT_DIRECTORY=/home/qiuyanjun/TensorRT-8.5.2.2
# ./scripts/linux/build_linux_x86_64_cpp_gpu_encrypt_runtime.sh
7 changes: 6 additions & 1 deletion serving/Dockerfile
@@ -80,4 +80,9 @@ COPY build/fastdeploy_install/* /opt/fastdeploy/

# Set environment variable
ENV LD_LIBRARY_PATH="/opt/fastdeploy/lib:/opt/fastdeploy/third_libs/install/onnxruntime/lib:/opt/fastdeploy/third_libs/install/paddle2onnx/lib:/opt/fastdeploy/third_libs/install/paddle_inference/paddle/lib:/opt/fastdeploy/third_libs/install/openvino/runtime/lib/:/opt/fastdeploy/third_libs/install/tensorrt/lib/:/opt/fastdeploy/third_libs/install/opencv/lib64/:$LD_LIBRARY_PATH"
ENV PATH="/opt/tritonserver/bin:$PATH"
ENV PATH="/opt/tritonserver/bin:$PATH"

ENV http_proxy=
ENV https_proxy=
ENV no_proxy=
ENV TZ=Asia/Shanghai
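Clearing the proxy variables in the final image (the "fix dockerfile proxy env error" item from the commit message) keeps build-time proxies from leaking into running containers. Proxies can still be supplied per build via Docker's predefined proxy build args; a sketch where the tag and proxy URL are placeholders:

# Sketch: proxies used only during the image build, not baked into the image.
docker build -f serving/Dockerfile \
  --build-arg http_proxy=http://proxy.example:8080 \
  --build-arg https_proxy=http://proxy.example:8080 \
  -t fastdeploy-serving .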
2 changes: 1 addition & 1 deletion serving/Dockerfile_CUDA_11_2_TRT_8_5_PADDLE_2_4_2
@@ -58,5 +58,5 @@ RUN python3 -m pip install https://paddle-wheel.bj.bcebos.com/2.4.2/linux/linux-
COPY serving/build/libtriton_fastdeploy.so /opt/tritonserver/backends/fastdeploy/
COPY build/fastdeploy_install /opt/fastdeploy/

ENV LD_LIBRARY_PATH="/opt/TensorRT-8.5.2.2/lib/:/opt/fastdeploy/lib:/opt/fastdeploy/third_libs/install/onnxruntime/lib:/opt/fastdeploy/third_libs/install/paddle2onnx/lib:/opt/fastdeploy/third_libs/install/tensorrt/lib:/opt/fastdeploy/third_libs/install/paddle_inference/paddle/lib:/opt/fastdeploy/third_libs/install/paddle_inference/third_party/install/mkldnn/lib:/opt/fastdeploy/third_libs/install/paddle_inference/third_party/install/mklml/lib:/opt/fastdeploy/third_libs/install/openvino/runtime/lib:$LD_LIBRARY_PATH"
ENV LD_LIBRARY_PATH="/opt/fastdeploy/lib:/opt/fastdeploy/third_libs/install/onnxruntime/lib:/opt/fastdeploy/third_libs/install/paddle2onnx/lib:/opt/fastdeploy/third_libs/install/tensorrt/lib:/opt/fastdeploy/third_libs/install/paddle_inference/paddle/lib:/opt/fastdeploy/third_libs/install/paddle_inference/third_party/install/mkldnn/lib:/opt/fastdeploy/third_libs/install/paddle_inference/third_party/install/mklml/lib:/opt/fastdeploy/third_libs/install/openvino/runtime/lib:$LD_LIBRARY_PATH"
ENV PATH="/opt/tritonserver/bin:$PATH"
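After dropping the hard-coded /opt/TensorRT-8.5.2.2/lib entry, TensorRT is expected to resolve from the bundled third_libs tensorrt directory on LD_LIBRARY_PATH. This can be spot-checked inside the container (a sketch; the exact library names are assumptions):

# Sketch: verify the Triton backend resolves TensorRT from the bundled libs.
ldd /opt/tritonserver/backends/fastdeploy/libtriton_fastdeploy.so | grep -i -E 'nvinfer|tensorrt'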