diff --git a/CMakeLists.txt b/CMakeLists.txt
index a7c7b544955..4793fa92404 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -79,14 +79,17 @@ if(ANDROID OR IOS OR ARMLINUX)
         "Disable DSO when cross-compiling for Android and iOS" FORCE)
     set(WITH_AVX OFF CACHE STRING
         "Disable AVX when cross-compiling for Android and iOS" FORCE)
-    set(LITE_WITH_PYTHON OFF CACHE STRING
-        "Disable PYTHON when cross-compiling for Android and iOS" FORCE)
     set(WITH_RDMA OFF CACHE STRING
         "Disable RDMA when cross-compiling for Android and iOS" FORCE)
     set(WITH_MKL OFF CACHE STRING
         "Disable MKL when cross-compiling for Android and iOS" FORCE)
 endif()
 
+if(ANDROID OR IOS)
+    set(LITE_WITH_PYTHON OFF CACHE STRING
+        "Disable PYTHON when cross-compiling for Android and iOS" FORCE)
+endif()
+
 set(THIRD_PARTY_PATH "${CMAKE_BINARY_DIR}/third_party" CACHE STRING
     "A path setting third party libraries download & build directories.")
 
diff --git a/cmake/cross_compiling/postproject.cmake b/cmake/cross_compiling/postproject.cmake
index 33254df03c4..88ac3e101a6 100644
--- a/cmake/cross_compiling/postproject.cmake
+++ b/cmake/cross_compiling/postproject.cmake
@@ -26,6 +26,8 @@ if(ANDROID)
 endif()
 
 if(ARMLINUX)
+    set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fPIC")
+    set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -fPIC")
     if(ARMLINUX_ARCH_ABI STREQUAL "armv8")
         set(CMAKE_CXX_FLAGS "-march=armv8-a ${CMAKE_CXX_FLAGS}")
         set(CMAKE_C_FLAGS "-march=armv8-a ${CMAKE_C_FLAGS}")
@@ -57,7 +59,10 @@ function(check_linker_flag)
 endfunction()
 set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11")
 if (LITE_ON_TINY_PUBLISH)
-  set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -ffast-math -Ofast -Os -fno-exceptions -fomit-frame-pointer -fno-asynchronous-unwind-tables -fno-unwind-tables")
+  if(NOT LITE_WITH_PYTHON)
+    set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fno-exceptions")
+  endif()
+  set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -ffast-math -Ofast -Os -fomit-frame-pointer -fno-asynchronous-unwind-tables -fno-unwind-tables")
   set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -flto -fvisibility=hidden -fvisibility-inlines-hidden -fdata-sections -ffunction-sections")
   check_linker_flag(-Wl,--gc-sections)
 endif()
diff --git a/lite/CMakeLists.txt b/lite/CMakeLists.txt
index 9a52cc8d4a1..f0731554df0 100644
--- a/lite/CMakeLists.txt
+++ b/lite/CMakeLists.txt
@@ -47,6 +47,9 @@ if (WITH_TESTING)
     endif()
 endif()
 
+# ----------------------------- PUBLISH -----------------------------
+# The final target for publish lite lib
+add_custom_target(publish_inference)
 if (LITE_WITH_LIGHT_WEIGHT_FRAMEWORK AND LITE_WITH_ARM)
     # for publish
     set(INFER_LITE_PUBLISH_ROOT "${CMAKE_BINARY_DIR}/inference_lite_lib.${ARM_TARGET_OS}.${ARM_TARGET_ARCH_ABI}")
@@ -59,10 +62,31 @@ if (LITE_WITH_LIGHT_WEIGHT_FRAMEWORK AND LITE_WITH_ARM)
     if (LITE_WITH_FPGA)
         set(INFER_LITE_PUBLISH_ROOT "${INFER_LITE_PUBLISH_ROOT}.fpga")
     endif(LITE_WITH_FPGA)
-    message(STATUS "publish inference lib to ${INFER_LITE_PUBLISH_ROOT}")
+else()
+    set(INFER_LITE_PUBLISH_ROOT "${CMAKE_BINARY_DIR}/inference_lite_lib")
+endif()
+message(STATUS "publish inference lib to ${INFER_LITE_PUBLISH_ROOT}")
+
+# add python lib
+if (LITE_WITH_PYTHON)
+    add_custom_target(publish_inference_python_lib ${TARGET}
+        COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/python/lib"
+        COMMAND cp "${CMAKE_BINARY_DIR}/lite/api/python/pybind/liblite_pybind.so" "${INFER_LITE_PUBLISH_ROOT}/python/lib/lite_core.so")
+    add_custom_target(publish_inference_python_light_demo ${TARGET}
+        COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/demo/python"
+        COMMAND cp "${CMAKE_SOURCE_DIR}/lite/demo/python/mobilenetv1_light_api.py" "${INFER_LITE_PUBLISH_ROOT}/demo/python/")
+    if (NOT LITE_ON_TINY_PUBLISH)
+        add_custom_target(publish_inference_python_full_demo ${TARGET}
+            COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/demo/python"
+            COMMAND cp "${CMAKE_SOURCE_DIR}/lite/demo/python/mobilenetv1_full_api.py" "${INFER_LITE_PUBLISH_ROOT}/demo/python/")
+        add_dependencies(publish_inference publish_inference_python_full_demo)
+    endif()
+    add_dependencies(publish_inference_python_lib lite_pybind)
+    add_dependencies(publish_inference publish_inference_python_lib)
+    add_dependencies(publish_inference publish_inference_python_light_demo)
+endif()
 
-    # The final target for publish lite lib
-    add_custom_target(publish_inference)
+if (LITE_WITH_LIGHT_WEIGHT_FRAMEWORK AND LITE_WITH_ARM)
     if (NOT LITE_ON_TINY_PUBLISH)
         # add cxx lib
         add_custom_target(publish_inference_cxx_lib ${TARGET}
diff --git a/lite/api/python/pybind/pybind.cc b/lite/api/python/pybind/pybind.cc
index 00f083f54d8..3f73c8fbb12 100644
--- a/lite/api/python/pybind/pybind.cc
+++ b/lite/api/python/pybind/pybind.cc
@@ -72,13 +72,15 @@ void BindLiteApi(py::module *m) {
   BindLiteCxxPredictor(m);
 #endif
   BindLiteLightPredictor(m);
-  // Global helper methods
+// Global helper methods
+#ifndef LITE_ON_TINY_PUBLISH
   m->def("create_paddle_predictor",
         [](const CxxConfig &config) -> std::unique_ptr<CxxPaddleApiImpl> {
           auto x = std::unique_ptr<CxxPaddleApiImpl>(new CxxPaddleApiImpl());
           x->Init(config);
           return std::move(x);
         });
+#endif
   m->def("create_paddle_predictor",
         [](const MobileConfig &config) -> std::unique_ptr<LightPredictorImpl> {
           auto x =
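Note on the changes above: pybind11 relies on C++ exceptions to surface C++ errors as Python exceptions, which is presumably why -fno-exceptions is now skipped for tiny-publish builds when LITE_WITH_PYTHON is on, and the new -fPIC flags on ARMLINUX likewise allow the static lite libraries to be linked into the liblite_pybind.so shared object. With the #ifndef LITE_ON_TINY_PUBLISH guard in pybind.cc, a tiny-publish lite_core module exposes create_paddle_predictor only for MobileConfig; calling it with a CxxConfig then fails at call time with a pybind11 TypeError (no matching overload), consistent with the full-API demo being published only for non-tiny builds.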
"${CMAKE_SOURCE_DIR}/lite/demo/python/mobilenetv1_light_api.py" "${INFER_LITE_PUBLISH_ROOT}/demo/python/") + if (NOT LITE_ON_TINY_PUBLISH) + add_custom_target(publish_inference_python_full_demo ${TARGET} + COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/demo/python" + COMMAND cp "${CMAKE_SOURCE_DIR}/lite/demo/python/mobilenetv1_full_api.py" "${INFER_LITE_PUBLISH_ROOT}/demo/python/") + add_dependencies(publish_inference publish_inference_python_full_demo) + endif() + add_dependencies(publish_inference_python_lib lite_pybind) + add_dependencies(publish_inference publish_inference_python_lib) + add_dependencies(publish_inference publish_inference_python_light_demo) +endif() - # The final target for publish lite lib - add_custom_target(publish_inference) +if (LITE_WITH_LIGHT_WEIGHT_FRAMEWORK AND LITE_WITH_ARM) if (NOT LITE_ON_TINY_PUBLISH) # add cxx lib add_custom_target(publish_inference_cxx_lib ${TARGET} diff --git a/lite/api/python/pybind/pybind.cc b/lite/api/python/pybind/pybind.cc index 00f083f54d8..3f73c8fbb12 100644 --- a/lite/api/python/pybind/pybind.cc +++ b/lite/api/python/pybind/pybind.cc @@ -72,13 +72,15 @@ void BindLiteApi(py::module *m) { BindLiteCxxPredictor(m); #endif BindLiteLightPredictor(m); - // Global helper methods +// Global helper methods +#ifndef LITE_ON_TINY_PUBLISH m->def("create_paddle_predictor", [](const CxxConfig &config) -> std::unique_ptr { auto x = std::unique_ptr(new CxxPaddleApiImpl()); x->Init(config); return std::move(x); }); +#endif m->def("create_paddle_predictor", [](const MobileConfig &config) -> std::unique_ptr { auto x = diff --git a/lite/demo/python/mobilenetv1_full_api.py b/lite/demo/python/mobilenetv1_full_api.py new file mode 100644 index 00000000000..a31469e3e8d --- /dev/null +++ b/lite/demo/python/mobilenetv1_full_api.py @@ -0,0 +1,67 @@ +# Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +''' +Paddle-Lite full python api demo +''' + +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import argparse +import sys +sys.path.append('../../python/lib') + +from lite_core import * + +# Command arguments +parser = argparse.ArgumentParser() +parser.add_argument( + "--model_dir", default="", type=str, help="Non-combined Model dir path") +parser.add_argument( + "--model_file", default="", type=str, help="Model file") +parser.add_argument( + "--param_file", default="", type=str, help="Combined model param file") + +def RunModel(args): + # 1. Set config information + config = CxxConfig() + if args.model_file != '' and args.param_file != '': + config.set_model_file(args.model_file) + config.set_param_file(args.param_file) + else: + config.set_model_dir(args.model_dir) + # For x86, you can set places = [Place(TargetType.X86, PrecisionType.FP32)] + places = [Place(TargetType.ARM, PrecisionType.FP32)] + config.set_valid_places(places) + + # 2. Create paddle predictor + predictor = create_paddle_predictor(config) + + # 3. 
diff --git a/lite/demo/python/mobilenetv1_light_api.py b/lite/demo/python/mobilenetv1_light_api.py
new file mode 100644
index 00000000000..a44427092ba
--- /dev/null
+++ b/lite/demo/python/mobilenetv1_light_api.py
@@ -0,0 +1,56 @@
+# Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+'''
+Paddle-Lite light Python API demo
+'''
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import argparse
+import sys
+sys.path.append('../../python/lib')
+
+from lite_core import *
+
+# Command arguments
+parser = argparse.ArgumentParser()
+parser.add_argument(
+    "--model_dir", default="", type=str, help="Non-combined model dir path")
+
+def RunModel(args):
+    # 1. Set config information
+    config = MobileConfig()
+    config.set_model_dir(args.model_dir)
+
+    # 2. Create paddle predictor
+    predictor = create_paddle_predictor(config)
+
+    # 3. Set input data
+    input_tensor = predictor.get_input(0)
+    input_tensor.resize([1, 3, 224, 224])
+    input_tensor.set_float_data([1.] * 3 * 224 * 224)
+
+    # 4. Run model
+    predictor.run()
+
+    # 5. Get output data
+    output_tensor = predictor.get_output(0)
+    print(output_tensor.shape())
+    print(output_tensor.float_data()[:10])
+
+if __name__ == '__main__':
+    args = parser.parse_args()
+    RunModel(args)
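Note: the light demo is the same five-step flow using MobileConfig, which is meant for models already optimized for the mobile runtime (for example by the model_optimize_tool that build.sh below can build); the path here is again only a placeholder:

  $ cd inference_lite_lib/demo/python
  $ python mobilenetv1_light_api.py --model_dir=./mobilenet_v1_opt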
diff --git a/lite/tools/build.sh b/lite/tools/build.sh
index 87e50fd11e8..8463c1497ad 100755
--- a/lite/tools/build.sh
+++ b/lite/tools/build.sh
@@ -15,6 +15,7 @@ readonly NUM_PROC=${LITE_BUILD_THREADS:-4}
 # global variables
 BUILD_EXTRA=OFF
 BUILD_JAVA=ON
+BUILD_PYTHON=OFF
 BUILD_DIR=$(pwd)
 
 readonly THIRDPARTY_TAR=https://paddle-inference-dist.bj.bcebos.com/PaddleLite/third-party-05b862.tar.gz
@@ -84,9 +85,11 @@ function make_tiny_publish_so {
     fi
 
     cmake .. \
+        ${PYTHON_FLAGS} \
        ${CMAKE_COMMON_OPTIONS} \
        -DWITH_TESTING=OFF \
        -DLITE_WITH_JAVA=$BUILD_JAVA \
+        -DLITE_WITH_PYTHON=$BUILD_PYTHON \
        -DLITE_SHUTDOWN_LOG=ON \
        -DLITE_ON_TINY_PUBLISH=ON \
        -DANDROID_STL_TYPE=$android_stl \
@@ -122,9 +125,11 @@ function make_full_publish_so {
     prepare_workspace $root_dir $build_directory
 
     cmake $root_dir \
+        ${PYTHON_FLAGS} \
        ${CMAKE_COMMON_OPTIONS} \
        -DWITH_TESTING=OFF \
        -DLITE_WITH_JAVA=$BUILD_JAVA \
+        -DLITE_WITH_PYTHON=$BUILD_PYTHON \
        -DLITE_SHUTDOWN_LOG=ON \
        -DANDROID_STL_TYPE=$android_stl \
        -DLITE_BUILD_EXTRA=$BUILD_EXTRA \
@@ -196,6 +201,35 @@ function make_ios {
     cd -
 }
 
+function make_cuda {
+    prepare_thirdparty
+
+    root_dir=$(pwd)
+    build_directory=$BUILD_DIR/build_cuda
+
+    if [ -d $build_directory ]
+    then
+        rm -rf $build_directory
+    fi
+    mkdir -p $build_directory
+    cd $build_directory
+
+    prepare_workspace $root_dir $build_directory
+
+    cmake .. -DWITH_MKL=OFF \
+             -DLITE_WITH_CUDA=ON \
+             -DWITH_MKLDNN=OFF \
+             -DLITE_WITH_X86=OFF \
+             -DLITE_WITH_PROFILE=OFF \
+             -DWITH_LITE=ON \
+             -DLITE_WITH_LIGHT_WEIGHT_FRAMEWORK=OFF \
+             -DWITH_TESTING=OFF \
+             -DLITE_WITH_ARM=OFF \
+             -DLITE_WITH_PYTHON=ON
+
+    make publish_inference_python_lib -j8
+    cd -
+}
 
 function print_usage {
     set +x
@@ -216,6 +250,8 @@ function print_usage {
     echo
     echo -e "optional argument:"
     echo -e "--build_extra: (OFF|ON); controls whether to publish extra operators and kernels for (sequence-related model such as OCR or NLP)"
+    echo -e "--build_python: (OFF|ON); controls whether to publish the python api lib (ANDROID and IOS are not supported)"
+    echo -e "--build_java: (OFF|ON); controls whether to publish the java api lib (only ANDROID is supported)"
     echo -e "--build_dir: directory for building"
     echo
     echo -e "argument choices:"
@@ -269,6 +305,14 @@ function main {
             --build_extra=*)
                 BUILD_EXTRA="${i#*=}"
                 shift
                 ;;
+            --build_python=*)
+                BUILD_PYTHON="${i#*=}"
+                shift
+                ;;
+            --build_java=*)
+                BUILD_JAVA="${i#*=}"
+                shift
+                ;;
             --build_dir=*)
                 BUILD_DIR="${i#*=}"
@@ -293,6 +337,10 @@ function main {
                 build_model_optimize_tool
                 shift
                 ;;
+            cuda)
+                make_cuda
+                shift
+                ;;
             *)
                 # unknown option
                 print_usage
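Note: a hedged sketch of the corresponding invocations. The tiny_publish/full_publish subcommand names are the script's existing entry points and are assumed here (pass your usual cross-compile options alongside them); only the cuda subcommand is introduced by this patch, and it both configures with -DLITE_WITH_PYTHON=ON and builds the publish_inference_python_lib target itself:

  $ ./lite/tools/build.sh --build_python=ON full_publish
  $ ./lite/tools/build.sh --build_python=ON tiny_publish
  $ ./lite/tools/build.sh cuda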