我想通过ndk和cmake使用c ++ tensorflow API,我可以创建该库,但是当我尝试加载图形时,加载冻结的模型时会出现很多这样的错误:
E/native: op_kernel.cc:1148 OpKernel ('op: "PopulationCount" device_type: "CPU" constraint { name: "T" allowed_values { list { type: DT_INT32 } } }') for unknown op: PopulationCount
因此,我了解到我的库不支持该操作。我是否需要添加另一个库来支持这些操作?
我基于此 CMakeLists.txt 创建了 CMake 脚本，并使用 build_android_all.sh 脚本构建了 armeabi-v7a 的依赖项。
这是我的cmake脚本的样子:
cmake_minimum_required(VERSION 3.4.1)

include(ExternalProject)

set(PROJECT_NAME tf_native_lib)
set(OpenCV_FOUND true)

# Artifacts produced by the TensorFlow contrib/makefile Android build for this ABI.
set(PREBUILT_DIR ${TENSORFLOW_ROOT_DIR}/tensorflow/contrib/makefile/gen)
set(TARGET_NSYNC_LIB ${TENSORFLOW_ROOT_DIR}/tensorflow/contrib/makefile/downloads/nsync/builds/${ANDROID_ABI}.android.c++11)

find_package(OpenCV REQUIRED)

# The JNI shared library loaded from Java via System.loadLibrary().
add_library(${PROJECT_NAME} SHARED
    src/main/cpp/native-lib.h
    src/main/cpp/native-lib.cpp
    src/main/cpp/TensorflowInferenceHandler.h
    src/main/cpp/TensorflowInferenceHandler.cpp)

# Prebuilt static archives wrapped as IMPORTED targets so they can be
# referenced by name in target_link_libraries below.
add_library(lib_proto STATIC IMPORTED)
set_target_properties(lib_proto PROPERTIES IMPORTED_LOCATION
    ${PREBUILT_DIR}/protobuf_android/${ANDROID_ABI}/lib/libprotobuf.a)

add_library(lib_nsync STATIC IMPORTED)
set_target_properties(lib_nsync PROPERTIES IMPORTED_LOCATION
    ${TARGET_NSYNC_LIB}/libnsync.a)

add_library(lib_tf STATIC IMPORTED)
set_target_properties(lib_tf PROPERTIES IMPORTED_LOCATION
    ${PREBUILT_DIR}/lib/android_${ANDROID_ABI}/libtensorflow-core.a)

# Compile options/defines scoped to this target instead of mutating the
# global CMAKE_CXX_FLAGS string (which would leak into every target below).
target_compile_options(${PROJECT_NAME} PRIVATE
    -std=c++11 -fno-rtti -fno-exceptions
    -O2 -Wno-narrowing -fomit-frame-pointer
    -mfpu=neon -mfloat-abi=softfp -fPIE -fPIC
    -ftemplate-depth=900)
target_compile_definitions(${PROJECT_NAME} PRIVATE
    IS_SLIM_BUILD
    GOOGLE_PROTOBUF_NO_RTTI
    GOOGLE_PROTOBUF_NO_STATIC_INITIALIZER)

# --allow-multiple-definition: some symbols appear in more than one of the
# prebuilt archives. NOTE: --whole-archive is intentionally NOT set here
# globally; it is applied only around libtensorflow-core.a below.
set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} \
    -Wl,--allow-multiple-definition \
    -fPIE -pie -v")

# libtensorflow-core.a must be inside --whole-archive / --no-whole-archive:
# TensorFlow registers ops and kernels via static initializers that nothing
# references directly, and the linker would otherwise discard them, producing
# "unknown op" errors at graph-load time. Closing the group with
# --no-whole-archive keeps protobuf/nsync/OpenCV linked normally so the .so
# does not pull in every unused object from those archives.
# NOTE(review): ops missing from the makefile build itself (e.g.
# PopulationCount from bitwise_ops.cc) must additionally be listed in
# tensorflow/contrib/makefile/tf_op_files.txt and TensorFlow rebuilt.
target_link_libraries(${PROJECT_NAME} PRIVATE
    -Wl,--whole-archive
    lib_tf
    -Wl,--no-whole-archive
    lib_proto
    lib_nsync
    ${OpenCV_LIBS}
    android
    dl
    log
    m
    z
    jnigraphics)

# Header search paths scoped to this target (replaces directory-wide
# include_directories, which affects every target in this directory).
target_include_directories(${PROJECT_NAME} PRIVATE
    ${OPENCV_INCLUDE_DIRS}
    ${PREBUILT_DIR}/proto
    ${PREBUILT_DIR}/protobuf_android/${ANDROID_ABI}/include
    ${PREBUILT_DIR}/nsync/public
    ${TENSORFLOW_ROOT_DIR}/tensorflow/contrib/makefile/downloads/eigen
    ${TENSORFLOW_ROOT_DIR}/bazel-tensorflow/external/nsync/public
    ${TENSORFLOW_ROOT_DIR}/bazel-genfiles
    ${TENSORFLOW_ROOT_DIR}
    ../../../cpp_utils)
如果这不可行，那么能找到将 tensorflow_inference.so 与我的 JNI 源码链接起来的示例代码吗？
提前致谢。
乌奈
PopulationCount 和其他几个操作（op）是在 tensorflow/core/ops/bitwise_ops.cc 中声明的。将该文件添加到 tensorflow/contrib/makefile/tf_op_files.txt 的末尾，然后重新编译 TensorFlow。这个方法对我有效。
本文收集自互联网,转载请注明来源。
如有侵权,请联系[email protected] 删除。
我来说两句