Hi everyone! I have a problem with Tensorflow flex delegate on Android.
I’m trying to run simple model with some functions that require Flex Delegate to be imported.
I built libModelTrainer.so and libtensorflowlite_flex.so from tensorflow-r2.14 for Android.
I’m using CMake to build my own .so lib with included tf’s lib. So the first CMakeLists.txt looks like this
cmake_minimum_required(VERSION 3.10)
project(ModelTrainerLib VERSION 1.0 LANGUAGES CXX)

# Imported TF Lite core runtime. The header directories are attached to the
# imported target as usage requirements (instead of directory-scoped
# include_directories) so only consumers of `tflite` see them.
add_library(tflite SHARED IMPORTED)
set_target_properties(tflite PROPERTIES
    IMPORTED_LOCATION "${CMAKE_CURRENT_LIST_DIR}/arm64-v8a/libtensorflowlite.so"
    INTERFACE_INCLUDE_DIRECTORIES
        "${CMAKE_CURRENT_LIST_DIR}/include;${CMAKE_CURRENT_LIST_DIR}/include/flatbuffers/include"
)

# Imported Flex delegate. It exports no symbols that ModelTrainer references
# directly; it works by registering the delegate from a static initializer
# when the shared object is loaded.
add_library(tflite-flex SHARED IMPORTED)
set_target_properties(tflite-flex PROPERTIES
    IMPORTED_LOCATION "${CMAKE_CURRENT_LIST_DIR}/arm64-v8a/libtensorflowlite_flex.so"
)

# Shared library built from the project sources (listed explicitly).
add_library(ModelTrainer SHARED ModelTrainer.cpp)

# --no-as-needed forces the linker to keep a DT_NEEDED entry for
# libtensorflowlite_flex.so even though no symbol is referenced directly;
# without it, an --as-needed default silently drops the flex delegate and the
# interpreter later fails on Flex ops. --as-needed restores the default so
# the flag does not leak onto any libraries linked after this call.
target_link_libraries(ModelTrainer PRIVATE
    tflite
    -Wl,--no-as-needed
    tflite-flex
    -Wl,--as-needed
)
Then I link my own .so lib with main.cpp like this:
cmake_minimum_required(VERSION 3.13)
project(UseModelTrainerApp LANGUAGES CXX)

# Root of the prebuilt artifacts. Kept as a cache variable so the path is
# written once and can be overridden with -DMNIST_TF_ROOT=... instead of
# being hardcoded in four places.
set(MNIST_TF_ROOT "/Users/andrey/Desktop/mnist_tf"
    CACHE PATH "Root directory containing the prebuilt TF Lite / ModelTrainer libs")

add_executable(UseModelTrainerApp main.cpp)

# Imported TF Lite core runtime with its headers as usage requirements.
add_library(libtensorflowlite SHARED IMPORTED)
set_target_properties(libtensorflowlite PROPERTIES
    IMPORTED_LOCATION "${MNIST_TF_ROOT}/lib/arm64-v8a/libtensorflowlite.so"
    INTERFACE_INCLUDE_DIRECTORIES "${MNIST_TF_ROOT}/lib/include"
)

# Imported Flex delegate. It has no directly-referenced symbols; it registers
# the delegate from a static initializer when the .so is loaded, so the
# linker must be told not to drop it (see --no-as-needed below).
add_library(libtensorflowlite_flex SHARED IMPORTED)
set_target_properties(libtensorflowlite_flex PROPERTIES
    IMPORTED_LOCATION "${MNIST_TF_ROOT}/lib/arm64-v8a/libtensorflowlite_flex.so"
)

# Imported ModelTrainer library; its INTERFACE link libraries declare the
# dependencies every consumer must also link.
add_library(libModelTrainer SHARED IMPORTED)
set_target_properties(libModelTrainer PROPERTIES
    IMPORTED_LOCATION "${MNIST_TF_ROOT}/lib/build_android_arm64/libModelTrainer.so"
    INTERFACE_INCLUDE_DIRECTORIES "${MNIST_TF_ROOT}/lib"
)
# Keep --no-as-needed in effect exactly around the flex library so the final
# executable retains a DT_NEEDED entry for libtensorflowlite_flex.so;
# --as-needed restores the default afterwards. NOTE(review): at runtime the
# flex .so must also be loadable on the device (e.g. pushed next to the
# executable and found via LD_LIBRARY_PATH), or the delegate never registers.
target_link_libraries(libModelTrainer INTERFACE
    libtensorflowlite
    -Wl,--no-as-needed
    libtensorflowlite_flex
    -Wl,--as-needed
)

# Link the app against libModelTrainer only; libtensorflowlite and the flex
# delegate are pulled in transitively through its INTERFACE link libraries.
target_link_libraries(UseModelTrainerApp PRIVATE libModelTrainer)

# Android NDK log library. Guarded so a host-side configure (where liblog
# does not exist) does not pass a "NOTFOUND" token to the linker.
find_library(LOG_LIB log)
if(LOG_LIB)
    target_link_libraries(UseModelTrainerApp PRIVATE "${LOG_LIB}")
endif()
Then I get the executable file successfully. I push these files (my own lib, tf’s libs, the executable file and the model) to my device with adb. I run the program and still get this error:
ERROR: Select TensorFlow op(s), included in the given model, is(are) not supported by this interpreter. Make sure you apply/link the Flex delegate before inference. For the Android, it can be resolved by adding “org.tensorflow:tensorflow-lite-select-tf-ops” dependency. See instructions: Select TensorFlow operators | TensorFlow Lite
ERROR: Node number 5 (FlexSave) failed to prepare.
I have no idea how to fix it, so I will be very grateful for any help.