Fix compilation and generation of the tflite_runtime Python package.

New package file structure:
  tflite_runtime
    __init__.py
    interpreter.py
    _interpreter_wrapper.<optional-suffix>.so
    interpreter_wrapper.py

Cross-compilation is fully supported for the rpi and aarch64 targets:
TENSORFLOW_TARGET=rpi build_pip_package.sh
TENSORFLOW_TARGET=aarch64 build_pip_package.sh

The Python version is configurable via the PYTHON env var:
PYTHON=python3 build_pip_package.sh
PiperOrigin-RevId: 257690020
Author: A. Unique TensorFlower (2019-07-11 15:05:33 -07:00), committed by TensorFlower Gardener
Parent: 7b1bbadeda, commit: e9bea60124
8 changed files with 103 additions and 72 deletions


@@ -40,7 +40,7 @@ try:
 except ImportError:
   # When full Tensorflow Python PIP is not available do not use lazy load
   # and instead of the tflite_runtime path.
-  from tflite_runtime.lite.python import interpreter_wrapper as _interpreter_wrapper
+  from tflite_runtime import interpreter_wrapper as _interpreter_wrapper

   def tf_export_dummy(*x, **kwargs):
     del x, kwargs
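
For context, a minimal sketch of the import fallback this hunk touches. The contents of the try branch are assumed here (the real interpreter.py goes through TensorFlow's lazy-loading machinery); only the except branch is taken verbatim from the diff:

```
try:
  # Full TensorFlow is installed: reuse its interpreter wrapper (assumed branch).
  from tensorflow.lite.python import interpreter_wrapper as _interpreter_wrapper
except ImportError:
  # Standalone tflite_runtime wheel: the wrapper now lives at the package root
  # instead of the old tflite_runtime.lite.python location.
  from tflite_runtime import interpreter_wrapper as _interpreter_wrapper
```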


@@ -79,7 +79,7 @@ BENCHMARK_BINARY_NAME := benchmark_model
 # A small example program that shows how to link against the library.
 MINIMAL_SRCS := \
-tensorflow/lite/examples/minimal/minimal.cc
+tensorflow/lite/examples/minimal/minimal.cc
 # What sources we want to compile, must be kept in sync with the main Bazel
 # build files.
@@ -91,7 +91,7 @@ PROFILE_SUMMARIZER_SRCS := \
 tensorflow/core/util/stats_calculator.cc
 CMD_LINE_TOOLS_SRCS := \
-tensorflow/lite/tools/command_line_flags.cc
+tensorflow/lite/tools/command_line_flags.cc
 CORE_CC_ALL_SRCS := \
 $(wildcard tensorflow/lite/*.cc) \
@@ -99,19 +99,19 @@ $(wildcard tensorflow/lite/*.c) \
 $(wildcard tensorflow/lite/c/*.c) \
 $(wildcard tensorflow/lite/core/*.cc) \
 $(wildcard tensorflow/lite/core/api/*.cc) \
-$(wildcard tensorflow/lite/experimental/ruy/allocator.cc) \
-$(wildcard tensorflow/lite/experimental/ruy/block_map.cc) \
-$(wildcard tensorflow/lite/experimental/ruy/blocking_counter.cc) \
-$(wildcard tensorflow/lite/experimental/ruy/context.cc) \
-$(wildcard tensorflow/lite/experimental/ruy/detect_dotprod.cc) \
-$(wildcard tensorflow/lite/experimental/ruy/kernel.cc) \
-$(wildcard tensorflow/lite/experimental/ruy/pack.cc) \
-$(wildcard tensorflow/lite/experimental/ruy/pmu.cc) \
-$(wildcard tensorflow/lite/experimental/ruy/thread_pool.cc) \
-$(wildcard tensorflow/lite/experimental/ruy/trace.cc) \
-$(wildcard tensorflow/lite/experimental/ruy/trmul.cc) \
-$(wildcard tensorflow/lite/experimental/ruy/tune.cc) \
-$(wildcard tensorflow/lite/experimental/ruy/wait.cc)
+tensorflow/lite/experimental/ruy/allocator.cc \
+tensorflow/lite/experimental/ruy/block_map.cc \
+tensorflow/lite/experimental/ruy/blocking_counter.cc \
+tensorflow/lite/experimental/ruy/context.cc \
+tensorflow/lite/experimental/ruy/detect_dotprod.cc \
+tensorflow/lite/experimental/ruy/kernel.cc \
+tensorflow/lite/experimental/ruy/pack.cc \
+tensorflow/lite/experimental/ruy/pmu.cc \
+tensorflow/lite/experimental/ruy/thread_pool.cc \
+tensorflow/lite/experimental/ruy/trace.cc \
+tensorflow/lite/experimental/ruy/trmul.cc \
+tensorflow/lite/experimental/ruy/tune.cc \
+tensorflow/lite/experimental/ruy/wait.cc
 ifneq ($(BUILD_TYPE),micro)
 CORE_CC_ALL_SRCS += \
 $(wildcard tensorflow/lite/kernels/*.cc) \
@@ -119,13 +119,9 @@ $(wildcard tensorflow/lite/kernels/internal/*.cc) \
 $(wildcard tensorflow/lite/kernels/internal/optimized/*.cc) \
 $(wildcard tensorflow/lite/kernels/internal/reference/*.cc) \
 $(PROFILER_SRCS) \
-$(wildcard tensorflow/lite/kernels/*.c) \
-$(wildcard tensorflow/lite/kernels/internal/*.c) \
-$(wildcard tensorflow/lite/kernels/internal/optimized/*.c) \
-$(wildcard tensorflow/lite/kernels/internal/reference/*.c) \
-$(wildcard tensorflow/lite/tools/make/downloads/farmhash/src/farmhash.cc) \
-$(wildcard tensorflow/lite/tools/make/downloads/fft2d/fftsg.c) \
-$(wildcard tensorflow/lite/tools/make/downloads/flatbuffers/src/util.cpp)
+tensorflow/lite/tools/make/downloads/farmhash/src/farmhash.cc \
+tensorflow/lite/tools/make/downloads/fft2d/fftsg.c \
+tensorflow/lite/tools/make/downloads/flatbuffers/src/util.cpp
 endif
 # Remove any duplicates.
 CORE_CC_ALL_SRCS := $(sort $(CORE_CC_ALL_SRCS))
@@ -138,7 +134,7 @@ $(wildcard tensorflow/lite/kernels/*test_main.cc) \
 $(wildcard tensorflow/lite/kernels/*test_util.cc) \
 $(MINIMAL_SRCS)
-BUILD_WITH_MMAP=true
+BUILD_WITH_MMAP ?= true
 ifeq ($(BUILD_TYPE),micro)
 BUILD_WITH_MMAP=false
 endif
@@ -151,7 +147,7 @@ else
 CORE_CC_EXCLUDE_SRCS += tensorflow/lite/mmap_allocation_disabled.cc
 endif
-BUILD_WITH_NNAPI=true
+BUILD_WITH_NNAPI ?= true
 ifeq ($(BUILD_TYPE),micro)
 BUILD_WITH_NNAPI=false
 endif
@@ -191,7 +187,7 @@ EVALUATION_UTILS_SRCS := \
 BENCHMARK_ALL_SRCS := $(TF_LITE_CC_SRCS) \
 $(wildcard $(BENCHMARK_SRCS_DIR)/*.cc) \
 $(PROFILE_SUMMARIZER_SRCS) \
-$(CMD_LINE_TOOLS_SRCS) \
+$(CMD_LINE_TOOLS_SRCS) \
 $(EVALUATION_UTILS_SRCS)
 BENCHMARK_SRCS := $(filter-out \


@@ -1,4 +1,4 @@
-#!/bin/bash -x
+#!/bin/bash
 # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -14,9 +14,11 @@
 # limitations under the License.
 # ==============================================================================
+set -x
 set -e
 SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
-cd "$SCRIPT_DIR/../../../.."
-CC_PREFIX=aarch64-linux-gnu- make -j 3 -f tensorflow/lite/tools/make/Makefile TARGET=aarch64 TARGET_ARCH=armv8-a
+TENSORFLOW_DIR="${SCRIPT_DIR}/../../../.."
+make -j 4 TARGET=aarch64 -C "${TENSORFLOW_DIR}" -f tensorflow/lite/tools/make/Makefile


@@ -0,0 +1,24 @@
+#!/bin/bash
+# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+set -x
+set -e
+SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
+TENSORFLOW_DIR="${SCRIPT_DIR}/../../../.."
+make -j 4 BUILD_WITH_NNAPI=false -C "${TENSORFLOW_DIR}" -f tensorflow/lite/tools/make/Makefile


@@ -1,4 +1,4 @@
-#!/bin/bash -x
+#!/bin/bash
 # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -14,9 +14,11 @@
 # limitations under the License.
 # ==============================================================================
+set -x
 set -e
 SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
-cd "$SCRIPT_DIR/../../../.."
-CC_PREFIX=arm-linux-gnueabihf- make -j 3 -f tensorflow/lite/tools/make/Makefile TARGET=rpi TARGET_ARCH=armv7l
+TENSORFLOW_DIR="${SCRIPT_DIR}/../../../.."
+make -j 4 TARGET=rpi -C "${TENSORFLOW_DIR}" -f tensorflow/lite/tools/make/Makefile


@@ -18,8 +18,8 @@ pip install --upgrade <wheel>
 Note, unlike tensorflow this will be installed to a tflite_runtime namespace.
 You can then use the Tensorflow Lite interpreter as.
 ```
-import tflite_runtime as tflr
-interpreter = tflr.lite.Interpreter(model_path="foo.tflite")
+from tflite_runtime import interpreter as tflr
+interpreter = tflr.Interpreter(model_path="foo.tflite")
 ```
 This currently works to build on Linux machines including Raspberry Pi. In
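
A fuller usage sketch building on the README snippet above; the model path, input shape, and dtype are placeholders, and the NumPy dependency is assumed:

```
import numpy as np
from tflite_runtime import interpreter as tflr

# Load the model and allocate its tensors.
interpreter = tflr.Interpreter(model_path="foo.tflite")
interpreter.allocate_tensors()

input_details = interpreter.get_input_details()
output_details = interpreter.get_output_details()

# Feed a dummy input that matches the model's expected shape and dtype.
data = np.zeros(input_details[0]['shape'], dtype=input_details[0]['dtype'])
interpreter.set_tensor(input_details[0]['index'], data)
interpreter.invoke()
result = interpreter.get_tensor(output_details[0]['index'])
print(result.shape)
```
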


@@ -16,39 +16,36 @@
 set -e
+PYTHON="${PYTHON:-python}"
 # Find where this script lives and then the Tensorflow root.
-MY_DIRECTORY=`dirname $0`
-export TENSORFLOW_SRC_ROOT=`realpath $MY_DIRECTORY/../../../..`
-export TENSORFLOW_VERSION=`grep "_VERSION = " $TENSORFLOW_SRC_ROOT/tensorflow/tools/pip_package/setup.py | cut -d'=' -f 2 | sed "s/[ '-]//g"`;
+SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
+export TENSORFLOW_SRC_ROOT="${SCRIPT_DIR}/../../../.."
+export TENSORFLOW_VERSION=`grep "_VERSION = " "${TENSORFLOW_SRC_ROOT}/tensorflow/tools/pip_package/setup.py" | cut -d'=' -f 2 | sed "s/[ '-]//g"`;
+TFLITE_ROOT="${TENSORFLOW_SRC_ROOT}/tensorflow/lite"
 # Build a pip build tree.
-BUILD_ROOT=/tmp/tflite_pip
-rm -rf $BUILD_ROOT
-mkdir -p $BUILD_ROOT/tflite_runtime/lite
-mkdir -p $BUILD_ROOT/tflite_runtime/lite/python
+BUILD_ROOT="/tmp/tflite_pip/${PYTHON}"
+rm -rf "${BUILD_ROOT}"
+mkdir -p "${BUILD_ROOT}/tflite_runtime/"
-# Build an importable module tree
-cat > $BUILD_ROOT/tflite_runtime/__init__.py <<EOF;
-import tflite_runtime.lite.interpreter
-EOF
+# Copy necessary source files.
+touch "${BUILD_ROOT}/tflite_runtime/__init__.py"
+cp -r "${TFLITE_ROOT}/python/interpreter_wrapper" "${BUILD_ROOT}"
+cp "${TFLITE_ROOT}/python/interpreter.py" "${BUILD_ROOT}/tflite_runtime/"
+cp "${TFLITE_ROOT}/tools/pip_package/setup.py" "${BUILD_ROOT}"
+cp "${TFLITE_ROOT}/tools/pip_package/MANIFEST.in" "${BUILD_ROOT}"
-cat > $BUILD_ROOT/tflite_runtime/lite/__init__.py <<EOF;
-from interpreter import Interpreter as Interpreter
-EOF
+# Build wheel file.
+cd "${BUILD_ROOT}"
-cat > $BUILD_ROOT/tflite_runtime/lite/python/__init__.py <<EOF;
-# Python module for TensorFlow Lite
-EOF
+if [[ "${TENSORFLOW_TARGET}" == "rpi" ]]; then
+  ${PYTHON} setup.py bdist_wheel --plat-name=linux-armv7l
+elif [[ "${TENSORFLOW_TARGET}" == "aarch64" ]]; then
+  ${PYTHON} setup.py bdist_wheel --plat-name=linux-aarch64
+else
+  ${PYTHON} setup.py bdist_wheel
+fi
-# Copy necessary source files
-TFLITE_ROOT=$TENSORFLOW_SRC_ROOT/tensorflow/lite
-cp -r $TFLITE_ROOT/python/interpreter_wrapper $BUILD_ROOT
-cp $TFLITE_ROOT/python/interpreter.py $BUILD_ROOT/tflite_runtime/lite/
-cp $TFLITE_ROOT/tools/pip_package/setup.py $BUILD_ROOT
-cp $TFLITE_ROOT/tools/pip_package/MANIFEST.in $BUILD_ROOT
-# Build the Pip
-cd $BUILD_ROOT
-python setup.py bdist_wheel


@@ -35,19 +35,19 @@ from setuptools import Extension
 from setuptools import find_packages
 from setuptools import setup
 from setuptools.command.build_py import build_py
-PACKAGE_NAME = 'tflite-runtime'
+PACKAGE_NAME = 'tflite_runtime'
 PACKAGE_VERSION = os.environ['TENSORFLOW_VERSION']
 DOCLINES = __doc__.split('\n')
-PACKAGE = 'tflite_runtime.lite.python'
 TENSORFLOW_DIR = os.environ['TENSORFLOW_SRC_ROOT']
 # Setup cross compiling
-TARGET = (
-    os.environ['TENSORFLOW_TARGET'] if 'TENSORFLOW_TARGET' in os.environ
-    else None)
+TARGET = os.environ.get('TENSORFLOW_TARGET', None)
 if TARGET == 'rpi':
   os.environ['CXX'] = 'arm-linux-gnueabihf-g++'
-  os.environ['CC'] = 'arm-linux-gnueabihf-g++'
+  os.environ['CC'] = 'arm-linux-gnueabihf-gcc'
 elif TARGET == 'aarch64':
   os.environ['CXX'] = 'aarch64-linux-gnu-g++'
   os.environ['CC'] = 'aarch64-linux-gnu-gcc'
 MAKE_CROSS_OPTIONS = ['TARGET=%s' % TARGET] if TARGET else []
 RELATIVE_MAKE_DIR = os.path.join('tensorflow', 'lite', 'tools', 'make')
@@ -69,7 +69,8 @@ def get_build_cpus():
 def make_args(target='', quiet=True):
   """Construct make command line."""
-  args = (['make', 'SHELL=/bin/bash', '-C', TENSORFLOW_DIR]
+  args = (['make', 'SHELL=/bin/bash',
+           'BUILD_WITH_NNAPI=false', '-C', TENSORFLOW_DIR]
           + MAKE_CROSS_OPTIONS +
           ['-f', RELATIVE_MAKEFILE_PATH, '-j',
            str(get_build_cpus())])
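
For illustration, roughly the command list the updated make_args() assembles when TENSORFLOW_TARGET=rpi; the source-root path and CPU count below are placeholders, and the trailing target/quiet handling of the real function is omitted:

```
# Hypothetical expansion of make_args() under TENSORFLOW_TARGET=rpi.
cmd = ['make', 'SHELL=/bin/bash', 'BUILD_WITH_NNAPI=false',
       '-C', '/path/to/tensorflow',               # TENSORFLOW_DIR (placeholder)
       'TARGET=rpi',                              # from MAKE_CROSS_OPTIONS
       '-f', 'tensorflow/lite/tools/make/Makefile',
       '-j', '4']                                 # get_build_cpus() (placeholder)
print(' '.join(cmd))
```
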
@@ -101,6 +102,13 @@ def download_dependencies():
 class CustomBuildExt(build_ext, object):
   """Customized build extension."""
+  def get_ext_filename(self, ext_name):
+    if TARGET:
+      ext_path = ext_name.split('.')
+      return os.path.join(*ext_path) + '.so'
+    return super(CustomBuildExt, self).get_ext_filename(ext_name)
   def run(self):
     download_dependencies()
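
A small sketch of why this override matters when cross-compiling: the default extension filename embeds the host interpreter's ABI and platform tags, which would be wrong for a wheel targeting rpi or aarch64. The module path below mirrors this setup.py; a CPython host is assumed:

```
import sysconfig

# Default behaviour: the suffix comes from the *host* Python, e.g.
# '.cpython-37m-x86_64-linux-gnu.so' on an x86_64 build machine.
host_suffix = sysconfig.get_config_var('EXT_SUFFIX')
print('default :', 'tflite_runtime/_interpreter_wrapper' + host_suffix)

# With the override (TARGET set), the extension is emitted as a plain .so,
# matching the _interpreter_wrapper.<optional-suffix>.so layout described in
# the commit message regardless of the machine doing the build.
print('override:', 'tflite_runtime/_interpreter_wrapper' + '.so')
```
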
@@ -120,14 +128,17 @@ LIB_TFLITE = 'tensorflow-lite'
 LIB_TFLITE_DIR = make_output('libdir')
 ext = Extension(
-    name='%s._interpreter_wrapper' % PACKAGE,
+    name='%s._interpreter_wrapper' % PACKAGE_NAME,
     language='c++',
     sources=['interpreter_wrapper/interpreter_wrapper.i',
-             'interpreter_wrapper/interpreter_wrapper.cc'],
+             'interpreter_wrapper/interpreter_wrapper.cc',
+             'interpreter_wrapper/numpy.cc',
+             'interpreter_wrapper/python_error_reporter.cc',
+             'interpreter_wrapper/python_utils.cc'],
     swig_opts=['-c++',
                '-I%s' % TENSORFLOW_DIR,
                '-module', 'interpreter_wrapper',
-               '-outdir', '.'],
+               '-outdir', PACKAGE_NAME],
     extra_compile_args=['-std=c++11'],
     include_dirs=[TENSORFLOW_DIR,
                   os.path.join(TENSORFLOW_DIR, 'tensorflow', 'lite', 'tools',
@@ -152,7 +163,6 @@ setup(
     keywords='tflite tensorflow tensor machine learning',
     packages=find_packages(exclude=[]),
     ext_modules=[ext],
-    package_dir={PACKAGE: '.'},
     cmdclass={
         'build_ext': CustomBuildExt,
         'build_py': CustomBuildPy,