Tensorflow virtual pip package
PiperOrigin-RevId: 251972644
parent 16450b47d7
commit f1ffa0225a
@@ -20,7 +20,7 @@ from __future__ import absolute_import
 from __future__ import division
 from __future__ import print_function
 
-import tensorflow.lite.python.op_hint as op_hint
+from tensorflow.lite.python.op_hint import OpHint
 from tensorflow.python.eager import context
 from tensorflow.python.framework import dtypes
 from tensorflow.python.framework import ops
@@ -190,7 +190,7 @@ def dynamic_rnn(cell,
       "parent_last_child_output": parent_last_child_output,
       "internal_children_input_output": internal_children_input_output
   }
-  tflite_wrapper = op_hint.OpHint(
+  tflite_wrapper = OpHint(
       "TfLiteDynamicRnn",
       level=2,
       children_inputs_mappings=inputs_outputs_mappings)
@@ -21,7 +21,7 @@ from __future__ import division
 from __future__ import print_function
 import itertools
 
-import tensorflow.lite.python.op_hint as op_hint
+from tensorflow.lite.python.op_hint import OpHint
 from tensorflow.python.keras import activations
 from tensorflow.python.keras import initializers
 from tensorflow.python.layers import base as base_layer
@@ -76,7 +76,7 @@ class TfLiteRNNCell(rnn_cell_impl.LayerRNNCell):
     # Inputs must be Rank-2.
     self.input_spec = base_layer.InputSpec(ndim=2)
 
-    self._tflite_wrapper = op_hint.OpHint("UnidirectionalSequenceRnn")
+    self._tflite_wrapper = OpHint("UnidirectionalSequenceRnn")
     self._num_units = num_units
     if activation:
       self._activation = activations.get(activation)
@@ -254,7 +254,7 @@ class TFLiteLSTMCell(rnn_cell_impl.LayerRNNCell):
     # TODO(raziel): layers stuff -- chop if un-layerizing Op.
     self.input_spec = base_layer.InputSpec(ndim=2)
 
-    self._tflite_wrapper = op_hint.OpHint("UnidirectionalSequenceLstm")
+    self._tflite_wrapper = OpHint("UnidirectionalSequenceLstm")
 
     self._num_units = num_units
     self._use_peepholes = use_peepholes
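The hunks above are a mechanical import cleanup: rather than importing the op_hint module and reaching through it, the OpHint class is imported directly. As an illustration (assuming a TensorFlow build that still ships tensorflow.lite.python.op_hint), the two spellings are interchangeable at the call sites:

# Illustration only, not part of this commit: both import styles touched by the
# hunks above construct the same kind of OpHint object.
import tensorflow.lite.python.op_hint as op_hint
from tensorflow.lite.python.op_hint import OpHint

hint_via_module = op_hint.OpHint("UnidirectionalSequenceRnn")
hint_via_class = OpHint("UnidirectionalSequenceRnn")
print(type(hint_via_module) is type(hint_via_class))  # True, same class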
@@ -251,4 +251,5 @@ tensorflow/api_template_v1.__init__.py
 tensorflow/compat_template_v1.__init__.py
 tensorflow/compat_template.__init__.py
 tensorflow/api_template.__init__.py
 tensorflow/__init__.py
+tensorflow/virtual_root.__init__.py
@@ -132,6 +132,7 @@ function prepare_src() {
   popd > /dev/null
   cp -R $RUNFILES/third_party/eigen3 ${TMPDIR}/third_party
 
+  cp tensorflow/virtual_root.__init__.py ${TMPDIR}
   cp tensorflow/tools/pip_package/MANIFEST.in ${TMPDIR}
   cp tensorflow/tools/pip_package/README ${TMPDIR}
   cp tensorflow/tools/pip_package/setup.py ${TMPDIR}
@@ -157,6 +158,21 @@ function build_wheel() {
   fi
 
   pushd ${TMPDIR} > /dev/null
+
+  # In order to break the circular dependency between tensorflow and
+  # tensorflow_estimator which forces us to do a multi-step release, we are
+  # creating a virtual pip package called tensorflow and moving all the tf code
+  # into another pip called tensorflow_core:
+  #
+  # * move code from tensorflow to tensorflow_core
+  # * create the virtual pip package: create folder and __init__.py file with
+  #   needed code for transparent forwarding
+  #
+  # This is transparent to internal code or to code not using the pip packages.
+  mv tensorflow tensorflow_core
+  mkdir tensorflow
+  mv virtual_root.__init__.py tensorflow/__init__.py
+
   rm -f MANIFEST
   echo $(date) : "=== Building wheel"
   "${PYTHON_BIN_PATH:-python}" setup.py bdist_wheel ${PKG_NAME_FLAG} >/dev/null
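The comment block above is the core of the change: the wheel relocates all real code to tensorflow_core and ships tensorflow only as a thin forwarding package built from virtual_root.__init__.py (added below). A minimal sketch of the intended effect for downstream code, assuming a wheel built with this layout is installed:

# Sketch only: expected behaviour of a wheel built with the relayout above.
import sys

import tensorflow as tf  # the thin virtual package; its __init__.py is virtual_root

# The implementation lives under tensorflow_core; the virtual root merely
# forwards to it, so existing `import tensorflow` users are unaffected.
print(tf.__file__)                         # .../site-packages/tensorflow/__init__.py
print("tensorflow.python" in sys.modules)  # True, registered by the forwarder
print(sys.modules["tensorflow.python"])    # resolves into tensorflow_core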
@@ -141,8 +141,8 @@ class InstallCommand(InstallCommandBase):
 
   def finalize_options(self):
     ret = InstallCommandBase.finalize_options(self)
-    self.install_headers = os.path.join(self.install_purelib,
-                                        'tensorflow', 'include')
+    self.install_headers = os.path.join(self.install_purelib, 'tensorflow_core',
+                                        'include')
     self.install_lib = self.install_platlib
     return ret
 
@@ -180,14 +180,14 @@ class InstallHeaders(Command):
     # directories for -I
     install_dir = re.sub('/google/protobuf_archive/src', '', install_dir)
 
-    # Copy external code headers into tensorflow/include.
+    # Copy external code headers into tensorflow_core/include.
     # A symlink would do, but the wheel file that gets created ignores
     # symlink within the directory hierarchy.
     # NOTE(keveman): Figure out how to customize bdist_wheel package so
     # we can do the symlink.
     external_header_locations = [
-        'tensorflow/include/external/eigen_archive/',
-        'tensorflow/include/external/com_google_absl/',
+        'tensorflow_core/include/external/eigen_archive/',
+        'tensorflow_core/include/external/com_google_absl/',
     ]
     for location in external_header_locations:
       if location in install_dir:
tensorflow/virtual_root.__init__.py (new file, 115 lines)
@@ -0,0 +1,115 @@
+# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""TensorFlow root package"""
+
+from __future__ import absolute_import as _absolute_import
+from __future__ import division as _division
+from __future__ import print_function as _print_function
+
+import sys as _sys
+import importlib as _importlib
+import types as _types
+
+
+# Since TensorFlow Python code now resides in tensorflow_core but TensorFlow
+# ecosystem code (e.g. estimator, but also even tensorflow) imports tensorflow,
+# we need to do forwarding between the two. To do so, we use a lazy loader to
+# load and forward the top level modules. We cannot use the LazyLoader defined
+# by tensorflow at tensorflow/python/util/lazy_loader.py because to use it we
+# would already need to import tensorflow. Hence, we define it inline.
+class _LazyLoader(_types.ModuleType):
+  """Lazily import a module so that we can forward it."""
+
+  # The lint error here is incorrect.
+  def __init__(self, local_name, parent_module_globals, name):  # pylint: disable=super-on-old-class
+    self._local_name = local_name
+    self._parent_module_globals = parent_module_globals
+    super(_LazyLoader, self).__init__(name)
+
+  def _load(self):
+    """Import the target module and insert it into the parent's namespace."""
+    module = _importlib.import_module(self.__name__)
+    self._parent_module_globals[self._local_name] = module
+    self.__dict__.update(module.__dict__)
+    return module
+
+  def __getattr__(self, item):
+    module = self._load()
+    return getattr(module, item)
+
+  def __dir__(self):
+    module = self._load()
+    return dir(module)
+
+
+# Forwarding a module is as simple as lazy loading the module from the new path
+# and then registering it to sys.modules using the old path.
+def _forward_module(old_name):
+  parts = old_name.split(".")
+  parts[0] = parts[0] + "_core"
+  local_name = parts[-1]
+  existing_name = ".".join(parts)
+  _module = _LazyLoader(local_name, globals(), existing_name)
+  return _sys.modules.setdefault(old_name, _module)
+
+
+# This list should contain all modules _immediately_ under tensorflow.
+_top_level_modules = [
+    "tensorflow._api",
+    "tensorflow.python",
+    "tensorflow.tools",
+    "tensorflow.core",
+    "tensorflow.compiler",
+    "tensorflow.lite",
+    "tensorflow.keras",
+]
+# Estimator needs to be handled separately so we can still allow both
+# import tensorflow_estimator and import tensorflow.estimator to work.
+# Only in the second case do we actually need to do forwarding; the first case
+# already defines most of the hierarchy and eagerly forwarding would result in
+# an import loop.
+if "tensorflow_estimator" not in _sys.modules:
+  _root_estimator = False
+  _top_level_modules.append("tensorflow.estimator")
+else:
+  _root_estimator = True
+
+# Lazy load all of the _top_level_modules; we don't need their names anymore.
+_top_level_modules = [_forward_module(m) for m in _top_level_modules]
+
+# We still need all the names that are toplevel on tensorflow_core.
+from tensorflow_core import *
+
+# We also need to bring in keras if available in tensorflow_core.
+# The import * above doesn't import it as __all__ is updated before keras is hooked.
+try:
+  from tensorflow_core import keras
+except ImportError as e:
+  pass
+
+# Similarly for estimator, but only if this file is not read via
+# import tensorflow_estimator (same reasoning as above when forwarding estimator
+# separately from the rest of the top level modules).
+if not _root_estimator:
+  try:
+    from tensorflow_core import estimator
+  except ImportError as e:
+    pass
+
+# And again for tensorboard.
+try:
+  from tensorflow_core import tensorboard
+except ImportError as e:
+  pass
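Taken together, _LazyLoader and _forward_module implement the forwarding: a lazy module object is registered in sys.modules under the old dotted name, and the tensorflow_core counterpart is imported only when an attribute is first accessed. Below is a standalone illustration of the same trick using only the standard library; the alias legacy_json is made up for this sketch and forwards to the stdlib json module:

# Standalone illustration, not part of the commit: the sys.modules forwarding
# trick used by virtual_root.__init__.py, with a made-up alias "legacy_json".
import importlib
import sys
import types


class LazyForwarder(types.ModuleType):
  """Imports the target module on first attribute access and forwards to it."""

  def _load(self):
    module = importlib.import_module(self.__name__)
    self.__dict__.update(module.__dict__)
    return module

  def __getattr__(self, item):
    return getattr(self._load(), item)

  def __dir__(self):
    return dir(self._load())


# Register the forwarder under the old name; nothing is actually imported yet.
sys.modules.setdefault("legacy_json", LazyForwarder("json"))

import legacy_json  # satisfied straight from sys.modules

print(legacy_json.dumps({"forwarded": True}))  # json.dumps via the forwarder

The real virtual_root.__init__.py additionally writes the loaded module back into the caller's globals and special-cases estimator so that both import tensorflow_estimator and import tensorflow.estimator work without creating an import loop.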