From 46f7108d78c6a3c0854fe66ce1cd92e5ebb3d6e2 Mon Sep 17 00:00:00 2001
From: Rohan Jain
Date: Mon, 18 May 2020 09:08:29 -0700
Subject: [PATCH] Internal change

PiperOrigin-RevId: 312090528
Change-Id: I474709513b01db8c24c50fd670029451c51cb622
---
 tensorflow/python/keras/layers/embeddings.py | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/tensorflow/python/keras/layers/embeddings.py b/tensorflow/python/keras/layers/embeddings.py
index 3f57fd6cb63..e30e93f02dc 100644
--- a/tensorflow/python/keras/layers/embeddings.py
+++ b/tensorflow/python/keras/layers/embeddings.py
@@ -129,8 +129,10 @@ class Embedding(Layer):
     # since it knows all kernels using the variable only exist on CPU.
     # When eager execution is enabled, the placement decision has to be made
     # right now. Checking for the presence of GPUs to avoid complicating the
-    # TPU codepaths which can handle sparse optimizers.
-    if context.executing_eagerly() and context.context().num_gpus():
+    # TPU codepaths which can handle sparse optimizers. But if we are within
+    # a tf.function, we go back to the graph mode logic and rely on the placer.
+    if (context.executing_eagerly() and context.context().num_gpus() and
+        not ops.inside_function()):
       with ops.device('cpu:0'):
         self.embeddings = self.add_weight(
             shape=(self.input_dim, self.output_dim),