[XLA] Remove the xla_status_add_backtrace flag.
Replace the flag by always emitting the status and the backtrace to LOG(WARNING).

PiperOrigin-RevId: 162958352
parent 17f9320b33
commit 62bced8280
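In effect, every error-status constructor in XLA's util.cc now routes through a single helper that unconditionally logs the status text and the current stack trace at WARNING, instead of first consulting the removed flag. Below is a minimal, self-contained sketch of that pattern; the Status struct, CurrentStackTrace() placeholder, and InvalidArgument() wrapper are simplified stand-ins for this illustration, not the TensorFlow implementations.

    #include <iostream>
    #include <string>

    // Stand-in for tensorflow::Status (illustration only).
    struct Status {
      int code;             // 0 means OK
      std::string message;
      bool ok() const { return code == 0; }
    };

    // Stand-in for tensorflow::CurrentStackTrace() (illustration only).
    std::string CurrentStackTrace() { return "<stack trace would appear here>"; }

    // The pattern this commit adopts: no flag check; the status text and the
    // backtrace are always emitted to the warning log (std::cerr stands in for
    // LOG(WARNING) here), and the original status is returned unchanged.
    Status WithLogBacktrace(const Status& status) {
      std::cerr << "WARNING: " << status.message << "\n";
      std::cerr << "WARNING: " << CurrentStackTrace() << "\n";
      return status;
    }

    // Example error constructor, analogous in spirit to xla::InvalidArgument().
    Status InvalidArgument(const std::string& message) {
      return WithLogBacktrace(Status{3, "INVALID_ARGUMENT: " + message});
    }

    int main() {
      Status s = InvalidArgument("shape mismatch");
      // The caller sees the same error status; the backtrace only goes to the log.
      return s.ok() ? 0 : 1;
    }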
@@ -128,7 +128,6 @@ cc_library(
         ":tfcompile_lib",
         ":tfcompile_proto",
         "//tensorflow/compiler/xla/legacy_flags:debug_options_flags",
-        "//tensorflow/compiler/xla/legacy_flags:util_flags",
         "//tensorflow/compiler/xla/service:compiler",
         "//tensorflow/core:core_cpu",
         "//tensorflow/core:core_cpu_internal",
@@ -24,7 +24,6 @@ limitations under the License.
 #include "tensorflow/compiler/aot/tfcompile.pb.h"
 #include "tensorflow/compiler/aot/tfcompile_util.h"
 #include "tensorflow/compiler/xla/legacy_flags/debug_options_flags.h"
-#include "tensorflow/compiler/xla/legacy_flags/util_flags.h"
 #include "tensorflow/compiler/xla/service/compiler.h"
 #include "tensorflow/core/framework/function.h"
 #include "tensorflow/core/framework/graph.pb.h"
@@ -130,7 +129,6 @@ int main(int argc, char** argv) {
   std::vector<tensorflow::Flag> flag_list;
   AppendMainFlags(&flag_list, &flags);
   xla::legacy_flags::AppendDebugOptionsFlags(&flag_list);
-  xla::legacy_flags::AppendUtilFlags(&flag_list);
 
   tensorflow::string usage = tensorflow::tfcompile::kUsageHeader;
   usage += tensorflow::Flags::Usage(argv[0], flag_list);
@@ -168,7 +168,6 @@ cc_library(
         ":status",
         ":types",
         ":xla_data_proto",
-        "//tensorflow/compiler/xla/legacy_flags:util_flags",
         "//tensorflow/core:lib",
     ],
 )
@@ -41,18 +41,6 @@ cc_test(
     ],
 )
 
-cc_library(
-    name = "util_flags",
-    srcs = ["util_flags.cc"],
-    hdrs = ["util_flags.h"],
-    deps =
-        [
-            ":parse_flags_from_env",
-            "//tensorflow/core:framework_internal",
-            "//tensorflow/core:lib",
-        ],
-)
-
 cc_library(
     name = "debug_options_flags",
     srcs = ["debug_options_flags.cc"],
@@ -1,62 +0,0 @@
-/* Copyright 2017 The TensorFlow Authors. All Rights Reserved.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-==============================================================================*/
-
-// Legacy flags for XLA's util module.
-
-#include <mutex>  // NOLINT(build/c++11): only using std::call_once, not mutex.
-#include <vector>
-
-#include "tensorflow/compiler/xla/legacy_flags/parse_flags_from_env.h"
-#include "tensorflow/compiler/xla/legacy_flags/util_flags.h"
-#include "tensorflow/core/platform/types.h"
-#include "tensorflow/core/util/command_line_flags.h"
-
-namespace xla {
-namespace legacy_flags {
-
-// Pointers to the parsed value of the flags and flag descriptors, initialized
-// via flags_init.
-static UtilFlags* flags;
-static std::vector<tensorflow::Flag>* flag_list;
-static std::once_flag flags_init;
-
-// Allocate *flags.  Called via call_once(&flags_init,...).
-static void AllocateFlags() {
-  flags = new UtilFlags;
-  flags->xla_status_add_backtrace = false;
-  flag_list = new std::vector<tensorflow::Flag>({
-      tensorflow::Flag("xla_status_add_backtrace",
-                       &flags->xla_status_add_backtrace,
-                       "add backtraces to XLA-produced status values"),
-  });
-  ParseFlagsFromEnv(*flag_list);
-}
-
-// Append to *append_to flag definitions associated with XLA's util module.
-void AppendUtilFlags(std::vector<tensorflow::Flag>* append_to) {
-  std::call_once(flags_init, &AllocateFlags);
-  append_to->insert(append_to->end(), flag_list->begin(), flag_list->end());
-}
-
-// Return a pointer to the UtilFlags struct;
-// repeated calls return the same pointer.
-// This should be called only after Flags::Parse() has returned.
-UtilFlags* GetUtilFlags() {
-  std::call_once(flags_init, &AllocateFlags);
-  return flags;
-}
-
-}  // namespace legacy_flags
-}  // namespace xla
@@ -1,45 +0,0 @@
-/* Copyright 2017 The TensorFlow Authors. All Rights Reserved.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-==============================================================================*/
-
-#ifndef TENSORFLOW_COMPILER_XLA_LEGACY_FLAGS_UTIL_FLAGS_H_
-#define TENSORFLOW_COMPILER_XLA_LEGACY_FLAGS_UTIL_FLAGS_H_
-
-// Legacy flags for the XLA's util module.
-
-#include <vector>
-
-#include "tensorflow/core/platform/types.h"
-#include "tensorflow/core/util/command_line_flags.h"
-
-namespace xla {
-namespace legacy_flags {
-
-// Append to *flag_list flag definitions associated with XLA's util module.
-void AppendUtilFlags(std::vector<tensorflow::Flag>* flag_list);
-
-// The values of flags associated with XLA's util module.
-typedef struct {
-  bool xla_status_add_backtrace;  // add backtraces to XLA-produced statuses
-} UtilFlags;
-
-// Return a pointer to the UtilFlags struct;
-// repeated calls return the same pointer.
-// This should be called only after Flags::Parse() has returned.
-UtilFlags* GetUtilFlags();
-
-}  // namespace legacy_flags
-}  // namespace xla
-
-#endif  // TENSORFLOW_COMPILER_XLA_LEGACY_FLAGS_UTIL_FLAGS_H_
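For context on what the two deleted files above removed: the flag was registered through the legacy_flags machinery (ParseFlagsFromEnv plus the tfcompile flag list), so it could previously be toggled per process. A hypothetical invocation of the old behavior, assuming the TF_XLA_FLAGS environment variable conventionally read by ParseFlagsFromEnv, would have looked roughly like:

    TF_XLA_FLAGS="--xla_status_add_backtrace=true" ./some_xla_binary

After this change that flag no longer exists, and the backtrace is logged unconditionally rather than appended to the status message.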
@@ -19,7 +19,6 @@ limitations under the License.
 #include <stdarg.h>
 #include <numeric>
 
-#include "tensorflow/compiler/xla/legacy_flags/util_flags.h"
 #include "tensorflow/compiler/xla/types.h"
 #include "tensorflow/core/lib/core/errors.h"
 #include "tensorflow/core/lib/strings/numbers.h"
@@ -32,18 +31,12 @@ limitations under the License.
 namespace xla {
 namespace {
 
-// Adds a backtrace to the provided status iff the xla_status_add_backtrace flag
-// is set. This is useful for quickly tracing status errors observed coming out
-// of the service.
-Status MaybeAddBacktrace(const Status& prior) {
-  DCHECK(!prior.ok());
-  if (legacy_flags::GetUtilFlags()->xla_status_add_backtrace) {
-    return Status{prior.code(),
-                  tensorflow::strings::StrCat(prior.error_message(), " :: ",
-                                              tensorflow::CurrentStackTrace())};
-  } else {
-    return prior;
-  }
-}
+// Logs the provided status message with a backtrace.
+Status WithLogBacktrace(const Status& status) {
+  CHECK(!status.ok());
+  LOG(WARNING) << status.ToString();
+  LOG(WARNING) << tensorflow::CurrentStackTrace();
+  return status;
+}
 
 }  // namespace
@@ -86,7 +79,7 @@ Status InvalidArgument(const char* format, ...) {
   va_start(args, format);
   tensorflow::strings::Appendv(&message, format, args);
   va_end(args);
-  return MaybeAddBacktrace(tensorflow::errors::InvalidArgument(message));
+  return WithLogBacktrace(tensorflow::errors::InvalidArgument(message));
 }
 
 Status Unimplemented(const char* format, ...) {
@@ -95,7 +88,7 @@ Status Unimplemented(const char* format, ...) {
   va_start(args, format);
   tensorflow::strings::Appendv(&message, format, args);
   va_end(args);
-  return MaybeAddBacktrace(tensorflow::errors::Unimplemented(message));
+  return WithLogBacktrace(tensorflow::errors::Unimplemented(message));
 }
 
 Status InternalError(const char* format, ...) {
@@ -104,7 +97,7 @@ Status InternalError(const char* format, ...) {
   va_start(args, format);
   tensorflow::strings::Appendv(&message, format, args);
   va_end(args);
-  return MaybeAddBacktrace(tensorflow::errors::Internal(message));
+  return WithLogBacktrace(tensorflow::errors::Internal(message));
 }
 
 Status FailedPrecondition(const char* format, ...) {
@@ -113,7 +106,7 @@ Status FailedPrecondition(const char* format, ...) {
   va_start(args, format);
   tensorflow::strings::Appendv(&message, format, args);
   va_end(args);
-  return MaybeAddBacktrace(tensorflow::errors::FailedPrecondition(message));
+  return WithLogBacktrace(tensorflow::errors::FailedPrecondition(message));
 }
 
 Status ResourceExhausted(const char* format, ...) {
@@ -122,7 +115,7 @@ Status ResourceExhausted(const char* format, ...) {
   va_start(args, format);
   tensorflow::strings::Appendv(&message, format, args);
   va_end(args);
-  return MaybeAddBacktrace(tensorflow::errors::ResourceExhausted(message));
+  return WithLogBacktrace(tensorflow::errors::ResourceExhausted(message));
 }
 
 Status NotFound(const char* format, ...) {
@@ -131,7 +124,7 @@ Status NotFound(const char* format, ...) {
   va_start(args, format);
   tensorflow::strings::Appendv(&message, format, args);
   va_end(args);
-  return MaybeAddBacktrace(tensorflow::errors::NotFound(message));
+  return WithLogBacktrace(tensorflow::errors::NotFound(message));
 }
 
 Status Unavailable(const char* format, ...) {
@@ -140,7 +133,7 @@ Status Unavailable(const char* format, ...) {
   va_start(args, format);
   tensorflow::strings::Appendv(&message, format, args);
   va_end(args);
-  return MaybeAddBacktrace(tensorflow::errors::Unavailable(message));
+  return WithLogBacktrace(tensorflow::errors::Unavailable(message));
 }
 
 string Reindent(tensorflow::StringPiece original,
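With the flag gone, every XLA error-status constructor now produces two WARNING log records as a side effect: the status text and the stack trace captured at the point of creation. A hypothetical caller fragment (names taken from the diff above; not a complete program) would behave roughly like:

    xla::Status s = xla::Unimplemented("op %s is not supported", "MyCustomOp");
    // s still carries only the formatted message; the backtrace appears in the
    // WARNING log rather than in the status itself, so code that previously
    // relied on --xla_status_add_backtrace should consult the log instead.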