Make the semantics of repository_root more consistent.

PiperOrigin-RevId: 326098252
Change-Id: I036f3943b1e35aa0543906128e0d78f59ccf1735
A. Unique TensorFlower 2020-08-11 14:09:23 -07:00 committed by TensorFlower Gardener
parent c717e051fc
commit 45e89479c9
10 changed files with 114 additions and 127 deletions
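Summary of the semantic change: repository_root now always names the directory that profile runs are written into directly (for TensorBoard, <logdir>/plugins/profile), instead of a logdir that the save helpers silently extend with plugins/profile. Callers resolve the plugin directory once at the entry point and pass it down. Below is a minimal caller-side sketch of the new flow; the header path and the TraceSketch() wrapper are illustrative assumptions, not part of this commit.

#include <iostream>
#include <string>

#include "tensorflow/core/platform/errors.h"
#include "tensorflow/core/platform/status.h"
#include "tensorflow/core/profiler/profiler_service.pb.h"
// Assumed location of the save helpers touched by this commit.
#include "tensorflow/core/profiler/rpc/client/save_profile.h"

namespace tensorflow {
namespace profiler {

// Hypothetical wrapper mirroring what Trace()/Profile() now do.
Status TraceSketch(const std::string& logdir, const std::string& session_id,
                   const std::string& host, const ProfileResponse& response) {
  // TensorBoard-specific setup stays at the entry point: drop the
  // ".profile-empty" event file and resolve <logdir>/plugins/profile once.
  TF_RETURN_IF_ERROR(MaybeCreateEmptyEventFile(logdir));
  std::string repository_root = GetTensorBoardProfilePluginDir(logdir);

  // Below this point repository_root is treated as a plain directory;
  // SaveProfile() writes each tool's data to <repository_root>/<session_id>/.
  return SaveProfile(repository_root, session_id, host, response, &std::cout);
}

}  // namespace profiler
}  // namespace tensorflow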

@@ -81,7 +81,7 @@ Status ConvertXSpaceToProfileResponse(const XSpace& xspace,
     response->set_empty_trace(true);
     return Status::OK();
   }
-  TF_RETURN_IF_ERROR(SaveGzippedToolDataToTensorboardProfile(
+  TF_RETURN_IF_ERROR(SaveGzippedToolData(
       req.repository_root(), req.session_id(), req.host_name(),
       ToolName(kTraceViewer), TraceEventsToJson(trace)));
   // Trace viewer is the only tool, skip OpStats conversion.
@@ -110,7 +110,7 @@ Status ConvertXSpaceToProfileResponse(const XSpace& xspace,
   if (tools.contains(kMemoryProfile)) {
     std::string json_output;
     TF_RETURN_IF_ERROR(ConvertXSpaceToMemoryProfileJson(xspace, &json_output));
-    TF_RETURN_IF_ERROR(SaveGzippedToolDataToTensorboardProfile(
+    TF_RETURN_IF_ERROR(SaveGzippedToolData(
         req.repository_root(), req.session_id(), req.host_name(),
         ToolName(kMemoryProfile), json_output));
   }

@@ -7,8 +7,7 @@ import "tensorflow/core/profiler/profiler_service.proto";
 message NewProfileSessionRequest {
   ProfileRequest request = 1;
   // The place where we will dump profile data. We will normally use
-  // MODEL_DIR as the repository root. The data will be saved under
-  // MODEL_DIR/plugins/profile/.
+  // MODEL_DIR/plugins/profile as the repository root.
   string repository_root = 2;
   repeated string hosts = 3;
   string session_id = 4;

@@ -48,8 +48,7 @@ message ProfileRequest {
   ProfileOptions opts = 4;

   // The place where we will dump profile data. We will normally use
-  // MODEL_DIR as the repository root. The data will be saved under
-  // MODEL_DIR/plugins/profile/.
+  // MODEL_DIR/plugins/profile/ as the repository root.
   string repository_root = 5;

   // The user provided profile session identifier.

@@ -26,6 +26,8 @@ cc_library(
         "//tensorflow/core/profiler:profiler_options_proto_cc",
         "//tensorflow/core/profiler:profiler_service_proto_cc",
         "//tensorflow/core/profiler/convert:xplane_to_profile_response",
+        "//tensorflow/core/profiler/protobuf:xplane_proto_cc",
+        "//tensorflow/core:lib_internal",
     ] + tf_profiler_client_deps(),
 )

@@ -22,6 +22,7 @@ limitations under the License.
 #include "absl/strings/str_join.h"
 #include "absl/strings/str_split.h"
 #include "tensorflow/core/platform/errors.h"
+#include "tensorflow/core/platform/host_info.h"
 #include "tensorflow/core/platform/status.h"
 #include "tensorflow/core/platform/types.h"
 #include "tensorflow/core/profiler/convert/xplane_to_profile_response.h"
@@ -47,6 +48,30 @@ MonitorRequest PopulateMonitorRequest(int duration_ms, int monitoring_level,
   return request;
 }

+ProfileRequest PopulateProfileRequest(int duration_ms,
+                                      const std::string& repository_root,
+                                      const std::string& session_id,
+                                      const std::string& host_name,
+                                      const ProfileOptions& opts) {
+  ProfileRequest request;
+  request.set_duration_ms(duration_ms);
+  request.set_max_events(kMaxEvents);
+  request.set_repository_root(repository_root);
+  request.set_session_id(session_id);
+  request.set_host_name(host_name);
+  request.add_tools("trace_viewer");
+  request.add_tools("op_profile");
+  request.add_tools("input_pipeline");
+  request.add_tools("kernel_stats");
+  request.add_tools("memory_viewer");
+  request.add_tools("memory_profile");
+  request.add_tools("overview_page");
+  request.add_tools("pod_viewer");
+  request.add_tools("tensorflow_stats");
+  *request.mutable_opts() = opts;
+  return request;
+}
+
 NewProfileSessionRequest PopulateNewProfileSessionRequest(
     const std::string& service_addr, const std::string& repository_root,
     const std::vector<string>& hostnames, int duration_ms,
@@ -87,20 +112,20 @@ Status ConvertXSpaceToToolsInProfileResponse(const ProfileRequest& request,
   return Status::OK();
 }

-Status Profile(const std::string& service_addr, const std::string& logdir,
-               int duration_ms, const std::string& session_id,
-               const ProfileOptions& opts) {
+Status Profile(const std::string& service_addr,
+               const std::string& repository_root, int duration_ms,
+               const std::string& session_id, const ProfileOptions& opts) {
   std::vector<std::string> parts = absl::StrSplit(service_addr, ':');
-  ProfileRequest request =
-      PopulateProfileRequest(duration_ms, logdir, session_id, parts[0], opts);
+  ProfileRequest request = PopulateProfileRequest(duration_ms, repository_root,
+                                                  session_id, parts[0], opts);
   ProfileResponse response;
   TF_RETURN_IF_ERROR(ProfileGrpc(service_addr, request, &response));
   if (!response.empty_trace()) {
     TF_RETURN_IF_ERROR(
         ConvertXSpaceToToolsInProfileResponse(request, &response));
-    TF_RETURN_IF_ERROR(SaveTensorboardProfile(
-        logdir, session_id, request.host_name(), response, &std::cout));
+    TF_RETURN_IF_ERROR(SaveProfile(repository_root, session_id,
+                                   request.host_name(), response, &std::cout));
     // Print this at the end so that it's not buried in irrelevant LOG messages.
     std::cout
         << "NOTE: using the trace duration " << duration_ms << "ms.\n"
@@ -138,30 +163,6 @@ Status NewSession(const std::string& service_addr,

 }  // namespace

-ProfileRequest PopulateProfileRequest(int duration_ms,
-                                      const std::string& repository_root,
-                                      const std::string& session_id,
-                                      const std::string& host_name,
-                                      const ProfileOptions& opts) {
-  ProfileRequest request;
-  request.set_duration_ms(duration_ms);
-  request.set_max_events(kMaxEvents);
-  request.set_repository_root(repository_root);
-  request.set_session_id(session_id);
-  request.set_host_name(host_name);
-  request.add_tools("trace_viewer");
-  request.add_tools("op_profile");
-  request.add_tools("input_pipeline");
-  request.add_tools("kernel_stats");
-  request.add_tools("memory_viewer");
-  request.add_tools("memory_profile");
-  request.add_tools("overview_page");
-  request.add_tools("pod_viewer");
-  request.add_tools("tensorflow_stats");
-  *request.mutable_opts() = opts;
-  return request;
-}
-
 // Starts tracing on a single or multiple hosts and saves the result in the
 // given logdir. If no trace was collected, retries tracing for
 // num_tracing_attempts.
@@ -174,6 +175,9 @@ Status Trace(const std::string& service_addr, const std::string& logdir,
   if (!workers_list.empty()) {
     hostnames = absl::StrSplit(workers_list, ',');
   }
+  TF_RETURN_IF_ERROR(MaybeCreateEmptyEventFile(logdir));
+  std::string repository_root =
+      profiler::GetTensorBoardProfilePluginDir(logdir);

   Status status = Status::OK();
   int remaining_attempts = num_tracing_attempts;
@@ -181,9 +185,10 @@ Status Trace(const std::string& service_addr, const std::string& logdir,
     std::cout << "Starting to trace for " << duration_ms << " ms. "
               << "Remaining attempt(s): " << --remaining_attempts << std::endl;
     if (hostnames.empty()) {
-      status = Profile(service_addr, logdir, duration_ms, session_id, opts);
+      status =
+          Profile(service_addr, repository_root, duration_ms, session_id, opts);
     } else {
-      status = NewSession(service_addr, logdir, hostnames, duration_ms,
+      status = NewSession(service_addr, repository_root, hostnames, duration_ms,
                           session_id, opts);
     }
     if (remaining_attempts <= 0 || status.ok() || !ShouldRetryTracing(status))
@@ -213,5 +218,23 @@ Status Monitor(const std::string& service_addr, int duration_ms,
   return Status::OK();
 }

+Status ExportToTensorBoard(const XSpace& xspace, const std::string& logdir) {
+  TF_RETURN_IF_ERROR(MaybeCreateEmptyEventFile(logdir));
+  ProfileResponse response;
+  ProfileRequest request = PopulateProfileRequest(
+      /*duration_ms=*/0, GetTensorBoardProfilePluginDir(logdir),
+      GetCurrentTimeStampAsString(), port::Hostname(), /*opts=*/{});
+  TF_RETURN_IF_ERROR(
+      ConvertXSpaceToProfileResponse(xspace, request, &response));
+  std::stringstream ss;  // Record LOG messages.
+  TF_RETURN_IF_ERROR(SaveProfile(request.repository_root(),
+                                 request.session_id(), request.host_name(),
+                                 response, &ss));
+  LOG(INFO) << ss.str();
+  return Status::OK();
+}
+
 }  // namespace profiler
 }  // namespace tensorflow

@@ -22,15 +22,13 @@ limitations under the License.
 #include "tensorflow/core/platform/status.h"
 #include "tensorflow/core/profiler/profiler_options.pb.h"
 #include "tensorflow/core/profiler/profiler_service.pb.h"
+#include "tensorflow/core/profiler/protobuf/xplane.pb.h"

 namespace tensorflow {
 namespace profiler {

-ProfileRequest PopulateProfileRequest(int duration_ms,
-                                      const std::string& repository_root,
-                                      const std::string& session_id,
-                                      const std::string& host_name,
-                                      const ProfileOptions& opts);
+// Convert XSpace to tool data and saves under <logdir>/plugins/profile/.
+Status ExportToTensorBoard(const XSpace& xspace, const std::string& logdir);

 // Collects one sample of monitoring profile and shows user-friendly metrics.
 // If timestamp flag is true, timestamp will be displayed in "%H:%M:%S" format.

@@ -82,10 +82,8 @@ string ProfilerJoinPath(const T&... args) {
 constexpr char kProtoTraceFileName[] = "trace";
 constexpr char kTfStatsHelperSuffix[] = "tf_stats_helper_result";

-Status DumpToolDataToLogDirectory(absl::string_view run_dir,
-                                  absl::string_view host,
-                                  const ProfileToolData& tool,
-                                  std::ostream* os) {
+Status DumpToolData(absl::string_view run_dir, absl::string_view host,
+                    const ProfileToolData& tool, std::ostream* os) {
   // Don't save the intermediate results for combining the per host tool data.
   if (absl::EndsWith(tool.name(), kTfStatsHelperSuffix)) return Status::OK();
   string host_prefix = host.empty() ? "" : absl::StrCat(host, ".");
@@ -99,23 +97,6 @@ Status DumpToolDataToLogDirectory(absl::string_view run_dir,
   return Status::OK();
 }

-// Creates an empty event file if not already exists, which indicates that we
-// have a plugins/profile/ directory in the current logdir.
-Status MaybeCreateEmptyEventFile(const string& logdir) {
-  // Suffix for an empty event file. it should be kept in sync with
-  // _EVENT_FILE_SUFFIX in tensorflow/python/eager/profiler.py.
-  constexpr char kProfileEmptySuffix[] = ".profile-empty";
-  std::vector<string> children;
-  TF_RETURN_IF_ERROR(Env::Default()->GetChildren(logdir, &children));
-  for (const string& child : children) {
-    if (absl::EndsWith(child, kProfileEmptySuffix)) {
-      return Status::OK();
-    }
-  }
-  EventsWriter event_writer(ProfilerJoinPath(logdir, "events"));
-  return event_writer.InitWithSuffix(kProfileEmptySuffix);
-}
-
 Status WriteGzippedDataToFile(const string& filepath, const string& data) {
   std::unique_ptr<WritableFile> file;
   TF_RETURN_IF_ERROR(Env::Default()->NewWritableFile(filepath, &file));
@@ -129,20 +110,14 @@ Status WriteGzippedDataToFile(const string& filepath, const string& data) {
   return Status::OK();
 }

-Status GetOrCreateProfileRunDir(const string& logdir, const string& run,
-                                string* profile_run_dir, std::ostream* os) {
-  // Dumps profile data to <logdir>/plugins/profile/<run>/.
-  *profile_run_dir =
-      ProfilerJoinPath(GetTensorBoardProfilePluginDir(logdir), run);
-  *os << "Creating directory: " << *profile_run_dir;
-  TF_RETURN_IF_ERROR(Env::Default()->RecursivelyCreateDir(*profile_run_dir));
-  // Creates an empty event file so that TensorBoard plugin logic can find
-  // the logdir.
-  TF_RETURN_IF_ERROR(MaybeCreateEmptyEventFile(logdir));
+Status GetOrCreateRunDir(const string& repository_root, const string& run,
+                         string* run_dir, std::ostream* os) {
+  // Dumps profile data to <repository_root>/<run>/.
+  *run_dir = ProfilerJoinPath(repository_root, run);
+  *os << "Creating directory: " << *run_dir;
+  TF_RETURN_IF_ERROR(Env::Default()->RecursivelyCreateDir(*run_dir));
   return Status::OK();
 }

 }  // namespace

 string GetTensorBoardProfilePluginDir(const string& logdir) {
@@ -151,33 +126,42 @@ string GetTensorBoardProfilePluginDir(const string& logdir) {
   return ProfilerJoinPath(logdir, kPluginName, kProfileName);
 }

-Status SaveTensorboardProfile(const string& logdir, const string& run,
-                              const string& host,
-                              const ProfileResponse& response,
-                              std::ostream* os) {
-  string profile_run_dir;
-  TF_RETURN_IF_ERROR(
-      GetOrCreateProfileRunDir(logdir, run, &profile_run_dir, os));
+Status MaybeCreateEmptyEventFile(const string& logdir) {
+  // Suffix for an empty event file. it should be kept in sync with
+  // _EVENT_FILE_SUFFIX in tensorflow/python/eager/profiler.py.
+  constexpr char kProfileEmptySuffix[] = ".profile-empty";
+  std::vector<string> children;
+  TF_RETURN_IF_ERROR(Env::Default()->GetChildren(logdir, &children));
+  for (const string& child : children) {
+    if (absl::EndsWith(child, kProfileEmptySuffix)) {
+      return Status::OK();
+    }
+  }
+  EventsWriter event_writer(ProfilerJoinPath(logdir, "events"));
+  return event_writer.InitWithSuffix(kProfileEmptySuffix);
+}
+
+Status SaveProfile(const string& repository_root, const string& run,
+                   const string& host, const ProfileResponse& response,
+                   std::ostream* os) {
+  string run_dir;
+  TF_RETURN_IF_ERROR(GetOrCreateRunDir(repository_root, run, &run_dir, os));
   for (const auto& tool_data : response.tool_data()) {
-    TF_RETURN_IF_ERROR(
-        DumpToolDataToLogDirectory(profile_run_dir, host, tool_data, os));
+    TF_RETURN_IF_ERROR(DumpToolData(run_dir, host, tool_data, os));
   }
   return Status::OK();
 }

-Status SaveGzippedToolDataToTensorboardProfile(const string& logdir,
-                                               const string& run,
-                                               const string& host,
-                                               const string& tool_name,
-                                               const string& data) {
-  string profile_run_dir;
+Status SaveGzippedToolData(const string& repository_root, const string& run,
+                           const string& host, const string& tool_name,
+                           const string& data) {
+  string run_dir;
   std::stringstream ss;
-  Status status = GetOrCreateProfileRunDir(logdir, run, &profile_run_dir, &ss);
+  Status status = GetOrCreateRunDir(repository_root, run, &run_dir, &ss);
   LOG(INFO) << ss.str();
   TF_RETURN_IF_ERROR(status);
   string host_prefix = host.empty() ? "" : absl::StrCat(host, ".");
-  string path =
-      ProfilerJoinPath(profile_run_dir, absl::StrCat(host_prefix, tool_name));
+  string path = ProfilerJoinPath(run_dir, absl::StrCat(host_prefix, tool_name));
   TF_RETURN_IF_ERROR(WriteGzippedDataToFile(path, data));
   LOG(INFO) << "Dumped gzipped tool data for " << tool_name << " to " << path;
   return Status::OK();

@@ -30,21 +30,22 @@ string GetCurrentTimeStampAsString();
 // Returns the profile plugin directory given a logdir to TensorBoard.
 string GetTensorBoardProfilePluginDir(const string& logdir);

-// Saves all profiling tool data in a profile to a TensorBoard log directory
-// with the given run name. This writes user-facing log messages to `os`.
+// Creates an empty event file if not already exists, which indicates that we
+// have a plugins/profile/ directory in the current logdir.
+Status MaybeCreateEmptyEventFile(const string& logdir);
+
+// Saves all profiling tool data in a profile to <repository_root>/<run>/.
+// This writes user-facing log messages to `os`.
 // Note: this function creates a directory even when all fields in
 // ProfileResponse are unset/empty.
-Status SaveTensorboardProfile(const string& logdir, const string& run,
-                              const string& host,
-                              const ProfileResponse& response,
-                              std::ostream* os);
+Status SaveProfile(const string& repository_root, const string& run,
+                   const string& host, const ProfileResponse& response,
+                   std::ostream* os);

-// Gzip the data and save to the specified filepath.
-Status SaveGzippedToolDataToTensorboardProfile(const string& logdir,
-                                               const string& run,
-                                               const string& host,
-                                               const string& tool_name,
-                                               const string& data);
+// Gzip the data and save to <repository_root>/<run>/.
+Status SaveGzippedToolData(const string& repository_root, const string& run,
+                           const string& host, const string& tool_name,
+                           const string& data);

 }  // namespace profiler
 }  // namespace tensorflow
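For the gzipped single-tool path, the renamed helper makes the on-disk layout explicit: the data is gzipped and written to <repository_root>/<run>/<host>.<tool_name>. A small usage sketch follows, assuming the same save_profile.h location as in the earlier sketch; the run, host, and tool-name literals are only illustrative.

#include <string>

#include "tensorflow/core/platform/status.h"
#include "tensorflow/core/profiler/rpc/client/save_profile.h"  // assumed path

namespace tensorflow {
namespace profiler {

// Hypothetical helper: save already-serialized trace JSON for one host.
Status SaveTraceJsonSketch(const std::string& logdir, const std::string& json) {
  // Writes gzipped JSON to
  //   <logdir>/plugins/profile/run_1/host0.trace_viewer.json.gz
  return SaveGzippedToolData(GetTensorBoardProfilePluginDir(logdir),
                             /*run=*/"run_1", /*host=*/"host0",
                             /*tool_name=*/"trace_viewer.json.gz", json);
}

}  // namespace profiler
}  // namespace tensorflow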

@@ -120,7 +120,6 @@ tf_python_pybind_extension(
         "//tensorflow/core:lib",
         "//tensorflow/core/profiler/convert:op_stats_to_tf_stats",
         "//tensorflow/core/profiler/convert:xplane_to_op_stats",
-        "//tensorflow/core/profiler/convert:xplane_to_profile_response",
         "//tensorflow/core/profiler/convert:xplane_to_trace_events",
        "//tensorflow/core/profiler/lib:profiler_session_headers",
         "//tensorflow/core/profiler/rpc:profiler_server_headers",

@@ -20,7 +20,6 @@ limitations under the License.
 #include "pybind11/pybind11.h"
 #include "pybind11/pytypes.h"
 #include "tensorflow/core/platform/errors.h"
-#include "tensorflow/core/platform/host_info.h"
 #include "tensorflow/core/platform/status.h"
 #include "tensorflow/core/platform/types.h"
 #include "tensorflow/core/profiler/convert/op_stats_to_input_pipeline_analysis.h"
@@ -28,7 +27,6 @@ limitations under the License.
 #include "tensorflow/core/profiler/convert/op_stats_to_tf_stats.h"
 #include "tensorflow/core/profiler/convert/xplane_to_memory_profile.h"
 #include "tensorflow/core/profiler/convert/xplane_to_op_stats.h"
-#include "tensorflow/core/profiler/convert/xplane_to_profile_response.h"
 #include "tensorflow/core/profiler/convert/xplane_to_trace_events.h"
 #include "tensorflow/core/profiler/lib/profiler_session.h"
 #include "tensorflow/core/profiler/protobuf/input_pipeline.pb.h"
@@ -105,23 +103,7 @@ class ProfilerSessionWrapper {
     tensorflow::Status status;
     status = session_->CollectData(&xspace);
     session_.reset();
-    tensorflow::MaybeRaiseRegisteredFromStatus(status);
-    tensorflow::ProfileResponse response;
-    tensorflow::ProfileRequest request =
-        tensorflow::profiler::PopulateProfileRequest(
-            /*duration_ms=*/0, logdir_,
-            tensorflow::profiler::GetCurrentTimeStampAsString(),
-            tensorflow::port::Hostname(), /*opts=*/{});
-    status = tensorflow::profiler::ConvertXSpaceToProfileResponse(
-        xspace, request, &response);
-    tensorflow::MaybeRaiseRegisteredFromStatus(status);
-    std::stringstream ss;  // Record LOG messages.
-    status = tensorflow::profiler::SaveTensorboardProfile(
-        request.repository_root(), request.session_id(), request.host_name(),
-        response, &ss);
-    LOG(INFO) << ss.str();
+    status = tensorflow::profiler::ExportToTensorBoard(xspace, logdir_);
     tensorflow::MaybeRaiseRegisteredFromStatus(status);
   }