STT-tensorflow/tensorflow/stream_executor/event.cc
A. Unique TensorFlower 955e356e4c [SE,XLA] Switch to using multiple streams in xla_device_context
Instead of having one stream for compute, host-to-device and device-to-host transfers, switch to having separate streams, just like the GPU does.
Add a se::Event field to XlaTensor to allow accurate inter-stream dependencies to be created.

As part of this:
 - Fix TransferManager::TransferLiteralFrom/ToDevice to correctly make generated substreams wait on their master stream.
 - Fix Stream::BlockHostUntilDone() to not block on or return substreams. This behavior is completely broken and not only nondeterministically returns substreams to the pool but causes indefinite hangs with the HostStream.

PiperOrigin-RevId: 203726543
2018-07-09 01:54:33 -07:00

54 lines
1.6 KiB
C++

/* Copyright 2015 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#include "tensorflow/stream_executor/event.h"
#include "tensorflow/stream_executor/stream.h"
#include "tensorflow/stream_executor/stream_executor_internal.h"
#include "tensorflow/stream_executor/stream_executor_pimpl.h"
namespace stream_executor {
// Constructs an Event bound to `stream_exec`. The platform-specific event
// object is created via the executor's implementation; note that the event
// is not yet usable until Init() successfully allocates it on the device.
Event::Event(StreamExecutor* stream_exec)
: stream_exec_(stream_exec),
implementation_(
stream_exec_->implementation()->CreateEventImplementation()) {}
// Releases the device-side event, if this object still owns one.
// A moved-from Event has a null implementation_ (and possibly a null
// stream_exec_), in which case there is nothing to deallocate.
Event::~Event() {
  if (stream_exec_ == nullptr || implementation_ == nullptr) {
    return;
  }
  const auto status = stream_exec_->DeallocateEvent(this);
  if (!status.ok()) {
    LOG(ERROR) << status.error_message();
  }
}
// Allocates the underlying device event via the executor.
// Returns true on success; on failure logs the error and returns false.
bool Event::Init() {
  const auto status = stream_exec_->AllocateEvent(this);
  if (status.ok()) {
    return true;
  }
  LOG(ERROR) << status.error_message();
  return false;
}
// Non-blocking query of the event's current status (e.g. pending/complete/
// error), delegated to the owning executor.
Event::Status Event::PollForStatus() {
return stream_exec_->PollForEventStatus(this);
}
} // namespace stream_executor