[BatchScheduler] Avoid mutex contention when deciding whether a batch is empty. This is a read-path-only optimization.

PiperOrigin-RevId: 323206654
Change-Id: I566f6be038c77b7c5a66aad7a036c9d696f28a17
This commit is contained in:
Mingming Liu 2020-07-25 21:01:48 -07:00 committed by TensorFlower Gardener
parent 5c083e3852
commit 82e0f12b98

View File

@ -128,6 +128,8 @@ class Batch {
// The sum of the sizes of the tasks in 'tasks_'. // The sum of the sizes of the tasks in 'tasks_'.
size_t size_ TF_GUARDED_BY(mu_) = 0; size_t size_ TF_GUARDED_BY(mu_) = 0;
std::atomic<bool> empty_ TF_GUARDED_BY(mu_){true};
// Whether the batch has been closed. // Whether the batch has been closed.
Notification closed_; Notification closed_;
@ -215,6 +217,7 @@ void Batch<TaskType>::AddTask(std::unique_ptr<TaskType> task) {
mutex_lock l(mu_); mutex_lock l(mu_);
size_ += task->size(); size_ += task->size();
tasks_.push_back(std::move(task)); tasks_.push_back(std::move(task));
empty_.store(false);
} }
} }
@ -228,6 +231,9 @@ std::unique_ptr<TaskType> Batch<TaskType>::RemoveTask() {
std::unique_ptr<TaskType> task = std::move(tasks_.back()); std::unique_ptr<TaskType> task = std::move(tasks_.back());
size_ -= task->size(); size_ -= task->size();
tasks_.pop_back(); tasks_.pop_back();
if (tasks_.empty()) {
empty_.store(true);
}
return task; return task;
} }
} }
@ -241,16 +247,13 @@ int Batch<TaskType>::num_tasks() const {
} }
template <typename TaskType> template <typename TaskType>
bool Batch<TaskType>::empty() const { bool Batch<TaskType>::empty() const TF_NO_THREAD_SAFETY_ANALYSIS {
{ // tracer is added to zoom in about this method.
// tracer is added to zoom in about this method. // TODO(b/160249203): Remove tracer after evaluating a change to reduce
// TODO(b/160249203): Remove tracer after evaluating a change to reduce // lock contention and cpu usage (which is observed in profiler and
// lock contention and cpu usage (which is observed in profiler and // very data-driven).
// very data-driven). tensorflow::profiler::TraceMe tracer("BatchTask::empty");
tensorflow::profiler::TraceMe tracer("BatchTask::empty"); return empty_.load();
mutex_lock l(mu_);
return tasks_.empty();
}
} }
template <typename TaskType> template <typename TaskType>