replacing 'iter_limit' with 'end'

Tare Gaskin 2020-07-10 15:03:43 +00:00
parent 6162dbe55e
commit 63fecc718d
8 changed files with 20 additions and 20 deletions
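All 20 changes are the same mechanical rename. Each loop hoists the container's size() into a local int bound so the condition compares int against int rather than int against size_t on every iteration; this commit only renames that cached bound from iter_limit to end. A minimal sketch of the pattern follows, with illustrative names (Example, values) that do not appear in the diff:

#include <iostream>
#include <vector>

// Sketch of the loop idiom touched by this commit. size() is evaluated
// once and narrowed to int, so the loop condition is an int-to-int
// comparison. The commit renames the cached bound `iter_limit` to `end`;
// behavior is unchanged.
void Example(const std::vector<int>& values) {
  for (int i = 0, end = values.size(); i < end; ++i) {
    std::cout << values[i] << '\n';
  }
}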

View File

@@ -62,14 +62,14 @@ InferenceContext::InferenceContext(
}
std::vector<std::unique_ptr<std::vector<ShapeAndType>>> handle_data(
input_shapes.size());
- for (int i = 0, iter_limit = input_handle_shapes_and_types.size(); i < iter_limit; ++i) {
+ for (int i = 0, end = input_handle_shapes_and_types.size(); i < end; ++i) {
const auto& v = input_handle_shapes_and_types[i];
if (v == nullptr) {
continue;
}
handle_data[i].reset(new std::vector<ShapeAndType>(v->size()));
auto& new_v = *handle_data[i];
- for (int j = 0, iter_limit = v->size(); j < iter_limit; ++j) {
+ for (int j = 0, end = v->size(); j < end; ++j) {
const auto& p = (*v)[j];
construction_status_.Update(
MakeShapeFromPartialTensorShape(p.first, &new_v[j].shape));
@@ -1103,7 +1103,7 @@ Status InferenceContext::AttachContext(const Status& status) {
std::vector<string> input_from_tensors_str;
std::vector<string> input_from_tensors_as_shape_str;
input_from_tensors_as_shape_str.reserve(inputs_.size());
- for (int i = 0, iter_limit = inputs_.size(); i < iter_limit; ++i) {
+ for (int i = 0, end = inputs_.size(); i < end; ++i) {
int input_tensors_size_ = input_tensors_.size();
int input_tensors_as_shapes_size_ = input_tensors_as_shapes_.size();
if (requested_input_tensor_as_partial_shape_[i] &&
@@ -1146,7 +1146,7 @@ bool InferenceContext::MergeHandleShapesAndTypes(
}
std::vector<ShapeAndType> new_values(shapes_and_types.size());
bool refined = false;
- for (int i = 0, iter_limit = shapes_and_types.size(); i < iter_limit; ++i) {
+ for (int i = 0, end = shapes_and_types.size(); i < end; ++i) {
const ShapeAndType& existing = (*to_update)[i];
if (shapes_and_types[i].dtype == existing.dtype) {
new_values[i].dtype = existing.dtype;
@@ -1170,7 +1170,7 @@ bool InferenceContext::MergeHandleShapesAndTypes(
if (!refined) {
return false;
}
- for (int i = 0, iter_limit = new_values.size(); i < iter_limit; ++i) {
+ for (int i = 0, end = new_values.size(); i < end; ++i) {
(*to_update)[i] = new_values[i];
}
return true;
@@ -1205,7 +1205,7 @@ bool InferenceContext::RelaxHandleShapesAndMergeTypes(
return false;
}
std::vector<ShapeAndType> new_values(shapes_and_types.size());
- for (int i = 0, iter_limit = shapes_and_types.size(); i < iter_limit; ++i) {
+ for (int i = 0, end = shapes_and_types.size(); i < end; ++i) {
const ShapeAndType& existing = (*to_update)[i];
if (shapes_and_types[i].dtype == existing.dtype) {
new_values[i].dtype = existing.dtype;

View File

@@ -1514,7 +1514,7 @@ Costs OpLevelCostEstimator::PredictEinsum(const OpContext& op_context) const {
n_dim.set_size(1);
k_dim.set_size(1);
- for (int i_idx = 0, iter_limit = a_input_str.size(); i_idx < iter_limit; ++i_idx) {
+ for (int i_idx = 0, end = a_input_str.size(); i_idx < end; ++i_idx) {
if (b_input_str.find(a_input_str[i_idx]) == std::string::npos) {
if (rhs_str.find(a_input_str[i_idx]) == std::string::npos) {
VLOG(1) << "Missing accurate estimator for op: " << op_info.op();
@@ -1534,7 +1534,7 @@ Costs OpLevelCostEstimator::PredictEinsum(const OpContext& op_context) const {
*(a_matrix_shape->add_dim()) = a_input_shape.dim(i_idx);
*(b_matrix_shape->add_dim()) = a_input_shape.dim(i_idx);
}
- for (int i_idx = 0, iter_limit = b_input_str.size(); i_idx < iter_limit; ++i_idx) {
+ for (int i_idx = 0, end = b_input_str.size(); i_idx < end; ++i_idx) {
if (a_input_str.find(b_input_str[i_idx]) == std::string::npos) {
if (rhs_str.find(b_input_str[i_idx]) == std::string::npos) {
VLOG(1) << "Missing accurate estimator for op: " << op_info.op();

View File

@@ -73,7 +73,7 @@ class UniqueNodes {
if (it == memoized_signatures_.end()) return;
std::vector<NodeDef*>& candidates = rep_[it->second];
- for (int i = 0, iter_limit = candidates.size(); i < iter_limit; ++i) {
+ for (int i = 0, end = candidates.size(); i < end; ++i) {
if (candidates[i] == node) {
std::swap(candidates[i], candidates[candidates.size() - 1]);
candidates.resize(candidates.size() - 1);

View File

@@ -63,7 +63,7 @@ Status DebugStripper::Optimize(Cluster* cluster, const GrapplerItem& item,
node.mutable_attr()->swap(new_attr);
// As Identity op only takes one input, mark redundant inputs as control
// input.
- for (int i = 1, iter_limit = node.input_size(); i < iter_limit; ++i) {
+ for (int i = 1, end = node.input_size(); i < end; ++i) {
if (!IsControlInput(node.input(i))) {
*node.mutable_input(i) = AsControlDependency(NodeName(node.input(i)));
}

View File

@@ -357,7 +357,7 @@ void PermuteNodesInPlace(GraphDef* graph, std::vector<int>* permutation,
}
permutation->swap(inv_perm);
}
- for (int n = 0, iter_limit = permutation->size(); n + 1 < iter_limit; ++n) {
+ for (int n = 0, end = permutation->size(); n + 1 < end; ++n) {
while (n != (*permutation)[n]) {
std::size_t r = (*permutation)[n];
graph->mutable_node()->SwapElements(n, r);

View File

@@ -130,7 +130,7 @@ void DerivedXLineBuilder::ExpandOrAddLevelEvent(const XEvent& event,
}
void DerivedXLineBuilder::ResetLastEvents(int level) {
- for (int i = level, iter_limit = last_event_by_level_.size(); i < iter_limit; ++i) {
+ for (int i = level, end = last_event_by_level_.size(); i < end; ++i) {
last_event_by_level_[i] = absl::nullopt;
}
if (level == 0) ResetDependentLines();

View File

@@ -63,7 +63,7 @@ void AddInferredAttr(const string& indentation, const string& attr_name,
string VectorToTuple(const std::vector<string>& l) {
if (l.size() == 1) return strings::StrCat("(", l.front(), ",)");
string ret = "(";
- for (int i = 0, iter_limit = l.size(); i < iter_limit; ++i) {
+ for (int i = 0, end = l.size(); i < end; ++i) {
if (i > 0) {
strings::StrAppend(&ret, ", ");
}
@@ -75,11 +75,11 @@ string VectorToTuple(const std::vector<string>& l) {
void Unflatten(const string& prefix, const std::vector<string>& output_sizes,
const string& var, string* result) {
- for (int i = 0, iter_limit = output_sizes.size(); i < iter_limit; ++i) {
+ for (int i = 0, end = output_sizes.size(); i < end; ++i) {
if (!output_sizes[i].empty()) {
strings::StrAppend(result, prefix, var, " = ");
if (i > 0) strings::StrAppend(result, var, "[:", i, "] + ");
- if (i + 1 < iter_limit) {
+ if (i + 1 < end) {
// Special case i == 0 to avoid "0 +" in the generated code.
if (i == 0) {
strings::StrAppend(result, "[", var, "[:", output_sizes[i], "]] + ",
@@ -295,7 +295,7 @@ string GenEagerPythonOp::Code() {
// from the end of params_no_default_, and adding params_no_default_.
attrs_.reserve(params_no_default_.size() - op_def_.input_arg_size() +
params_with_default_.size());
- for (int i = op_def_.input_arg_size(), iter_limit = params_no_default_.size(); i < iter_limit; ++i) {
+ for (int i = op_def_.input_arg_size(), end = params_no_default_.size(); i < end; ++i) {
attrs_.push_back(params_no_default_[i].GetName());
}
for (const auto& p : params_with_default_) {
@@ -331,7 +331,7 @@ string GenEagerPythonOp::Code() {
parameters_with_defaults.empty() ? "" : ", ", "name=None");
// Add attr_expressions_ for attrs that are params.
- for (int i = 0, iter_limit = attrs_.size(); i < iter_limit; ++i) {
+ for (int i = 0, end = attrs_.size(); i < end; ++i) {
const string& attr_name = attrs_[i];
const string& attr_api_name =
param_names_[i + op_def_.input_arg_size()].GetRenameTo();
@@ -522,7 +522,7 @@ bool GenEagerPythonOp::GetEagerFunctionSetup(const string& indentation,
}
}
- for (int i = 0, iter_limit = attrs_.size(); i < iter_limit; ++i) {
+ for (int i = 0, end = attrs_.size(); i < end; ++i) {
const string& attr_name = attrs_[i];
const auto& param = param_names_[i + op_def_.input_arg_size()];
const auto& attr = *FindAttr(attr_name, op_def_);

View File

@@ -561,10 +561,10 @@ string GenPythonOp::Code() {
// from the end of args_no_default, and adding args_no_default.
attrs_.reserve(params_no_default.size() - op_def_.input_arg_size() +
params_with_default.size());
- for (int i = op_def_.input_arg_size(), iter_limit = params_no_default.size(); i < iter_limit; ++i) {
+ for (int i = op_def_.input_arg_size(), end = params_no_default.size(); i < end; ++i) {
attrs_.push_back(params_no_default[i].GetName());
}
- for (int i = 0, iter_limit = params_with_default.size(); i < iter_limit; ++i) {
+ for (int i = 0, end = params_with_default.size(); i < end; ++i) {
attrs_.push_back(params_with_default[i].GetName());
}