Use "empty" member function to test for emptiness

PiperOrigin-RevId: 157483181
A. Unique TensorFlower, 2017-05-30 10:52:47 -07:00, committed by TensorFlower Gardener
parent 6c3b15915d
commit 7280dafca1
37 changed files with 51 additions and 51 deletions
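
Every hunk below makes the same mechanical substitution: a test spelled size() > 0, size() == 0, size() != 0, size() < 1, or a comparison against "" becomes a call to the container's empty() member. empty() states the intent directly and is guaranteed constant time for every standard container, whereas size() had no such guarantee for all containers before C++11 (std::list::size was allowed to be linear), and some containers, such as std::forward_list, provide no size() at all. A minimal self-contained sketch of the idiom (illustrative code, not taken from this change):

#include <forward_list>
#include <iostream>
#include <string>
#include <vector>

int main() {
  std::vector<int> nodes;
  std::string device;

  // Before: emptiness tested indirectly through size() or "".
  if (nodes.size() > 0) { /* use nodes */ }
  if (device == "") { /* no device assigned */ }

  // After: empty() asks the question directly, in constant time.
  if (!nodes.empty()) { /* use nodes */ }
  if (device.empty()) { /* no device assigned */ }

  // empty() also exists where size() does not:
  std::forward_list<int> edges;
  std::cout << (edges.empty() ? "no edges" : "has edges") << "\n";
  return 0;
}

Checks such as clang-tidy's readability-container-size-empty flag exactly these patterns, so a sweep like this one can also be automated.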

@@ -243,7 +243,7 @@ TEST(GraphCycles, RandomizedTest) {
break;
case 1: // Remove a node
- if (nodes.size() > 0) {
+ if (!nodes.empty()) {
int node_index = RandomNode(&rnd, &nodes);
int node = nodes[node_index];
nodes[node_index] = nodes.back();
@@ -263,7 +263,7 @@ TEST(GraphCycles, RandomizedTest) {
break;
case 2: // Add an edge
- if (nodes.size() > 0) {
+ if (!nodes.empty()) {
int from = RandomNode(&rnd, &nodes);
int to = RandomNode(&rnd, &nodes);
if (EdgeIndex(&edges, nodes[from], nodes[to]) == -1) {
@@ -282,7 +282,7 @@ TEST(GraphCycles, RandomizedTest) {
break;
case 3: // Remove an edge
- if (edges.size() > 0) {
+ if (!edges.empty()) {
int i = RandomEdge(&rnd, &edges);
int from = edges[i].from;
int to = edges[i].to;
@@ -296,7 +296,7 @@ TEST(GraphCycles, RandomizedTest) {
break;
case 4: // Check a path
- if (nodes.size() > 0) {
+ if (!nodes.empty()) {
int from = RandomNode(&rnd, &nodes);
int to = RandomNode(&rnd, &nodes);
int32 path[2 * kMaxNodes];

@@ -53,7 +53,7 @@ static void AllocateRawFlag() {
static bool ParseDefaultLayout(const string& text, DefaultLayout* layout) {
bool result = true;
std::vector<string> field = tensorflow::str_util::Split(text, ':');
- if (field.size() > 0) {
+ if (!field.empty()) {
if (field[0] == "random") {
layout->dimension_order = DefaultLayout::DimensionOrder::kRandom;
if (field.size() > 1) {

@@ -150,7 +150,7 @@ void MetricTableReport::AppendCategoryTable() {
// Show the category.
string text = category.category_text;
- if (text == "") {
+ if (text.empty()) {
text = "[no category]";
}
tensorflow::strings::StrAppend(&text, " (", category.entries.size(), " ",
@@ -200,7 +200,7 @@ void MetricTableReport::AppendEntryTable() {
metric_sum += entry.metric;
string text = entry.text;
- if (text == "") {
+ if (text.empty()) {
text = "[no entry text]";
}
AppendTableRow(text, entry.metric, metric_sum);

@@ -227,7 +227,7 @@ StatusOr<Shape> InferWindowOutputShape(const Shape& base_shape,
/* static */ StatusOr<Shape> ShapeInference::InferConcatOpShape(
tensorflow::gtl::ArraySlice<const Shape*> arg_shapes,
const int64 dimension) {
- if (arg_shapes.size() == 0) {
+ if (arg_shapes.empty()) {
return InvalidArgument("Concatenate expects at least one argument");
}
if (dimension < 0 || dimension >= ShapeUtil::Rank(*arg_shapes[0])) {
@@ -679,7 +679,7 @@ ShapeInference::InferDegenerateDimensionBroadcastShape(
/* static */ StatusOr<Shape> ShapeInference::InferMapShape(
tensorflow::gtl::ArraySlice<const Shape*> arg_shapes,
const ProgramShape& to_apply) {
- if (arg_shapes.size() == 0) {
+ if (arg_shapes.empty()) {
return InvalidArgument("Map expects at least one argument");
}

@@ -66,7 +66,7 @@ Status TensorStore::AddTensor(const string& name, const TensorAndKey& tk) {
Status TensorStore::SaveTensors(const std::vector<string>& output_names,
SessionState* session_state) {
mutex_lock l(lock_);
- if (tensors_.size() != 0) {
+ if (!tensors_.empty()) {
// Save only the tensors in output_names in the session.
for (const string& name : output_names) {
TensorId id(ParseTensorName(name));

@@ -244,7 +244,7 @@ class ColocationGraph {
// members_[old_root].supported_device_types.
MergeSupportedDevices(&members_[new_root].supported_device_types,
members_[old_root].supported_device_types);
- if (members_[new_root].supported_device_types.size() == 0) {
+ if (members_[new_root].supported_device_types.empty()) {
string debug_info;
AddDebugInfo(x_root, &debug_info);
AddDebugInfo(y_root, &debug_info);

@@ -264,7 +264,7 @@ Microseconds GreedyScheduler::ComputeSchedule(
for (auto& x : device_states_) {
Sim* sim = x.second;
while (sim->num_running < sim->degree_parallelism &&
- sim->ready_nodes.size() > 0) {
+ !sim->ready_nodes.empty()) {
Event e;
e.node = GetNodeWithHighestPriority(sim->ready_nodes);
e.time = event.time + cost_model_->TimeEstimate(e.node);

@@ -60,7 +60,7 @@ void CostModel::MergeFromLocal(const Graph& g, const CostModel& cm) {
time_[global_id] += cm.time_[local_id];
int num_slots = cm.slot_bytes_[local_id].size();
if (num_slots > 0) {
- if (slot_bytes_[global_id].size() == 0) {
+ if (slot_bytes_[global_id].empty()) {
slot_bytes_[global_id].resize(num_slots);
} else {
CHECK_EQ(num_slots, slot_bytes_[global_id].size());
@@ -82,7 +82,7 @@ void CostModel::MergeFromGlobal(const CostModel& cm) {
time_[i] += cm.time_[i];
int num_slots = cm.slot_bytes_[i].size();
if (num_slots > 0) {
- if (slot_bytes_[i].size() == 0) {
+ if (slot_bytes_[i].empty()) {
slot_bytes_[i].resize(num_slots);
} else {
CHECK_EQ(num_slots, slot_bytes_[i].size());
@@ -138,7 +138,7 @@ void CostModel::SetNumOutputs(const Node* node, int num_outputs) {
auto perslot = &slot_bytes_[id];
auto max_mem_usage = &max_mem_usage_[id];
auto output_port_alloc_ids = &output_port_alloc_ids_[id];
- if (perslot->size() > 0) {
+ if (!perslot->empty()) {
CHECK_EQ(num_outputs, perslot->size()) << "Cannot resize slot_bytes, node="
<< node->name();
} else {

@@ -281,7 +281,7 @@ void FIFOQueue::TryDequeueMany(int num_elements, OpKernelContext* ctx,
}
}
}
- if (allow_small_batch && queues_[0].size() > 0) {
+ if (allow_small_batch && !queues_[0].empty()) {
// Request all remaining elements in the queue.
queue_size = queues_[0].size();
attempt->tuple.clear();

@@ -119,7 +119,7 @@ void PaddingFIFOQueue::TryDequeueMany(int num_elements, OpKernelContext* ctx,
}
}
}
- if (allow_small_batch && queues_[0].size() > 0) {
+ if (allow_small_batch && !queues_[0].empty()) {
// Request all remaining elements in the queue.
queue_size = queues_[0].size();
attempt->tuples.clear();

@@ -358,7 +358,7 @@ void RandomShuffleQueue::TryDequeueMany(int num_elements, OpKernelContext* ctx,
}
}
}
- if (allow_small_batch && queues_[0].size() > 0) {
+ if (allow_small_batch && !queues_[0].empty()) {
// Request all remaining elements in the queue.
queue_size = queues_[0].size();
attempt->tuple.clear();

@@ -105,7 +105,7 @@ void RangeSampler::SampleBatchGetExpectedCountAvoid(
num_tries = batch_size;
}
// Compute the expected counts of the batch and the extra values
- if (batch_expected_count.size() > 0) {
+ if (!batch_expected_count.empty()) {
CHECK_EQ(batch_size, batch_expected_count.size());
for (int i = 0; i < batch_size; i++) {
batch_expected_count[i] =
@@ -131,7 +131,7 @@ void AllSampler::SampleBatchGetExpectedCountAvoid(
for (int i = 0; i < batch_size; i++) {
batch[i] = i;
}
- if (batch_expected_count.size() > 0) {
+ if (!batch_expected_count.empty()) {
CHECK_EQ(batch_size, batch_expected_count.size());
for (int i = 0; i < batch_size; i++) {
batch_expected_count[i] = 1;
@@ -290,7 +290,7 @@ Status FixedUnigramSampler::LoadFromFile(Env* env, const string& vocab_file,
// The vocabulary file should be in csv like format, with the last
// field the weight associated with the word.
std::vector<string> cols = str_util::Split(line, ',');
- if (cols.size() == 0) continue;
+ if (cols.empty()) continue;
// Skip entries that do not belong to this shard.
if (word_id % num_shards_ == shard_) {
float w = 0.0;

@@ -298,7 +298,7 @@ class SampleDistortedBoundingBoxOp : public OpKernel {
// Insert the entire image if no bounding boxes are supplied.
const Rectangle image_rect(0, 0, width, height);
- if (bounding_boxes.size() < 1) {
+ if (bounding_boxes.empty()) {
OP_REQUIRES(context, use_image_if_no_bounding_boxes_,
errors::InvalidArgument(
"No bounding boxes provided as input. One must "

@@ -127,7 +127,7 @@ class ShuffleDatasetOp : public OpKernel {
}
}
- if (buffer_.size() > 0) {
+ if (!buffer_.empty()) {
*end_of_sequence = false;
// Choose an element to produce uniformly at random, and
// swap the last element into its place in the buffer.

@@ -607,8 +607,8 @@ inline void GEPP(
}
for (const auto* left_slice : left_slices) {
const auto& left = *left_slice;
- const auto* data3 = (left.data3.size() > 0) ? &left.data3[0] : nullptr;
- const auto* data = (left.data.size() > 0) ? &left.data[0] : nullptr;
+ const auto* data3 = (!left.data3.empty()) ? &left.data3[0] : nullptr;
+ const auto* data = (!left.data.empty()) ? &left.data[0] : nullptr;
const int num_blocks = left.index3_offset.size();
int begin3 = 0;
int begin = 0;

@@ -100,7 +100,7 @@ void ReadCSVFileToComplexVectorOrDie(
}
std::vector<string> lines = str_util::Split(data_string, '\n');
for (const string& line : lines) {
- if (line == "") {
+ if (line.empty()) {
continue;
}
std::vector<std::complex<double> > data_line;

@@ -147,7 +147,7 @@ class StackOp : public OpKernel {
explicit StackOp(OpKernelConstruction* context) : OpKernel(context) {
OP_REQUIRES_OK(context, context->GetAttr("elem_type", &elem_type_));
OP_REQUIRES_OK(context, context->GetAttr("stack_name", &stack_name_));
- if (stack_name_ == "") stack_name_ = name();
+ if (stack_name_.empty()) stack_name_ = name();
}
void Compute(OpKernelContext* ctx) override {

@@ -29,7 +29,7 @@ namespace tensorflow {
namespace {
std::vector<string> Split(const string& str, const string& delimiter) {
- if (delimiter.size()) {
+ if (!delimiter.empty()) {
return str_util::Split(str, delimiter, str_util::SkipEmpty());
}
std::vector<string> char_vector(str.size());

@@ -149,7 +149,7 @@ class SummaryMergeOp : public OpKernel {
const string& tag = summary_in.value(v).tag();
// The tag is unused by the TensorSummary op, so no need to check
// for duplicates.
- if ((tag != "") && !tags.insert(tag).second) {
+ if ((!tag.empty()) && !tags.insert(tag).second) {
c->SetStatus(errors::InvalidArgument(strings::StrCat(
"Duplicate tag ", tag, " found in summary inputs")));
return;

@@ -156,7 +156,7 @@ class TensorArrayOp : public TensorArrayCreationOp {
context->GetAttr("clear_after_read", &clear_after_read_));
OP_REQUIRES_OK(context,
context->GetAttr("tensor_array_name", &tensor_array_name_));
- if (tensor_array_name_ == "") tensor_array_name_ = name();
+ if (tensor_array_name_.empty()) tensor_array_name_ = name();
}
Status CreateTensorArray(OpKernelContext* ctx, ResourceMgr* rm,

@@ -102,7 +102,7 @@ Status RecordReader::ReadChecksummed(uint64 offset, size_t n,
TF_RETURN_IF_ERROR(zlib_input_stream_->ReadNBytes(expected, storage));
if (storage->size() != expected) {
- if (storage->size() == 0) {
+ if (storage->empty()) {
return errors::OutOfRange("eof");
} else {
return errors::DataLoss("truncated record at ", offset);
@@ -121,7 +121,7 @@ Status RecordReader::ReadChecksummed(uint64 offset, size_t n,
StringPiece data;
TF_RETURN_IF_ERROR(src_->Read(offset, expected, &data, &(*storage)[0]));
if (data.size() != expected) {
- if (data.size() == 0) {
+ if (data.empty()) {
return errors::OutOfRange("eof");
} else {
return errors::DataLoss("truncated record at ", offset);

@@ -183,7 +183,7 @@ Status SnappyInputBuffer::ReadFromFile() {
// possible that on the last read there isn't enough data in the file to
// fill up the buffer in which case file_->ReadNBytes would return an
// OutOfRange error.
- if (data.size() == 0) {
+ if (data.empty()) {
return errors::OutOfRange("EOF reached");
}
if (errors::IsOutOfRange(s)) {

@@ -396,7 +396,7 @@ class Harness : public ::testing::Test {
break;
case 1: {
// Attempt to return something smaller than an existing key
- if (result.size() > 0 && result[result.size() - 1] > '\0') {
+ if (!result.empty() && result[result.size() - 1] > '\0') {
result[result.size() - 1]--;
}
break;

@@ -110,7 +110,7 @@ Status ZlibInputStream::ReadFromStream() {
// possible that on the last read there isn't enough data in the stream to
// fill up the buffer in which case input_stream_->ReadNBytes would return an
// OutOfRange error.
- if (data.size() == 0) {
+ if (data.empty()) {
return errors::OutOfRange("EOF reached");
}
if (errors::IsOutOfRange(s)) {

@@ -264,7 +264,7 @@ class TestFileSystem : public NullFileSystem {
public:
// Only allow for a single root directory.
Status IsDirectory(const string& dirname) override {
- if (dirname == "." || dirname == "") {
+ if (dirname == "." || dirname.empty()) {
return Status::OK();
}
return Status(tensorflow::error::FAILED_PRECONDITION, "Not a dir");
@@ -272,7 +272,7 @@ class TestFileSystem : public NullFileSystem {
// Simulating a FS with a root dir and a single file underneath it.
Status GetChildren(const string& dir, std::vector<string>* result) override {
- if (dir == "." || dir == "") {
+ if (dir == "." || dir.empty()) {
result->push_back("test");
}
return Status::OK();

@@ -49,7 +49,7 @@ string FormatLibraryFileName(const string& name, const string& version) {
filename = "lib" + name + "." + version + ".dylib";
}
#else
- if (version.size() == 0) {
+ if (version.empty()) {
filename = "lib" + name + ".so";
} else {
filename = "lib" + name + ".so" + "." + version;

@@ -935,8 +935,8 @@ Status FastParseExample(const Config& config,
for (size_t e = start; e < end; ++e) {
status_of_minibatch[minibatch] = FastParseSerializedExample(
serialized[e],
- (example_names.size() > 0 ? example_names[e] : "<unknown>"), e,
- config, config_index, hasher, &fixed_dense_values,
+ (!example_names.empty() ? example_names[e] : "<unknown>"), e, config,
+ config_index, hasher, &fixed_dense_values,
&varlen_dense_buffers[minibatch], &sparse_buffers[minibatch]);
if (!status_of_minibatch[minibatch].ok()) break;
}

@@ -323,7 +323,7 @@ Status BatchExampleProtoToTensors(
std::vector<Tensor>* output_sparse_shapes_tensor) {
const int batch_size = examples.size();
- const bool has_names = (names.size() > 0);
+ const bool has_names = (!names.empty());
if (has_names) {
if (names.size() != examples.size()) {
return errors::InvalidArgument(

@@ -41,7 +41,7 @@ Status MemmappedFileSystemWriter::SaveTensor(const Tensor& tensor,
" and include [A-Za-z0-9_.]");
}
const auto tensor_data = tensor.tensor_data();
- if (0 == tensor_data.size()) {
+ if (tensor_data.empty()) {
return errors::InvalidArgument(
"MemmappedEnvWritter: saving tensor with 0 size");
}

@@ -103,7 +103,7 @@ const char *CudaPtxInMemory::default_text() const {
if (decompressed_ptx_iter != decompressed_ptx_.end()) {
// If the decompressed string is empty, which means the ptx hasn't been
// decompressed, decompress it here.
- if (decompressed_ptx_iter->second.size() == 0) {
+ if (decompressed_ptx_iter->second.empty()) {
decompressed_ptx_iter->second = DecompressPtx(ptx);
}
return decompressed_ptx_iter->second.c_str();
@@ -136,7 +136,7 @@ const char *CudaPtxInMemory::text(int compute_capability_major,
if (decompressed_ptx_iter != decompressed_ptx_.end()) {
// If the decompressed string is empty, which means the ptx hasn't been
// decompressed, decompress it here.
- if (decompressed_ptx_iter->second.size() == 0) {
+ if (decompressed_ptx_iter->second.empty()) {
decompressed_ptx_iter->second = DecompressPtx(ptx_iter->second);
}
return decompressed_ptx_iter->second.c_str();

@@ -108,7 +108,7 @@ Status FreezeRequantizationRanges(const GraphDef& input_graph_def,
string min_max_log_file;
TF_RETURN_IF_ERROR(
context.GetOneStringParameter("min_max_log_file", "", &min_max_log_file));
- if (min_max_log_file == "") {
+ if (min_max_log_file.empty()) {
return errors::InvalidArgument(
"You must pass a file name to min_max_log_file");
}

@@ -149,7 +149,7 @@ string UniqueNodeNameFromInput(const string& input_name) {
result += "__hat__";
}
result += node_name;
- if (suffix != "") {
+ if (!suffix.empty()) {
result += "__port__" + suffix.substr(1, suffix.size() - 1);
}
return result;

@@ -32,7 +32,7 @@ Status SetDevice(const GraphDef& input_graph_def,
for (const NodeDef& node : input_graph_def.node()) {
NodeDef* new_node = output_graph_def->mutable_node()->Add();
new_node->CopyFrom(node);
- if (!if_default || (node.device() == "")) {
+ if (!if_default || (node.device().empty())) {
new_node->set_device(new_device);
}
}

@@ -76,7 +76,7 @@ Status TypeForPlaceholder(const TransformFuncContext& context,
// Takes a comma-separated string of numbers and parses them into a shape.
bool TensorShapeFromString(const string& shape_string, TensorShape* result) {
- if (shape_string == "") {
+ if (shape_string.empty()) {
return false;
}
std::vector<int64> dims;

@@ -184,7 +184,7 @@ Status SummarizeGraph(const GraphDef& graph, const string& graph_path,
++control_edge_count;
}
}
- if (node.device() != "") {
+ if (!node.device().empty()) {
++device_counts[node.device()];
}
if ((node.op() == "Const") || (node.op() == "Variable") ||

@@ -252,7 +252,7 @@ Status TransformGraph(const std::vector<string>& inputs,
TransformRegistry* transform_registry = GetTransformRegistry();
for (const auto& transform_info : transform_params) {
const string& transform_name = transform_info.first;
- if (transform_name == "") {
+ if (transform_name.empty()) {
continue;
}
if (!transform_registry->count(transform_name)) {

@@ -110,7 +110,7 @@ string CanonicalInputName(const string& input_name) {
string node_name;
string suffix;
NodeNamePartsFromInput(input_name, &prefix, &node_name, &suffix);
- if (suffix == "") {
+ if (suffix.empty()) {
suffix = ":0";
}
return prefix + node_name + suffix;