minor spelling tweaks

commit 27643b326c (parent 7e17fdff28)
Author: Kazuaki Ishizaki
Date:   2019-12-20 06:49:46 +09:00
91 changed files with 389 additions and 389 deletions

@@ -518,7 +518,7 @@ tensorflow::Status UpdateTFE_ContextWithServerDef(
   grpc_server->worker_env()->device_mgr->ListDeviceAttributes(
       &local_device_attributes);
-  // This request make sure that we can create Rendevzous properly between
+  // This request make sure that we can create Rendezvous properly between
   // Local and Remote context.
   tensorflow::eager::CreateContextRequest base_request;
   for (const auto& da : cluster_device_attributes) {

@@ -213,7 +213,7 @@ TF_CAPI_EXPORT extern void TFE_DeleteTensorDebugInfo(
     TFE_TensorDebugInfo* debug_info);
 // Returns the number of dimensions used to represent the tensor on its device.
-// The number of dimensions used to reprensent the tensor on device can be
+// The number of dimensions used to represent the tensor on device can be
 // different from the number returned by TFE_TensorHandleNumDims.
 // The return value was current at the time of TFE_TensorDebugInfo creation.
 TF_CAPI_EXPORT extern int TFE_TensorDebugInfoOnDeviceNumDims(
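
Note: a minimal sketch of how the two rank queries documented above can be compared through the eager C API. It assumes a valid TFE_TensorHandle* obtained elsewhere and a hypothetical CompareDims helper; illustrative only, not part of this commit.

#include <stdio.h>
#include "tensorflow/c/c_api.h"
#include "tensorflow/c/eager/c_api.h"

// Compares the logical rank of `h` with the rank used on its device;
// the two can differ, as the comment above explains.
void CompareDims(TFE_TensorHandle* h) {
  TF_Status* status = TF_NewStatus();
  int logical_dims = TFE_TensorHandleNumDims(h, status);
  if (TF_GetCode(status) != TF_OK) {
    TF_DeleteStatus(status);
    return;
  }
  TFE_TensorDebugInfo* info = TFE_TensorHandleTensorDebugInfo(h, status);
  if (TF_GetCode(status) == TF_OK) {
    int device_dims = TFE_TensorDebugInfoOnDeviceNumDims(info);
    printf("logical rank: %d, on-device rank: %d\n", logical_dims, device_dims);
    TFE_DeleteTensorDebugInfo(info);
  }
  TF_DeleteStatus(status);
}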

@@ -284,7 +284,7 @@ class ForwardAccumulator {
   // Temporarily push or pop transient state for this accumulator.
   //
   // Allows an accumulator which is currently processing an operation to
-  // temporarily reset its state. Without pushing and poping, accumulators
+  // temporarily reset its state. Without pushing and popping, accumulators
   // ignore operations executed as a direct result of their own jvp
   // computations.
   void PushState() { call_state_.emplace(nullptr, false); }

@@ -529,7 +529,7 @@ typedef struct TF_FilesystemOps {
   /// If `statuses` is not null, plugins must fill each element with detailed
   /// status for each file, as if calling `path_exists` on each one. Core
   /// TensorFlow initializes the `statuses` array and plugins must use
-  /// `TF_SetStatus` to set each element instead of dirrectly assigning.
+  /// `TF_SetStatus` to set each element instead of directly assigning.
   ///
   /// DEFAULT IMPLEMENTATION: Checks existence of every file. Needs
   /// `path_exists`.
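
Note: a hedged sketch of a plugin honoring the `statuses` contract described above. The `paths_exist`-style signature is assumed from the plugin interface, and `my_path_exists` is a hypothetical helper; illustrative only, not part of this commit.

// Each element of `statuses` is set via TF_SetStatus, never assigned directly.
static bool MyPathsExist(const TF_Filesystem* filesystem, char** paths,
                         int num_files, TF_Status** statuses) {
  bool all_exist = true;
  for (int i = 0; i < num_files; ++i) {
    bool found = my_path_exists(filesystem, paths[i]);  // hypothetical helper
    if (!found) all_exist = false;
    if (statuses != NULL) {
      if (found)
        TF_SetStatus(statuses[i], TF_OK, "");
      else
        TF_SetStatus(statuses[i], TF_NOT_FOUND, "path not found");
    }
  }
  return all_exist;
}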

@@ -32,7 +32,7 @@ namespace tensorflow {
 // TODO(b/143949615): After all filesystems are converted, this file will be
 // moved to core/platform, and this class can become a singleton and replace the
 // need for `Env::Default()`. At that time, we might decide to remove the need
-// for `Env::Default()` altoghether, but that's a different project, not in
+// for `Env::Default()` altogether, but that's a different project, not in
 // scope for now. I'm just mentioning this here as that transition will mean
 // removal of the registration part from `Env` and adding it here instead: we
 // will need tables to hold for each scheme the function tables that implement

@@ -146,7 +146,7 @@ int ModularFileSystemTest::rng_val_;
 // As some of the implementations might be missing, the tests should still pass
 // if the returned `Status` signals the unimplemented state.
-bool UninmplementedOrReturnsCode(Status actual_status, Code expected_code) {
+bool UnimplementedOrReturnsCode(Status actual_status, Code expected_code) {
   Code actual_code = actual_status.code();
   return (actual_code == Code::UNIMPLEMENTED) || (actual_code == expected_code);
 }
@@ -193,14 +193,14 @@ TEST_P(ModularFileSystemTest, TestCreateFile) {
   const std::string filepath = GetURIForPath("a_file");
   std::unique_ptr<WritableFile> new_file;
   Status status = env_->NewWritableFile(filepath, &new_file);
-  EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::OK);
+  EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::OK);
 }

 TEST_P(ModularFileSystemTest, TestCreateFileNonExisting) {
   const std::string filepath = GetURIForPath("dir_not_found/a_file");
   std::unique_ptr<WritableFile> new_file;
   Status status = env_->NewWritableFile(filepath, &new_file);
-  EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::NOT_FOUND);
+  EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::NOT_FOUND);
 }

 TEST_P(ModularFileSystemTest, TestCreateFileExistingDir) {
@@ -210,7 +210,7 @@ TEST_P(ModularFileSystemTest, TestCreateFileExistingDir) {
   std::unique_ptr<WritableFile> new_file;
   status = env_->NewWritableFile(filepath, &new_file);
-  EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::FAILED_PRECONDITION);
+  EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::FAILED_PRECONDITION);
 }

 TEST_P(ModularFileSystemTest, TestCreateFilePathIsInvalid) {
@@ -222,21 +222,21 @@ TEST_P(ModularFileSystemTest, TestCreateFilePathIsInvalid) {
   const std::string new_path = GetURIForPath("a_file/a_file");
   std::unique_ptr<WritableFile> new_file;
   status = env_->NewWritableFile(new_path, &new_file);
-  EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::FAILED_PRECONDITION);
+  EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::FAILED_PRECONDITION);
 }

 TEST_P(ModularFileSystemTest, TestAppendFile) {
   const std::string filepath = GetURIForPath("a_file");
   std::unique_ptr<WritableFile> new_file;
   Status status = env_->NewAppendableFile(filepath, &new_file);
-  EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::OK);
+  EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::OK);
 }

 TEST_P(ModularFileSystemTest, TestAppendFileNonExisting) {
   const std::string filepath = GetURIForPath("dir_not_found/a_file");
   std::unique_ptr<WritableFile> new_file;
   Status status = env_->NewAppendableFile(filepath, &new_file);
-  EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::NOT_FOUND);
+  EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::NOT_FOUND);
 }

 TEST_P(ModularFileSystemTest, TestAppendFileExistingDir) {
@@ -246,7 +246,7 @@ TEST_P(ModularFileSystemTest, TestAppendFileExistingDir) {
   std::unique_ptr<WritableFile> new_file;
   status = env_->NewAppendableFile(filepath, &new_file);
-  EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::FAILED_PRECONDITION);
+  EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::FAILED_PRECONDITION);
 }

 TEST_P(ModularFileSystemTest, TestCreateThenAppendFile) {
@@ -258,7 +258,7 @@ TEST_P(ModularFileSystemTest, TestCreateThenAppendFile) {
   std::unique_ptr<WritableFile> same_file;
   status = env_->NewAppendableFile(filepath, &same_file);
-  EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::OK);
+  EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::OK);
 }

 TEST_P(ModularFileSystemTest, TestAppendFilePathIsInvalid) {
@@ -271,21 +271,21 @@ TEST_P(ModularFileSystemTest, TestAppendFilePathIsInvalid) {
   const std::string new_path = GetURIForPath("a_file/a_file");
   std::unique_ptr<WritableFile> same_file;
   status = env_->NewAppendableFile(new_path, &same_file);
-  EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::FAILED_PRECONDITION);
+  EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::FAILED_PRECONDITION);
 }

 TEST_P(ModularFileSystemTest, TestReadFile) {
   const std::string filepath = GetURIForPath("a_file");
   std::unique_ptr<RandomAccessFile> new_file;
   Status status = env_->NewRandomAccessFile(filepath, &new_file);
-  EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::NOT_FOUND);
+  EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::NOT_FOUND);
 }

 TEST_P(ModularFileSystemTest, TestReadFileNonExisting) {
   const std::string filepath = GetURIForPath("dir_not_found/a_file");
   std::unique_ptr<RandomAccessFile> new_file;
   Status status = env_->NewRandomAccessFile(filepath, &new_file);
-  EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::NOT_FOUND);
+  EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::NOT_FOUND);
 }

 TEST_P(ModularFileSystemTest, TestReadFileExistingDir) {
@@ -295,7 +295,7 @@ TEST_P(ModularFileSystemTest, TestReadFileExistingDir) {
   std::unique_ptr<RandomAccessFile> new_file;
   status = env_->NewRandomAccessFile(filepath, &new_file);
-  EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::FAILED_PRECONDITION);
+  EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::FAILED_PRECONDITION);
 }

 TEST_P(ModularFileSystemTest, TestCreateThenReadFile) {
@@ -307,7 +307,7 @@ TEST_P(ModularFileSystemTest, TestCreateThenReadFile) {
   std::unique_ptr<RandomAccessFile> same_file;
   status = env_->NewRandomAccessFile(filepath, &same_file);
-  EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::OK);
+  EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::OK);
 }

 TEST_P(ModularFileSystemTest, TestReadFilePathIsInvalid) {
@@ -320,21 +320,21 @@ TEST_P(ModularFileSystemTest, TestReadFilePathIsInvalid) {
   const std::string new_path = GetURIForPath("a_file/a_file");
   std::unique_ptr<RandomAccessFile> same_file;
   status = env_->NewRandomAccessFile(new_path, &same_file);
-  EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::FAILED_PRECONDITION);
+  EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::FAILED_PRECONDITION);
 }

 TEST_P(ModularFileSystemTest, TestCreateMemoryRegion) {
   const std::string filepath = GetURIForPath("a_file");
   std::unique_ptr<ReadOnlyMemoryRegion> region;
   Status status = env_->NewReadOnlyMemoryRegionFromFile(filepath, &region);
-  EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::NOT_FOUND);
+  EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::NOT_FOUND);
 }

 TEST_P(ModularFileSystemTest, TestCreateMemoryRegionNonExisting) {
   const std::string filepath = GetURIForPath("dir_not_found/a_file");
   std::unique_ptr<ReadOnlyMemoryRegion> region;
   Status status = env_->NewReadOnlyMemoryRegionFromFile(filepath, &region);
-  EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::NOT_FOUND);
+  EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::NOT_FOUND);
 }

 TEST_P(ModularFileSystemTest, TestCreateMemoryRegionExistingDir) {
@@ -344,7 +344,7 @@ TEST_P(ModularFileSystemTest, TestCreateMemoryRegionExistingDir) {
   std::unique_ptr<ReadOnlyMemoryRegion> new_file;
   status = env_->NewReadOnlyMemoryRegionFromFile(filepath, &new_file);
-  EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::FAILED_PRECONDITION);
+  EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::FAILED_PRECONDITION);
 }

 TEST_P(ModularFileSystemTest, TestCreateMemoryRegionFromEmptyFile) {
@@ -356,7 +356,7 @@ TEST_P(ModularFileSystemTest, TestCreateMemoryRegionFromEmptyFile) {
   std::unique_ptr<ReadOnlyMemoryRegion> region;
   status = env_->NewReadOnlyMemoryRegionFromFile(filepath, &region);
-  EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::INVALID_ARGUMENT);
+  EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::INVALID_ARGUMENT);
 }

 TEST_P(ModularFileSystemTest, TestCreateMemoryRegionFromFile) {
@@ -376,7 +376,7 @@ TEST_P(ModularFileSystemTest, TestCreateMemoryRegionFromFile) {
   std::unique_ptr<ReadOnlyMemoryRegion> region;
   status = env_->NewReadOnlyMemoryRegionFromFile(filepath, &region);
-  EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::OK);
+  EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::OK);
   if (!status.ok())
     GTEST_SKIP() << "NewReadOnlyMemoryRegionFromFile() not supported: "
                  << status;
@@ -395,19 +395,19 @@ TEST_P(ModularFileSystemTest, TestCreateMemoryRegionFromFilePathIsInvalid) {
   std::string new_path = GetURIForPath("a_file/a_file");
   std::unique_ptr<ReadOnlyMemoryRegion> region;
   status = env_->NewReadOnlyMemoryRegionFromFile(new_path, &region);
-  EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::FAILED_PRECONDITION);
+  EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::FAILED_PRECONDITION);
 }

 TEST_P(ModularFileSystemTest, TestCreateDir) {
   const std::string dirpath = GetURIForPath("a_dir");
   Status status = env_->CreateDir(dirpath);
-  EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::OK);
+  EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::OK);
 }

 TEST_P(ModularFileSystemTest, TestCreateDirNoParent) {
   const std::string dirpath = GetURIForPath("dir_not_found/a_dir");
   Status status = env_->CreateDir(dirpath);
-  EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::NOT_FOUND);
+  EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::NOT_FOUND);
 }

 TEST_P(ModularFileSystemTest, TestCreateDirWhichIsFile) {
@@ -418,7 +418,7 @@ TEST_P(ModularFileSystemTest, TestCreateDirWhichIsFile) {
     GTEST_SKIP() << "NewWritableFile() not supported: " << status;
   status = env_->CreateDir(filepath);
-  EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::ALREADY_EXISTS);
+  EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::ALREADY_EXISTS);
 }

 TEST_P(ModularFileSystemTest, TestCreateDirTwice) {
@@ -427,7 +427,7 @@ TEST_P(ModularFileSystemTest, TestCreateDirTwice) {
   if (!status.ok()) GTEST_SKIP() << "CreateDir() not supported: " << status;
   status = env_->CreateDir(dirpath);
-  EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::ALREADY_EXISTS);
+  EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::ALREADY_EXISTS);
 }

 TEST_P(ModularFileSystemTest, TestCreateDirPathIsInvalid) {
@@ -439,7 +439,7 @@ TEST_P(ModularFileSystemTest, TestCreateDirPathIsInvalid) {
   const std::string new_path = GetURIForPath("a_file/a_dir");
   status = env_->CreateDir(new_path);
-  EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::FAILED_PRECONDITION);
+  EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::FAILED_PRECONDITION);
 }

 TEST_P(ModularFileSystemTest, TestRecursivelyCreateDir) {
@@ -528,7 +528,7 @@ TEST_P(ModularFileSystemTest, TestDeleteFile) {
     GTEST_SKIP() << "NewWritableFile() not supported: " << status;
   status = env_->DeleteFile(filepath);
-  EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::OK);
+  EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::OK);
 }

 TEST_P(ModularFileSystemTest, TestDeleteFileFromDirectory) {
@@ -543,13 +543,13 @@ TEST_P(ModularFileSystemTest, TestDeleteFileFromDirectory) {
     GTEST_SKIP() << "NewWritableFile() not supported: " << status;
   status = env_->DeleteFile(filepath);
-  EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::OK);
+  EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::OK);
 }

 TEST_P(ModularFileSystemTest, TestDeleteFileDoesNotExist) {
   const std::string filepath = GetURIForPath("a_file");
   Status status = env_->DeleteFile(filepath);
-  EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::NOT_FOUND);
+  EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::NOT_FOUND);
 }

 TEST_P(ModularFileSystemTest, TestDeleteFileWhichIsDirectory) {
@@ -558,7 +558,7 @@ TEST_P(ModularFileSystemTest, TestDeleteFileWhichIsDirectory) {
   if (!status.ok()) GTEST_SKIP() << "CreateDir() not supported: " << status;
   status = env_->DeleteFile(dirpath);
-  EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::FAILED_PRECONDITION);
+  EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::FAILED_PRECONDITION);
 }

 TEST_P(ModularFileSystemTest, TestDeleteFilePathIsInvalid) {
@@ -570,7 +570,7 @@ TEST_P(ModularFileSystemTest, TestDeleteFilePathIsInvalid) {
   const std::string new_path = GetURIForPath("a_file/a_new_file");
   status = env_->DeleteFile(new_path);
-  EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::FAILED_PRECONDITION);
+  EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::FAILED_PRECONDITION);
 }

 TEST_P(ModularFileSystemTest, TestDeleteDirectory) {
@@ -579,7 +579,7 @@ TEST_P(ModularFileSystemTest, TestDeleteDirectory) {
   if (!status.ok()) GTEST_SKIP() << "CreateDir() not supported: " << status;
   status = env_->DeleteDir(dirpath);
-  EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::OK);
+  EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::OK);
 }

 TEST_P(ModularFileSystemTest, TestDeleteDirectoryFromDirectory) {
@@ -591,13 +591,13 @@ TEST_P(ModularFileSystemTest, TestDeleteDirectoryFromDirectory) {
   EXPECT_EQ(env_->CreateDir(target_path).code(), Code::OK);
   status = env_->DeleteDir(target_path);
-  EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::OK);
+  EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::OK);
 }

 TEST_P(ModularFileSystemTest, TestDeleteDirectoryDoesNotExist) {
   const std::string dirpath = GetURIForPath("a_dir");
   Status status = env_->DeleteDir(dirpath);
-  EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::NOT_FOUND);
+  EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::NOT_FOUND);
 }

 TEST_P(ModularFileSystemTest, TestDeleteDirectoryNotEmpty) {
@@ -612,7 +612,7 @@ TEST_P(ModularFileSystemTest, TestDeleteDirectoryNotEmpty) {
    GTEST_SKIP() << "NewWritableFile() not supported: " << status;
   status = env_->DeleteDir(dirpath);
-  EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::FAILED_PRECONDITION);
+  EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::FAILED_PRECONDITION);
 }

 TEST_P(ModularFileSystemTest, TestDeleteDirectoryWhichIsFile) {
@@ -623,7 +623,7 @@ TEST_P(ModularFileSystemTest, TestDeleteDirectoryWhichIsFile) {
    GTEST_SKIP() << "NewWritableFile() not supported: " << status;
   status = env_->DeleteDir(filepath);
-  EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::FAILED_PRECONDITION);
+  EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::FAILED_PRECONDITION);
 }

 TEST_P(ModularFileSystemTest, TestDeleteDirectoryPathIsInvalid) {
@@ -635,7 +635,7 @@ TEST_P(ModularFileSystemTest, TestDeleteDirectoryPathIsInvalid) {
   const std::string new_path = GetURIForPath("a_file/a_dir");
   status = env_->DeleteDir(new_path);
-  EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::FAILED_PRECONDITION);
+  EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::FAILED_PRECONDITION);
 }

 TEST_P(ModularFileSystemTest, TestDeleteRecursivelyEmpty) {
@@ -774,13 +774,13 @@ TEST_P(ModularFileSystemTest, TestRenameFile) {
   const std::string new_filepath = GetURIForPath("a_new_file");
   status = env_->RenameFile(filepath, new_filepath);
-  EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::OK);
+  EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::OK);
   if (!status.ok()) GTEST_SKIP() << "RenameFile() not supported: " << status;
   status = env_->FileExists(filepath);
-  EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::NOT_FOUND);
+  EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::NOT_FOUND);
   status = env_->FileExists(new_filepath);
-  EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::OK);
+  EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::OK);
 }

 TEST_P(ModularFileSystemTest, TestRenameFileOverwrite) {
@@ -797,20 +797,20 @@ TEST_P(ModularFileSystemTest, TestRenameFileOverwrite) {
    GTEST_SKIP() << "NewWritableFile() not supported: " << status;
   status = env_->RenameFile(filepath, new_filepath);
-  EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::OK);
+  EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::OK);
   if (!status.ok()) GTEST_SKIP() << "RenameFile() not supported: " << status;
   status = env_->FileExists(filepath);
-  EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::NOT_FOUND);
+  EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::NOT_FOUND);
   status = env_->FileExists(new_filepath);
-  EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::OK);
+  EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::OK);
 }

 TEST_P(ModularFileSystemTest, TestRenameFileSourceNotFound) {
   const std::string filepath = GetURIForPath("a_file");
   const std::string new_filepath = GetURIForPath("a_new_file");
   Status status = env_->RenameFile(filepath, new_filepath);
-  EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::NOT_FOUND);
+  EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::NOT_FOUND);
 }

 TEST_P(ModularFileSystemTest, TestRenameFileDestinationParentNotFound) {
@@ -822,7 +822,7 @@ TEST_P(ModularFileSystemTest, TestRenameFileDestinationParentNotFound) {
   const std::string new_filepath = GetURIForPath("a_dir/a_file");
   status = env_->RenameFile(filepath, new_filepath);
-  EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::NOT_FOUND);
+  EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::NOT_FOUND);
 }

 TEST_P(ModularFileSystemTest, TestRenameFileSourceIsDirectory) {
@@ -832,7 +832,7 @@ TEST_P(ModularFileSystemTest, TestRenameFileSourceIsDirectory) {
   const std::string new_filepath = GetURIForPath("a_new_file");
   status = env_->RenameFile(dirpath, new_filepath);
-  EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::FAILED_PRECONDITION);
+  EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::FAILED_PRECONDITION);
 }

 TEST_P(ModularFileSystemTest, TestRenameFileTargetIsDirectory) {
@@ -847,7 +847,7 @@ TEST_P(ModularFileSystemTest, TestRenameFileTargetIsDirectory) {
   if (!status.ok()) GTEST_SKIP() << "CreateDir() not supported: " << status;
   status = env_->RenameFile(filepath, dirpath);
-  EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::FAILED_PRECONDITION);
+  EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::FAILED_PRECONDITION);
 }

 TEST_P(ModularFileSystemTest, TestRenameFileSourcePathIsInvalid) {
@@ -860,7 +860,7 @@ TEST_P(ModularFileSystemTest, TestRenameFileSourcePathIsInvalid) {
   const std::string old_filepath = GetURIForPath("a_file/x");
   const std::string new_filepath = GetURIForPath("a_new_file");
   status = env_->RenameFile(old_filepath, new_filepath);
-  EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::FAILED_PRECONDITION);
+  EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::FAILED_PRECONDITION);
 }

 TEST_P(ModularFileSystemTest, TestRenameFileTargetPathIsInvalid) {
@@ -878,7 +878,7 @@ TEST_P(ModularFileSystemTest, TestRenameFileTargetPathIsInvalid) {
   const std::string new_filepath = GetURIForPath("a_file/a_new_file");
   status = env_->RenameFile(old_filepath, new_filepath);
-  EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::FAILED_PRECONDITION);
+  EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::FAILED_PRECONDITION);
 }

 TEST_P(ModularFileSystemTest, TestRenameFileCompareContents) {
@@ -898,12 +898,12 @@ TEST_P(ModularFileSystemTest, TestRenameFileCompareContents) {
   const std::string new_filepath = GetURIForPath("a_new_file");
   status = env_->RenameFile(filepath, new_filepath);
-  EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::OK);
+  EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::OK);
   if (!status.ok()) GTEST_SKIP() << "RenameFile() not supported: " << status;
   uint64 size;
   status = env_->GetFileSize(new_filepath, &size);
-  EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::OK);
+  EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::OK);
   if (!status.ok()) GTEST_SKIP() << "GetFileSize() not supported: " << status;
   EXPECT_EQ(size, test_data.size());
 }
@@ -917,13 +917,13 @@ TEST_P(ModularFileSystemTest, TestCopyFile) {
   const std::string new_filepath = GetURIForPath("a_new_file");
   status = env_->CopyFile(filepath, new_filepath);
-  EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::OK);
+  EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::OK);
   if (!status.ok()) GTEST_SKIP() << "CopyFile() not supported: " << status;
   status = env_->FileExists(filepath);
-  EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::OK);
+  EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::OK);
   status = env_->FileExists(new_filepath);
-  EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::OK);
+  EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::OK);
 }

 TEST_P(ModularFileSystemTest, TestCopyFileOverwrite) {
@@ -940,20 +940,20 @@ TEST_P(ModularFileSystemTest, TestCopyFileOverwrite) {
    GTEST_SKIP() << "NewWritableFile() not supported: " << status;
   status = env_->CopyFile(filepath, new_filepath);
-  EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::OK);
+  EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::OK);
   if (!status.ok()) GTEST_SKIP() << "CopyFile() not supported: " << status;
   status = env_->FileExists(filepath);
-  EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::OK);
+  EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::OK);
   status = env_->FileExists(new_filepath);
-  EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::OK);
+  EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::OK);
 }

 TEST_P(ModularFileSystemTest, TestCopyFileSourceNotFound) {
   const std::string filepath = GetURIForPath("a_file");
   const std::string new_filepath = GetURIForPath("a_new_file");
   Status status = env_->CopyFile(filepath, new_filepath);
-  EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::NOT_FOUND);
+  EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::NOT_FOUND);
 }

 TEST_P(ModularFileSystemTest, TestCopyFileSourceIsDirectory) {
@@ -963,7 +963,7 @@ TEST_P(ModularFileSystemTest, TestCopyFileSourceIsDirectory) {
   const std::string new_filepath = GetURIForPath("a_new_file");
   status = env_->CopyFile(dirpath, new_filepath);
-  EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::FAILED_PRECONDITION);
+  EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::FAILED_PRECONDITION);
 }

 TEST_P(ModularFileSystemTest, TestCopyFileTargetIsDirectory) {
@@ -978,7 +978,7 @@ TEST_P(ModularFileSystemTest, TestCopyFileTargetIsDirectory) {
   if (!status.ok()) GTEST_SKIP() << "CreateDir() not supported: " << status;
   status = env_->CopyFile(filepath, dirpath);
-  EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::FAILED_PRECONDITION);
+  EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::FAILED_PRECONDITION);
 }

 TEST_P(ModularFileSystemTest, TestCopyFileSourcePathIsInvalid) {
@@ -991,7 +991,7 @@ TEST_P(ModularFileSystemTest, TestCopyFileSourcePathIsInvalid) {
   const std::string old_filepath = GetURIForPath("a_file/x");
   const std::string new_filepath = GetURIForPath("a_new_file");
   status = env_->CopyFile(old_filepath, new_filepath);
-  EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::FAILED_PRECONDITION);
+  EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::FAILED_PRECONDITION);
 }

 TEST_P(ModularFileSystemTest, TestCopyFileTargetPathIsInvalid) {
@@ -1009,7 +1009,7 @@ TEST_P(ModularFileSystemTest, TestCopyFileTargetPathIsInvalid) {
   const std::string new_filepath = GetURIForPath("a_file/a_new_file");
   status = env_->CopyFile(old_filepath, new_filepath);
-  EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::FAILED_PRECONDITION);
+  EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::FAILED_PRECONDITION);
 }

 TEST_P(ModularFileSystemTest, TestCopyFileCompareContents) {
@@ -1029,17 +1029,17 @@ TEST_P(ModularFileSystemTest, TestCopyFileCompareContents) {
   const std::string new_filepath = GetURIForPath("a_new_file");
   status = env_->CopyFile(filepath, new_filepath);
-  EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::OK);
+  EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::OK);
   if (!status.ok()) GTEST_SKIP() << "RenameFile() not supported: " << status;
   uint64 size;
   status = env_->GetFileSize(filepath, &size);
-  EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::OK);
+  EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::OK);
   if (!status.ok()) GTEST_SKIP() << "GetFileSize() not supported: " << status;
   EXPECT_EQ(size, test_data.size());
   status = env_->GetFileSize(new_filepath, &size);
-  EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::OK);
+  EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::OK);
   if (!status.ok()) GTEST_SKIP() << "GetFileSize() not supported: " << status;
   EXPECT_EQ(size, test_data.size());
 }
@@ -1052,7 +1052,7 @@ TEST_P(ModularFileSystemTest, TestFileExists) {
    GTEST_SKIP() << "NewWritableFile() not supported: " << status;
   status = env_->FileExists(filepath);
-  EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::OK);
+  EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::OK);
 }

 TEST_P(ModularFileSystemTest, TestFileExistsButIsDirectory) {
@@ -1061,13 +1061,13 @@ TEST_P(ModularFileSystemTest, TestFileExistsButIsDirectory) {
   if (!status.ok()) GTEST_SKIP() << "CreateDir() not supported: " << status;
   status = env_->FileExists(filepath);
-  EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::OK);
+  EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::OK);
 }

 TEST_P(ModularFileSystemTest, TestFileExistsNotFound) {
   const std::string filepath = GetURIForPath("a_file");
   Status status = env_->FileExists(filepath);
-  EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::NOT_FOUND);
+  EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::NOT_FOUND);
 }

 TEST_P(ModularFileSystemTest, TestFileExistsPathIsInvalid) {
@@ -1079,7 +1079,7 @@ TEST_P(ModularFileSystemTest, TestFileExistsPathIsInvalid) {
   const std::string target_path = GetURIForPath("a_file/a_new_file");
   status = env_->FileExists(target_path);
-  EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::FAILED_PRECONDITION);
+  EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::FAILED_PRECONDITION);
 }

 TEST_P(ModularFileSystemTest, TestFilesExist) {
@@ -1098,7 +1098,7 @@ TEST_P(ModularFileSystemTest, TestFilesExist) {
   EXPECT_TRUE(env_->FilesExist(filenames, &statuses));
   EXPECT_EQ(statuses.size(), filenames.size());
   for (const auto& status : statuses)
-    EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::OK);
+    EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::OK);
 }

 TEST_P(ModularFileSystemTest, TestFilesExistAllFailureModes) {
@@ -1121,11 +1121,11 @@ TEST_P(ModularFileSystemTest, TestFilesExistAllFailureModes) {
   std::vector<Status> statuses;
   EXPECT_FALSE(env_->FilesExist(filenames, &statuses));
   EXPECT_EQ(statuses.size(), filenames.size());
-  EXPECT_PRED2(UninmplementedOrReturnsCode, statuses[0], Code::OK);
+  EXPECT_PRED2(UnimplementedOrReturnsCode, statuses[0], Code::OK);
-  EXPECT_PRED2(UninmplementedOrReturnsCode, statuses[1], Code::OK);
+  EXPECT_PRED2(UnimplementedOrReturnsCode, statuses[1], Code::OK);
-  EXPECT_PRED2(UninmplementedOrReturnsCode, statuses[2],
+  EXPECT_PRED2(UnimplementedOrReturnsCode, statuses[2],
                Code::FAILED_PRECONDITION);
-  EXPECT_PRED2(UninmplementedOrReturnsCode, statuses[3], Code::NOT_FOUND);
+  EXPECT_PRED2(UnimplementedOrReturnsCode, statuses[3], Code::NOT_FOUND);
 }

 TEST_P(ModularFileSystemTest, TestFilesExistsNoFiles) {
@@ -1146,7 +1146,7 @@ TEST_P(ModularFileSystemTest, TestStatEmptyFile) {
   FileStatistics stat;
   status = env_->Stat(filepath, &stat);
-  EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::OK);
+  EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::OK);
   if (!status.ok()) GTEST_SKIP() << "Stat() not supported: " << status;
   EXPECT_FALSE(stat.is_directory);
   EXPECT_EQ(stat.length, 0);
@@ -1169,7 +1169,7 @@ TEST_P(ModularFileSystemTest, TestStatNonEmptyFile) {
   FileStatistics stat;
   status = env_->Stat(filepath, &stat);
-  EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::OK);
+  EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::OK);
   if (!status.ok()) GTEST_SKIP() << "Stat() not supported: " << status;
   EXPECT_FALSE(stat.is_directory);
   EXPECT_EQ(stat.length, test_data.size());
@@ -1182,7 +1182,7 @@ TEST_P(ModularFileSystemTest, TestStatDirectory) {
   FileStatistics stat;
   status = env_->Stat(dirpath, &stat);
-  EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::OK);
+  EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::OK);
   if (!status.ok()) GTEST_SKIP() << "Stat() not supported: " << status;
   EXPECT_TRUE(stat.is_directory);
 }
@@ -1191,7 +1191,7 @@ TEST_P(ModularFileSystemTest, TestStatNotFound) {
   const std::string dirpath = GetURIForPath("a_dir");
   FileStatistics stat;
   Status status = env_->Stat(dirpath, &stat);
-  EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::NOT_FOUND);
+  EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::NOT_FOUND);
 }

 TEST_P(ModularFileSystemTest, TestStatPathIsInvalid) {
@@ -1204,7 +1204,7 @@ TEST_P(ModularFileSystemTest, TestStatPathIsInvalid) {
   const std::string target_path = GetURIForPath("a_file/a_new_file");
   FileStatistics stat;
   status = env_->Stat(target_path, &stat);
-  EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::FAILED_PRECONDITION);
+  EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::FAILED_PRECONDITION);
 }

 TEST_P(ModularFileSystemTest, TestIsDirectory) {
@@ -1213,7 +1213,7 @@ TEST_P(ModularFileSystemTest, TestIsDirectory) {
   if (!status.ok()) GTEST_SKIP() << "CreateDir() not supported: " << status;
   status = env_->IsDirectory(dirpath);
-  EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::OK);
+  EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::OK);
 }

 TEST_P(ModularFileSystemTest, TestIsDirectoryFile) {
@@ -1224,13 +1224,13 @@ TEST_P(ModularFileSystemTest, TestIsDirectoryFile) {
    GTEST_SKIP() << "NewWritableFile() not supported: " << status;
   status = env_->IsDirectory(filepath);
-  EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::FAILED_PRECONDITION);
+  EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::FAILED_PRECONDITION);
 }

 TEST_P(ModularFileSystemTest, TestIsDirectoryNotFound) {
   const std::string dirpath = GetURIForPath("a_dir");
   Status status = env_->IsDirectory(dirpath);
-  EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::NOT_FOUND);
+  EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::NOT_FOUND);
 }

 TEST_P(ModularFileSystemTest, TestIsDirectoryPathIsInvalid) {
@@ -1242,7 +1242,7 @@ TEST_P(ModularFileSystemTest, TestIsDirectoryPathIsInvalid) {
   const std::string target_path = GetURIForPath("a_file/a_new_file");
   status = env_->IsDirectory(target_path);
-  EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::FAILED_PRECONDITION);
+  EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::FAILED_PRECONDITION);
 }

 TEST_P(ModularFileSystemTest, TestGetFileSizeEmptyFile) {
@@ -1254,7 +1254,7 @@ TEST_P(ModularFileSystemTest, TestGetFileSizeEmptyFile) {
   uint64 size;
   status = env_->GetFileSize(filepath, &size);
-  EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::OK);
+  EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::OK);
   if (!status.ok()) GTEST_SKIP() << "GetFileSize() not supported: " << status;
   EXPECT_EQ(size, 0);
 }
@@ -1276,7 +1276,7 @@ TEST_P(ModularFileSystemTest, TestGetFileSizeNonEmptyFile) {
   uint64 size;
   status = env_->GetFileSize(filepath, &size);
-  EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::OK);
+  EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::OK);
   if (!status.ok()) GTEST_SKIP() << "GetFileSize() not supported: " << status;
   EXPECT_EQ(size, test_data.size());
 }
@@ -1288,14 +1288,14 @@ TEST_P(ModularFileSystemTest, TestGetFileSizeDirectory) {
   uint64 size;
   status = env_->GetFileSize(dirpath, &size);
-  EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::FAILED_PRECONDITION);
+  EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::FAILED_PRECONDITION);
 }

 TEST_P(ModularFileSystemTest, TestGetFileSizeNotFound) {
   const std::string filepath = GetURIForPath("a_dir");
   uint64 size;
   Status status = env_->GetFileSize(filepath, &size);
-  EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::NOT_FOUND);
+  EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::NOT_FOUND);
 }

 TEST_P(ModularFileSystemTest, TestGetFileSizePathIsInvalid) {
@@ -1308,7 +1308,7 @@ TEST_P(ModularFileSystemTest, TestGetFileSizePathIsInvalid) {
   const std::string target_path = GetURIForPath("a_file/a_new_file");
   uint64 size;
   status = env_->GetFileSize(target_path, &size);
-  EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::FAILED_PRECONDITION);
+  EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::FAILED_PRECONDITION);
 }

 TEST_P(ModularFileSystemTest, TestGetChildren) {
@@ -1340,7 +1340,7 @@ TEST_P(ModularFileSystemTest, TestGetChildren) {
   std::vector<std::string> children;
   status = env_->GetChildren(dirpath, &children);
-  EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::OK);
+  EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::OK);
   if (!status.ok()) GTEST_SKIP() << "GetChildren() not supported: " << status;
   // All entries must show up in the vector.
@@ -1360,7 +1360,7 @@ TEST_P(ModularFileSystemTest, TestGetChildrenEmpty) {
   std::vector<std::string> children;
   status = env_->GetChildren(dirpath, &children);
-  EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::OK);
+  EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::OK);
   EXPECT_EQ(children.size(), 0);
 }
@@ -1373,14 +1373,14 @@ TEST_P(ModularFileSystemTest, TestGetChildrenOfFile) {
   std::vector<std::string> children;
   status = env_->GetChildren(filepath, &children);
-  EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::FAILED_PRECONDITION);
+  EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::FAILED_PRECONDITION);
 }

 TEST_P(ModularFileSystemTest, TestGetChildrenPathNotFound) {
   const std::string target_path = GetURIForPath("a_dir");
   std::vector<std::string> children;
   Status status = env_->GetChildren(target_path, &children);
-  EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::NOT_FOUND);
+  EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::NOT_FOUND);
 }

 TEST_P(ModularFileSystemTest, TestGetChildrenPathIsInvalid) {
@@ -1393,7 +1393,7 @@ TEST_P(ModularFileSystemTest, TestGetChildrenPathIsInvalid) {
   const std::string target_path = GetURIForPath("a_file/a_new_dir");
   std::vector<std::string> children;
   status = env_->GetChildren(target_path, &children);
-  EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::FAILED_PRECONDITION);
+  EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::FAILED_PRECONDITION);
 }

 TEST_P(ModularFileSystemTest, TestGetMatchingPaths) {
@@ -1422,7 +1422,7 @@ TEST_P(ModularFileSystemTest, TestGetMatchingPaths) {
   std::vector<std::string> results;
   Status status = env_->GetMatchingPaths(GetURIForPath("/a*"), &results);
-  EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::OK);
+  EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::OK);
   if (!status.ok())
     GTEST_SKIP() << "GetMatchingPaths() not supported: " << status;
   EXPECT_EQ(results.size(), matching_filenames.size());
@@ -1433,7 +1433,7 @@ TEST_P(ModularFileSystemTest, TestGetMatchingPaths) {
 TEST_P(ModularFileSystemTest, TestGetMatchingPathsEmptyFileSystem) {
   std::vector<std::string> results;
   Status status = env_->GetMatchingPaths(GetURIForPath("a*"), &results);
-  EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::OK);
+  EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::OK);
   EXPECT_EQ(results.size(), 0);
 }
@@ -1454,7 +1454,7 @@ TEST_P(ModularFileSystemTest, TestGetMatchingPathsEmptyPattern) {
   std::vector<std::string> results;
   Status status = env_->GetMatchingPaths(GetURIForPath(""), &results);
-  EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::OK);
+  EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::OK);
   if (!status.ok())
     GTEST_SKIP() << "GetMatchingPaths() not supported: " << status;
   EXPECT_EQ(results.size(), 1);
@@ -1479,7 +1479,7 @@ TEST_P(ModularFileSystemTest, TestGetMatchingPathsLiteralMatch) {
   std::vector<std::string> results;
   Status status = env_->GetMatchingPaths(filenames[0], &results);
-  EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::OK);
+  EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::OK);
   if (!status.ok())
     GTEST_SKIP() << "GetMatchingPaths() not supported: " << status;
   EXPECT_EQ(results.size(), 1);
@@ -1506,7 +1506,7 @@ TEST_P(ModularFileSystemTest, TestGetMatchingPathsNoMatch) {
   Status status = env_->GetMatchingPaths(GetURIForPath("x?y*"), &results);
   if (!status.ok())
     GTEST_SKIP() << "GetMatchingPaths() not supported: " << status;
-  EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::OK);
+  EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::OK);
   EXPECT_EQ(results.size(), 0);
 }
@ -1519,13 +1519,13 @@ TEST_P(ModularFileSystemTest, TestAppendAndTell) {
int64 position; int64 position;
status = file->Tell(&position); status = file->Tell(&position);
EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::OK); EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::OK);
if (!status.ok()) GTEST_SKIP() << "Tell() not supported: " << status; if (!status.ok()) GTEST_SKIP() << "Tell() not supported: " << status;
EXPECT_EQ(position, 0); EXPECT_EQ(position, 0);
const std::string test_data("asdf"); const std::string test_data("asdf");
status = file->Append(test_data); status = file->Append(test_data);
EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::OK); EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::OK);
if (!status.ok()) GTEST_SKIP() << "Append() not supported: " << status; if (!status.ok()) GTEST_SKIP() << "Append() not supported: " << status;
status = file->Tell(&position); status = file->Tell(&position);
@ -1541,7 +1541,7 @@ TEST_P(ModularFileSystemTest, TestClose) {
GTEST_SKIP() << "NewWritableFile() not supported: " << status; GTEST_SKIP() << "NewWritableFile() not supported: " << status;
status = file->Close(); status = file->Close();
EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::OK); EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::OK);
if (!status.ok()) GTEST_SKIP() << "Close() not supported: " << status; if (!status.ok()) GTEST_SKIP() << "Close() not supported: " << status;
} }
@ -1554,15 +1554,15 @@ TEST_P(ModularFileSystemTest, TestRoundTrip) {
const std::string test_data("asdf"); const std::string test_data("asdf");
status = file->Append(test_data); status = file->Append(test_data);
EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::OK); EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::OK);
if (!status.ok()) GTEST_SKIP() << "Append() not supported: " << status; if (!status.ok()) GTEST_SKIP() << "Append() not supported: " << status;
status = file->Flush(); status = file->Flush();
EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::OK); EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::OK);
if (!status.ok()) GTEST_SKIP() << "Flush() not supported: " << status; if (!status.ok()) GTEST_SKIP() << "Flush() not supported: " << status;
status = file->Close(); status = file->Close();
EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::OK); EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::OK);
if (!status.ok()) GTEST_SKIP() << "Close() not supported: " << status; if (!status.ok()) GTEST_SKIP() << "Close() not supported: " << status;
std::unique_ptr<RandomAccessFile> read_file; std::unique_ptr<RandomAccessFile> read_file;
@ -1573,7 +1573,7 @@ TEST_P(ModularFileSystemTest, TestRoundTrip) {
char scratch[64 /* big enough to accommodate test_data */] = {0}; char scratch[64 /* big enough to accommodate test_data */] = {0};
StringPiece result; StringPiece result;
status = read_file->Read(0, test_data.size(), &result, scratch); status = read_file->Read(0, test_data.size(), &result, scratch);
EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::OK); EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::OK);
EXPECT_EQ(test_data, result); EXPECT_EQ(test_data, result);
} }
@ -1586,15 +1586,15 @@ TEST_P(ModularFileSystemTest, TestRoundTripWithAppendableFile) {
const std::string test_data("asdf"); const std::string test_data("asdf");
status = file->Append(test_data); status = file->Append(test_data);
EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::OK); EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::OK);
if (!status.ok()) GTEST_SKIP() << "Append() not supported: " << status; if (!status.ok()) GTEST_SKIP() << "Append() not supported: " << status;
status = file->Flush(); status = file->Flush();
EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::OK); EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::OK);
if (!status.ok()) GTEST_SKIP() << "Flush() not supported: " << status; if (!status.ok()) GTEST_SKIP() << "Flush() not supported: " << status;
status = file->Close(); status = file->Close();
EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::OK); EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::OK);
if (!status.ok()) GTEST_SKIP() << "Close() not supported: " << status; if (!status.ok()) GTEST_SKIP() << "Close() not supported: " << status;
std::unique_ptr<WritableFile> same_file; std::unique_ptr<WritableFile> same_file;
@ -1616,7 +1616,7 @@ TEST_P(ModularFileSystemTest, TestRoundTripWithAppendableFile) {
StringPiece result; StringPiece result;
status = read_file->Read(0, test_data.size() + more_test_data.size(), &result, status = read_file->Read(0, test_data.size() + more_test_data.size(), &result,
scratch); scratch);
EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::OK); EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::OK);
EXPECT_EQ(test_data + more_test_data, result); EXPECT_EQ(test_data + more_test_data, result);
EXPECT_EQ( EXPECT_EQ(
read_file->Read(test_data.size(), more_test_data.size(), &result, scratch) read_file->Read(test_data.size(), more_test_data.size(), &result, scratch)
@ -1634,15 +1634,15 @@ TEST_P(ModularFileSystemTest, TestReadOutOfRange) {
const std::string test_data("asdf"); const std::string test_data("asdf");
status = file->Append(test_data); status = file->Append(test_data);
EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::OK); EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::OK);
if (!status.ok()) GTEST_SKIP() << "Append() not supported: " << status; if (!status.ok()) GTEST_SKIP() << "Append() not supported: " << status;
status = file->Flush(); status = file->Flush();
EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::OK); EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::OK);
if (!status.ok()) GTEST_SKIP() << "Flush() not supported: " << status; if (!status.ok()) GTEST_SKIP() << "Flush() not supported: " << status;
status = file->Close(); status = file->Close();
EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::OK); EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::OK);
if (!status.ok()) GTEST_SKIP() << "Close() not supported: " << status; if (!status.ok()) GTEST_SKIP() << "Close() not supported: " << status;
std::unique_ptr<RandomAccessFile> read_file; std::unique_ptr<RandomAccessFile> read_file;
@ -1654,7 +1654,7 @@ TEST_P(ModularFileSystemTest, TestReadOutOfRange) {
StringPiece result; StringPiece result;
// read at least 1 byte more than test_data // read at least 1 byte more than test_data
status = read_file->Read(0, test_data.size() + 1, &result, scratch); status = read_file->Read(0, test_data.size() + 1, &result, scratch);
EXPECT_PRED2(UninmplementedOrReturnsCode, status, Code::OUT_OF_RANGE); EXPECT_PRED2(UnimplementedOrReturnsCode, status, Code::OUT_OF_RANGE);
} }
// The URI schemes that need to be tested are provided by the user via flags // The URI schemes that need to be tested are provided by the user via flags
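
Every assertion in the hunks above funnels through the two-argument UnimplementedOrReturnsCode predicate. A minimal sketch of what such a helper could look like, inferred only from how it is called here and not taken from the test file itself:

// Passes when the filesystem plugin either returns the expected code or reports
// UNIMPLEMENTED, so the callers above can GTEST_SKIP() unsupported operations.
static bool UnimplementedOrReturnsCode(Status actual_status, Code expected_code) {
  Code actual_code = actual_status.code();
  return actual_code == Code::UNIMPLEMENTED || actual_code == expected_code;
}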

@ -44,7 +44,7 @@ int TransferFileContents(const char* src, const char* dst, mode_t mode,
} }
// Both files have been opened, do the transfer. // Both files have been opened, do the transfer.
// Since errno would be overriden by `close` below, save it here. // Since errno would be overridden by `close` below, save it here.
int error_code = 0; int error_code = 0;
if (CopyFileContents(dst_fd, src_fd, size) < 0) error_code = errno; if (CopyFileContents(dst_fd, src_fd, size) < 0) error_code = errno;
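
The comment above is about preserving errno across close(); a generic sketch of that pattern follows (WriteAndClose and ErrnoToStatus are illustrative names, not part of this file):

#include <cerrno>    // errno
#include <unistd.h>  // write, close

Status WriteAndClose(int fd, const char* buf, size_t len) {
  // Capture errno from the failing call before close() can overwrite it.
  int saved_errno = 0;
  if (write(fd, buf, len) < 0) saved_errno = errno;
  close(fd);
  return saved_errno == 0 ? Status::OK()
                          : ErrnoToStatus(saved_errno);  // hypothetical helper
}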

@ -133,7 +133,7 @@ TEST(OpsTest, TestShapeInference_VectorizeFunction) {
TEST(OpsTest, AttributeAccessors) { TEST(OpsTest, AttributeAccessors) {
TF_OpDefinitionBuilder* builder = TF_OpDefinitionBuilder* builder =
TF_NewOpDefinitionBuilder("AttributeAccesorsOp"); TF_NewOpDefinitionBuilder("AttributeAccessorsOp");
TF_OpDefinitionBuilderAddAttr(builder, "foo1: int >= 2"); TF_OpDefinitionBuilderAddAttr(builder, "foo1: int >= 2");
TF_OpDefinitionBuilderAddAttr(builder, "foo2: string=\"my string\""); TF_OpDefinitionBuilderAddAttr(builder, "foo2: string=\"my string\"");
TF_OpDefinitionBuilderSetIsCommutative(builder, true); TF_OpDefinitionBuilderSetIsCommutative(builder, true);
@ -151,7 +151,7 @@ TEST(OpsTest, AttributeAccessors) {
op_list.ParseFromArray(op_list_buffer->data, op_list_buffer->length); op_list.ParseFromArray(op_list_buffer->data, op_list_buffer->length);
bool found = false; bool found = false;
for (const auto& op : op_list.op()) { for (const auto& op : op_list.op()) {
if (op.name() == "AttributeAccesorsOp") { if (op.name() == "AttributeAccessorsOp") {
ASSERT_TRUE(op.is_commutative()); ASSERT_TRUE(op.is_commutative());
ASSERT_TRUE(op.is_aggregate()); ASSERT_TRUE(op.is_aggregate());
ASSERT_TRUE(op.allows_uninitialized_input()); ASSERT_TRUE(op.allows_uninitialized_input());

@ -383,7 +383,7 @@ Status TensorInterface::ToTensor(Tensor* dst) const {
if (!dst->scalar<tensorflow::ResourceHandle>()().ParseFromString( if (!dst->scalar<tensorflow::ResourceHandle>()().ParseFromString(
string(static_cast<const char*>(Data()), ByteSize()))) { string(static_cast<const char*>(Data()), ByteSize()))) {
return InvalidArgument( return InvalidArgument(
"Malformed TF_RESOUCE tensor: unable to parse resource handle"); "Malformed TF_RESOURCE tensor: unable to parse resource handle");
} }
return Status::OK(); return Status::OK();
} }

@ -346,8 +346,8 @@ Status SymbolicGradientBuilder::SumGradients(const Output& src, Output* grad) {
"Unable to find backprop list for node.id ", src.node()->name()); "Unable to find backprop list for node.id ", src.node()->name());
} }
const auto& grads = iter->second; const auto& grads = iter->second;
// Filter any backproped 'NoGradient' Outputs from 'grads' (if needed). // Filter any backpropped 'NoGradient' Outputs from 'grads' (if needed).
// Return any valid backproped gradients that remain after filtering, // Return any valid backpropped gradients that remain after filtering,
// or 'NoGradient' otherwise. // or 'NoGradient' otherwise.
std::vector<Output> grads_to_keep; std::vector<Output> grads_to_keep;
for (const Output& o : grads) { for (const Output& o : grads) {
@ -519,7 +519,7 @@ Status SymbolicGradientBuilder::AddGradients() {
// Backprop along the in edges. // Backprop along the in edges.
// TODO(andydavis) Find cleaner way to map each grad output returned by // TODO(andydavis) Find cleaner way to map each grad output returned by
// gradient function to the src node/output to which it should be // gradient function to the src node/output to which it should be
// backproped. Maybe grad functions can return a vector of Output pairs to // backpropped. Maybe grad functions can return a vector of Output pairs to
// make this association explicit. // make this association explicit.
size_t dx_index = 0; size_t dx_index = 0;
for (const Edge* e : n->in_edges()) { for (const Edge* e : n->in_edges()) {
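
The "filter backpropped 'NoGradient' Outputs" step described in the SumGradients comment above could look roughly like this sketch (an assumed shape; IsNoGradient and scope are illustrative, and the real file may structure it differently):

// Drop 'NoGradient' entries, then sum whatever remains; if nothing remains,
// the summed gradient itself is 'NoGradient'.
std::vector<Output> grads_to_keep;
for (const Output& o : grads) {
  if (IsNoGradient(o)) continue;  // hypothetical predicate for the sentinel
  grads_to_keep.push_back(o);
}
*grad = grads_to_keep.empty() ? NoGradient()
                              : ops::AddN(scope, grads_to_keep);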

@ -64,7 +64,7 @@ bool IsZero(const Scope& scope, const Output& grad) {
// Multiply after broadcasting vec to match dimensions of mat. // Multiply after broadcasting vec to match dimensions of mat.
// Args: // Args:
// vec: A 1-D tensor of dimension [D0] // vec: A 1-D tensor of dimension [D0]
// mat: A 2-D tensor of dimesnion [D0, D1] // mat: A 2-D tensor of dimension [D0, D1]
// //
// Returns: // Returns:
// A tensor of dimension [D0, D1], the result of vec * mat. // A tensor of dimension [D0, D1], the result of vec * mat.
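
A sketch of the broadcast-multiply this comment describes (not necessarily the file's actual helper): expand vec from [D0] to [D0, 1] so that elementwise multiplication broadcasts across mat's D1 columns.

// vec: [D0], mat: [D0, D1]  ->  result: [D0, D1]
Output BroadcastMul(const Scope& scope, const Output& vec, const Output& mat) {
  auto expanded = ops::ExpandDims(scope, vec, -1);  // [D0] -> [D0, 1]
  return ops::Multiply(scope, expanded, mat);       // broadcasts over D1
}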

@ -413,7 +413,7 @@ void SideEffectAnalysis::AnalyzeRegion(
// Returns whether an access to `resource` can skip control edges from // Returns whether an access to `resource` can skip control edges from
// previous accesses to unknown resources, due to that earlier accesses to // previous accesses to unknown resources, due to that earlier accesses to
// `resource` already indirectly tracked previous accesses to uknown // `resource` already indirectly tracked previous accesses to unknown
// resources. `read_only` specifies the type of access of the current op being // resources. `read_only` specifies the type of access of the current op being
// considered. // considered.
auto unknown_access_indirectly_tracked_by_resource = [&](int64_t resource, auto unknown_access_indirectly_tracked_by_resource = [&](int64_t resource,

@ -62,7 +62,7 @@ class ResourceAliasAnalysis {
// An analysis that runs on a function and infers the control predecessors and // An analysis that runs on a function and infers the control predecessors and
// successors for each op, based on side-effects on known and unknown resources. // successors for each op, based on side-effects on known and unknown resources.
// Side-effecting ops on uknown resources are conservatively treated as // Side-effecting ops on unknown resources are conservatively treated as
// interfering with all known resource op accesses. It distinguishes accesses // interfering with all known resource op accesses. It distinguishes accesses
// based on whether they are read-only, and read-only ops do not interfere with // based on whether they are read-only, and read-only ops do not interfere with
// each other. // each other.

@ -13,8 +13,8 @@ See the License for the specific language governing permissions and
limitations under the License. limitations under the License.
==============================================================================*/ ==============================================================================*/
// This transformation pass transforms MLIR TF contol dialect into a combination // This transformation pass transforms MLIR TF control dialect into a
// of the TF and TF executor dialects. // combination of the TF and TF executor dialects.
// //
// !! This code is only intended for migration purpose and will be deleted when // !! This code is only intended for migration purpose and will be deleted when
// !! the importer is updated to directly emit the tf_executor dialect. // !! the importer is updated to directly emit the tf_executor dialect.

@ -617,7 +617,7 @@ bool TRTEngineOp::ExecuteTrtEngine(OpKernelContext* ctx,
} }
} else { } else {
const string msg = const string msg =
StrCat("Ouput node ", output_name, " not found, at ", name()); StrCat("Output node ", output_name, " not found, at ", name());
LOG(ERROR) << msg; LOG(ERROR) << msg;
ctx->SetStatus(errors::NotFound(msg)); ctx->SetStatus(errors::NotFound(msg));
return !kRetry; return !kRetry;

@ -329,7 +329,7 @@ class XlaBuilder {
int64 target_param_num, int64 target_param_num,
ShapeIndex target_param_index, int64 target_dim_num); ShapeIndex target_param_index, int64 target_dim_num);
// Adds a new input/output alias. Since the input/ouput shape information is // Adds a new input/output alias. Since the input/output shape information is
// not available until the computation is built, any eventual error in the // not available until the computation is built, any eventual error in the
// arguments of this API will be detected only at computation Build() time. // arguments of this API will be detected only at computation Build() time.
void SetUpAlias(const ShapeIndex& output_index, int64 param_number, void SetUpAlias(const ShapeIndex& output_index, int64 param_number,

@ -66,7 +66,7 @@ void SetDefaultLayoutToContainer(T* minor_to_major) {
for (Tile tile : tiles) { for (Tile tile : tiles) {
for (int64 dim : tile.dimensions()) { for (int64 dim : tile.dimensions()) {
if (dim < 0 && dim != Tile::kCombineDimension) { if (dim < 0 && dim != Tile::kCombineDimension) {
LOG(FATAL) << "Tile dimension size needs to be mininum int64 value if " LOG(FATAL) << "Tile dimension size needs to be minimum int64 value if "
"it's negative. Value is " "it's negative. Value is "
<< dim; << dim;
} }

@ -271,7 +271,7 @@ StatusOr<ScopedShapedBuffer> CpuExecutable::CreateResultShapedBuffer(
slice.allocation()->parameter_number(), slice.allocation()->parameter_number(),
slice.allocation()->param_shape_index()); slice.allocation()->param_shape_index());
CHECK(output_alias) CHECK(output_alias)
<< "Ouput buffer is coming from parameter " << "Output buffer is coming from parameter "
<< slice.allocation()->parameter_number() << " at index " << slice.allocation()->parameter_number() << " at index "
<< slice.allocation()->param_shape_index() << slice.allocation()->param_shape_index()
<< ", but no alias exists"; << ", but no alias exists";

@ -734,7 +734,7 @@ StatusOr<llvm::Value*> ElementalIrEmitter::EmitComplexUnaryOp(
// is finite and b is either +/-Inf or NaN, then our normal // is finite and b is either +/-Inf or NaN, then our normal
// calculation would end up returning (+/-1, NaN), as opposed to (NaN, // calculation would end up returning (+/-1, NaN), as opposed to (NaN,
// NaN). // NaN).
// 5/6) We always calculate the imagninary value as sin(2b)/denominator. // 5/6) We always calculate the imaginary value as sin(2b)/denominator.
// When the denominator is infinity, this assures us that the zero is // When the denominator is infinity, this assures us that the zero is
// the correct sign. However if our imaginary input results in // the correct sign. However if our imaginary input results in
// sin(2b) = NaN, we calculate our imaginary result as NaN. // sin(2b) = NaN, we calculate our imaginary result as NaN.
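
For reference, the identity behind this numerical discussion is the standard complex tanh decomposition

    tanh(a + b*i) = ( sinh(2a) + i*sin(2b) ) / ( cosh(2a) + cos(2b) )

so when the shared denominator overflows to infinity the real part tends to +/-1, and the imaginary part, computed as sin(2b)/denominator as described in point 5/6, becomes a zero whose sign still comes from sin(2b).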

@ -48,7 +48,7 @@ TEST_F(CustomCallTest, IsInvoked) {
TEST_F(CustomCallTest, UnknownTarget) { TEST_F(CustomCallTest, UnknownTarget) {
XlaBuilder b(TestName()); XlaBuilder b(TestName());
CustomCall(&b, "UknownTarget", /*operands=*/{}, ShapeUtil::MakeShape(F32, {}), CustomCall(&b, "UnknownTarget", /*operands=*/{}, ShapeUtil::MakeShape(F32, {}),
/*opaque=*/""); /*opaque=*/"");
ASSERT_FALSE(Execute(&b, {}).ok()); ASSERT_FALSE(Execute(&b, {}).ok());
} }

@ -417,7 +417,7 @@ StatusOr<ExecutionOutput> GpuExecutable::ExecuteAsyncOnStream(
slice.allocation()->parameter_number(), slice.allocation()->parameter_number(),
slice.allocation()->param_shape_index()); slice.allocation()->param_shape_index());
CHECK(output_alias) CHECK(output_alias)
<< "Ouput buffer is coming from parameter " << "Output buffer is coming from parameter "
<< slice.allocation()->parameter_number() << " at index " << slice.allocation()->parameter_number() << " at index "
<< slice.allocation()->param_shape_index() << slice.allocation()->param_shape_index()
<< ", but no alias exists"; << ", but no alias exists";

@ -1599,7 +1599,7 @@ class InstructionVerifier : public DfsHloVisitorWithDefault {
for (int b = 0; b < conditional->branch_count(); ++b) { for (int b = 0; b < conditional->branch_count(); ++b) {
if (conditional->branch_computation(b)->num_parameters() != 1) { if (conditional->branch_computation(b)->num_parameters() != 1) {
return FailedPrecondition( return FailedPrecondition(
"Branch computation %s of %s must have 1 parameter insted of %d", "Branch computation %s of %s must have 1 parameter instead of %d",
conditional->branch_computation(b)->name(), conditional->ToString(), conditional->branch_computation(b)->name(), conditional->ToString(),
conditional->branch_computation(b)->num_parameters()); conditional->branch_computation(b)->num_parameters());
} }

@ -394,10 +394,10 @@ class LayoutAssignment : public HloModulePass {
return Status::OK(); return Status::OK();
} }
// Construct contraints and assign layouts to all instructions in the // Construct constraints and assign layouts to all instructions in the
// computation satisfying the given ComputationLayout, if not nullptr. // computation satisfying the given ComputationLayout, if not nullptr.
// Otherwise the ComputationLayout will be calculated by propagating the // Otherwise the ComputationLayout will be calculated by propagating the
// computation instruction contraints. // computation instruction constraints.
// Layout constraints are added, then propagated until all LogicalBuffers in // Layout constraints are added, then propagated until all LogicalBuffers in
// the computation are constrained. // the computation are constrained.
Status RunOnComputation(ComputationLayout* computation_layout, Status RunOnComputation(ComputationLayout* computation_layout,

@ -56,7 +56,7 @@ void FramePair::AdjustBox(const BoundingBox box,
*scale_y = 1.0f; *scale_y = 1.0f;
// The assumption is that all deltas that make it to this stage with a // The assumption is that all deltas that make it to this stage with a
// correspondending optical_flow_found_keypoint_[i] == true are not in // corresponding optical_flow_found_keypoint_[i] == true are not in
// themselves degenerate. // themselves degenerate.
// //
// The degeneracy with scale arose because if the points are too close to the // The degeneracy with scale arose because if the points are too close to the

@ -50,7 +50,7 @@ TrackedObject::~TrackedObject() {}
void TrackedObject::UpdatePosition(const BoundingBox& new_position, void TrackedObject::UpdatePosition(const BoundingBox& new_position,
const int64_t timestamp, const int64_t timestamp,
const ImageData& image_data, const ImageData& image_data,
const bool authoratative) { const bool authoritative) {
last_known_position_ = new_position; last_known_position_ = new_position;
position_last_computed_time_ = timestamp; position_last_computed_time_ = timestamp;
@ -88,7 +88,7 @@ void TrackedObject::UpdatePosition(const BoundingBox& new_position,
if (object_model_ != NULL) { if (object_model_ != NULL) {
object_model_->TrackStep(last_known_position_, *image_data.GetImage(), object_model_->TrackStep(last_known_position_, *image_data.GetImage(),
*image_data.GetIntegralImage(), authoratative); *image_data.GetIntegralImage(), authoritative);
} }
} else if (tracked_match_score_ < kMatchScoreForImmediateTermination) { } else if (tracked_match_score_ < kMatchScoreForImmediateTermination) {
if (num_consecutive_frames_below_threshold_ < 1000) { if (num_consecutive_frames_below_threshold_ < 1000) {

@ -37,7 +37,7 @@ class TrackedObject {
~TrackedObject(); ~TrackedObject();
void UpdatePosition(const BoundingBox& new_position, const int64_t timestamp, void UpdatePosition(const BoundingBox& new_position, const int64_t timestamp,
const ImageData& image_data, const bool authoratative); const ImageData& image_data, const bool authoritative);
// This method is called when the tracked object is detected at a // This method is called when the tracked object is detected at a
// given position, and allows the associated Model to grow and/or prune // given position, and allows the associated Model to grow and/or prune

@ -26,7 +26,7 @@ class RecognizeResult(object):
"""Save recognition result temporarily. """Save recognition result temporarily.
Attributes: Attributes:
founded_command: A string indicating the word just found. Defualt value founded_command: A string indicating the word just found. Default value
is '_silence_' is '_silence_'
score: A float representing the confidence of the found word. Default score: A float representing the confidence of the found word. Default
value is zero. value is zero.

@ -398,7 +398,7 @@ if __name__ == '__main__':
'--window_stride_ms', '--window_stride_ms',
type=float, type=float,
default=10.0, default=10.0,
help='How far to move in time between spectogram timeslices.',) help='How far to move in time between spectrogram timeslices.',)
parser.add_argument( parser.add_argument(
'--feature_bin_count', '--feature_bin_count',
type=int, type=int,

@ -53,7 +53,7 @@ def wav_to_features(sample_rate, clip_duration_ms, window_size_ms,
sample_rate: Expected sample rate of the wavs. sample_rate: Expected sample rate of the wavs.
clip_duration_ms: Expected duration in milliseconds of the wavs. clip_duration_ms: Expected duration in milliseconds of the wavs.
window_size_ms: How long each spectrogram timeslice is. window_size_ms: How long each spectrogram timeslice is.
window_stride_ms: How far to move in time between spectogram timeslices. window_stride_ms: How far to move in time between spectrogram timeslices.
feature_bin_count: How many bins to use for the feature fingerprint. feature_bin_count: How many bins to use for the feature fingerprint.
quantize: Whether to train the model for eight-bit deployment. quantize: Whether to train the model for eight-bit deployment.
preprocess: Spectrogram processing mode; "mfcc", "average" or "micro". preprocess: Spectrogram processing mode; "mfcc", "average" or "micro".
@ -153,7 +153,7 @@ if __name__ == '__main__':
'--window_stride_ms', '--window_stride_ms',
type=float, type=float,
default=10.0, default=10.0,
help='How far to move in time between spectogram timeslices.',) help='How far to move in time between spectrogram timeslices.',)
parser.add_argument( parser.add_argument(
'--feature_bin_count', '--feature_bin_count',
type=int, type=int,

@ -25,12 +25,12 @@ import (
// Scope encapsulates common operation properties when building a Graph. // Scope encapsulates common operation properties when building a Graph.
// //
// A Scope object (and its derivates, e.g., obtained from Scope.SubScope) // A Scope object (and its derivatives, e.g., obtained from Scope.SubScope)
// act as a builder for graphs. They allow common properties (such as // act as a builder for graphs. They allow common properties (such as
// a name prefix) to be specified for multiple operations being added // a name prefix) to be specified for multiple operations being added
// to the graph. // to the graph.
// //
// A Scope object and all its derivates (e.g., obtained from Scope.SubScope) // A Scope object and all its derivatives (e.g., obtained from Scope.SubScope)
// are not safe for concurrent use by multiple goroutines. // are not safe for concurrent use by multiple goroutines.
type Scope struct { type Scope struct {
graph *tf.Graph graph *tf.Graph

@ -3614,7 +3614,7 @@ func BoostedTreesSparseCalculateBestFeatureSplitSplitType(value string) BoostedT
// l1: l1 regularization factor on leaf weights, per instance based. // l1: l1 regularization factor on leaf weights, per instance based.
// l2: l2 regularization factor on leaf weights, per instance based. // l2: l2 regularization factor on leaf weights, per instance based.
// tree_complexity: adjustment to the gain, per leaf based. // tree_complexity: adjustment to the gain, per leaf based.
// min_node_weight: mininum avg of hessians in a node before required for the node to be considered for splitting. // min_node_weight: minimum avg of hessians in a node before required for the node to be considered for splitting.
// logits_dimension: The dimension of logit, i.e., number of classes. // logits_dimension: The dimension of logit, i.e., number of classes.
// //
// Returns: // Returns:
@ -3711,7 +3711,7 @@ func BoostedTreesCalculateBestFeatureSplitV2(scope *Scope, node_id_range tf.Outp
// l1: l1 regularization factor on leaf weights, per instance based. // l1: l1 regularization factor on leaf weights, per instance based.
// l2: l2 regularization factor on leaf weights, per instance based. // l2: l2 regularization factor on leaf weights, per instance based.
// tree_complexity: adjustment to the gain, per leaf based. // tree_complexity: adjustment to the gain, per leaf based.
// min_node_weight: mininum avg of hessians in a node before required for the node to be considered for splitting. // min_node_weight: minimum avg of hessians in a node before required for the node to be considered for splitting.
// max_splits: the number of nodes that can be split in the whole tree. Used as a dimension of output tensors. // max_splits: the number of nodes that can be split in the whole tree. Used as a dimension of output tensors.
// //
// Returns: // Returns:
@ -3764,7 +3764,7 @@ func BoostedTreesCalculateBestGainsPerFeature(scope *Scope, node_id_range tf.Out
// Checks whether a tree ensemble has been initialized. // Checks whether a tree ensemble has been initialized.
// //
// Arguments: // Arguments:
// tree_ensemble_handle: Handle to the tree ensemble resouce. // tree_ensemble_handle: Handle to the tree ensemble resource.
// //
// Returns output boolean on whether it is initialized or not. // Returns output boolean on whether it is initialized or not.
func IsBoostedTreesEnsembleInitialized(scope *Scope, tree_ensemble_handle tf.Output) (is_initialized tf.Output) { func IsBoostedTreesEnsembleInitialized(scope *Scope, tree_ensemble_handle tf.Output) (is_initialized tf.Output) {
@ -5160,7 +5160,7 @@ func CudnnRNNParamsToCanonicalV2NumProj(value int64) CudnnRNNParamsToCanonicalV2
// num_layers: Specifies the number of layers in the RNN model. // num_layers: Specifies the number of layers in the RNN model.
// num_units: Specifies the size of the hidden state. // num_units: Specifies the size of the hidden state.
// input_size: Specifies the size of the input state. // input_size: Specifies the size of the input state.
// num_params_weigths: number of weight parameter matrix for all layers. // num_params_weights: number of weight parameter matrix for all layers.
// num_params_biases: number of bias parameter vector for all layers. // num_params_biases: number of bias parameter vector for all layers.
// weights: the canonical form of weights that can be used for saving // weights: the canonical form of weights that can be used for saving
// and restoration. They are more likely to be compatible across different // and restoration. They are more likely to be compatible across different
@ -8378,7 +8378,7 @@ func BoostedTreesCalculateBestFeatureSplitSplitType(value string) BoostedTreesCa
// l1: l1 regularization factor on leaf weights, per instance based. // l1: l1 regularization factor on leaf weights, per instance based.
// l2: l2 regularization factor on leaf weights, per instance based. // l2: l2 regularization factor on leaf weights, per instance based.
// tree_complexity: adjustment to the gain, per leaf based. // tree_complexity: adjustment to the gain, per leaf based.
// min_node_weight: mininum avg of hessians in a node before required for the node to be considered for splitting. // min_node_weight: minimum avg of hessians in a node before required for the node to be considered for splitting.
// logits_dimension: The dimension of logit, i.e., number of classes. // logits_dimension: The dimension of logit, i.e., number of classes.
// //
// Returns: // Returns:
@ -13774,7 +13774,7 @@ func DebugNumericSummaryV2OutputDtype(value tf.DataType) DebugNumericSummaryV2At
// element is a bit which is set to 1 if the input tensor has an // element is a bit which is set to 1 if the input tensor has an
// infinity or nan value, or zero otherwise. // infinity or nan value, or zero otherwise.
// //
// 3 (CONCISE_HEALTH): Ouput a float32/64 tensor of shape [5]. The 1st // 3 (CONCISE_HEALTH): Output a float32/64 tensor of shape [5]. The 1st
// element is the tensor_id, if provided, and -1 otherwise. The // element is the tensor_id, if provided, and -1 otherwise. The
// remaining four slots are the total number of elements, -infs, // remaining four slots are the total number of elements, -infs,
// +infs, and nans in the input tensor respectively. // +infs, and nans in the input tensor respectively.
@ -14132,11 +14132,11 @@ func TridiagonalSolve(scope *Scope, diagonals tf.Output, rhs tf.Output, optional
// //
// Arguments: // Arguments:
// superdiag: Tensor of shape `[..., 1, M]`, representing superdiagonals of // superdiag: Tensor of shape `[..., 1, M]`, representing superdiagonals of
// tri-diagonal matrices to the left of multiplication. Last element is ingored. // tri-diagonal matrices to the left of multiplication. Last element is ignored.
// maindiag: Tensor of shape `[..., 1, M]`, representing main diagonals of tri-diagonal // maindiag: Tensor of shape `[..., 1, M]`, representing main diagonals of tri-diagonal
// matrices to the left of multiplication. // matrices to the left of multiplication.
// subdiag: Tensor of shape `[..., 1, M]`, representing subdiagonals of tri-diagonal // subdiag: Tensor of shape `[..., 1, M]`, representing subdiagonals of tri-diagonal
// matrices to the left of multiplication. First element is ingored. // matrices to the left of multiplication. First element is ignored.
// rhs: Tensor of shape `[..., M, N]`, representing MxN matrices to the right of // rhs: Tensor of shape `[..., M, N]`, representing MxN matrices to the right of
// multiplication. // multiplication.
// //
@ -17744,7 +17744,7 @@ func CudnnRNNCanonicalToParamsV2NumProj(value int64) CudnnRNNCanonicalToParamsV2
// biases: the canonical form of biases that can be used for saving // biases: the canonical form of biases that can be used for saving
// and restoration. They are more likely to be compatible across different // and restoration. They are more likely to be compatible across different
// generations. // generations.
// num_params_weigths: number of weight parameter matrix for all layers. // num_params_weights: number of weight parameter matrix for all layers.
// num_params_biases: number of bias parameter vector for all layers. // num_params_biases: number of bias parameter vector for all layers.
// rnn_mode: Indicates the type of the RNN model. // rnn_mode: Indicates the type of the RNN model.
// input_mode: Indicate whether there is a linear projection between the input and // input_mode: Indicate whether there is a linear projection between the input and
@ -30931,8 +30931,8 @@ func ResourceApplyFtrlV2UseLocking(value bool) ResourceApplyFtrlV2Attr {
// linear: Should be from a Variable(). // linear: Should be from a Variable().
// grad: The gradient. // grad: The gradient.
// lr: Scaling factor. Must be a scalar. // lr: Scaling factor. Must be a scalar.
// l1: L1 regulariation. Must be a scalar. // l1: L1 regularization. Must be a scalar.
// l2: L2 shrinkage regulariation. Must be a scalar. // l2: L2 shrinkage regularization. Must be a scalar.
// //
// lr_power: Scaling factor. Must be a scalar. // lr_power: Scaling factor. Must be a scalar.
// //
@ -36271,8 +36271,8 @@ func ResourceApplyFtrlUseLocking(value bool) ResourceApplyFtrlAttr {
// linear: Should be from a Variable(). // linear: Should be from a Variable().
// grad: The gradient. // grad: The gradient.
// lr: Scaling factor. Must be a scalar. // lr: Scaling factor. Must be a scalar.
// l1: L1 regulariation. Must be a scalar. // l1: L1 regularization. Must be a scalar.
// l2: L2 regulariation. Must be a scalar. // l2: L2 regularization. Must be a scalar.
// lr_power: Scaling factor. Must be a scalar. // lr_power: Scaling factor. Must be a scalar.
// //
// Returns the created operation. // Returns the created operation.
@ -42921,7 +42921,7 @@ func ResourceSparseApplyFtrlV2UseLocking(value bool) ResourceSparseApplyFtrlV2At
// indices: A vector of indices into the first dimension of var and accum. // indices: A vector of indices into the first dimension of var and accum.
// lr: Scaling factor. Must be a scalar. // lr: Scaling factor. Must be a scalar.
// l1: L1 regularization. Must be a scalar. // l1: L1 regularization. Must be a scalar.
// l2: L2 shrinkage regulariation. Must be a scalar. // l2: L2 shrinkage regularization. Must be a scalar.
// //
// lr_power: Scaling factor. Must be a scalar. // lr_power: Scaling factor. Must be a scalar.
// //

@ -36,7 +36,7 @@ class EndpointSpec {
// package: package of this endpoint (from which also derives its package) // package: package of this endpoint (from which also derives its package)
// name: name of this endpoint class // name: name of this endpoint class
// javadoc: the endpoint class documentation // javadoc: the endpoint class documentation
// TODO(annarev): hardcode depcreated to false until deprecated is possible // TODO(annarev): hardcode deprecated to false until deprecated is possible
EndpointSpec(const string& package, const string& name, EndpointSpec(const string& package, const string& name,
const Javadoc& javadoc) const Javadoc& javadoc)
: package_(package), name_(name), javadoc_(javadoc), deprecated_(false) {} : package_(package), name_(name), javadoc_(javadoc), deprecated_(false) {}

@ -361,7 +361,7 @@ TEST(WriteType, ParameterizedClassAndSupertypes) {
clazz.add_parameter(type_t); clazz.add_parameter(type_t);
Type type_u = Type::Generic("U").add_supertype(Type::Class("Number")); Type type_u = Type::Generic("U").add_supertype(Type::Class("Number"));
clazz.add_parameter(type_u); clazz.add_parameter(type_u);
clazz.add_supertype(Type::Interface("Parametrizable").add_parameter(type_u)); clazz.add_supertype(Type::Interface("Parameterizable").add_parameter(type_u));
clazz.add_supertype(Type::Interface("Runnable")); clazz.add_supertype(Type::Interface("Runnable"));
clazz.add_supertype(Type::Class("SuperTest").add_parameter(type_t)); clazz.add_supertype(Type::Class("SuperTest").add_parameter(type_t));
@ -370,7 +370,7 @@ TEST(WriteType, ParameterizedClassAndSupertypes) {
const char* expected = const char* expected =
"package org.tensorflow;\n\n" "package org.tensorflow;\n\n"
"public class Test<T, U extends Number>" "public class Test<T, U extends Number>"
" extends SuperTest<T> implements Parametrizable<U>, Runnable {\n}\n"; " extends SuperTest<T> implements Parameterizable<U>, Runnable {\n}\n";
ASSERT_STREQ(expected, writer.str().data()); ASSERT_STREQ(expected, writer.str().data());
} }

@ -5678,7 +5678,7 @@ cc_import(
name = "pywrap_tensorflow_import_lib", name = "pywrap_tensorflow_import_lib",
interface_library = select({ interface_library = select({
"//tensorflow:windows": ":pywrap_tensorflow_import_lib_file", "//tensorflow:windows": ":pywrap_tensorflow_import_lib_file",
"//conditions:default": "not_exsiting_on_unix.lib", # Just a placeholder for Unix platforms "//conditions:default": "not_existing_on_unix.lib", # Just a placeholder for Unix platforms
}), }),
system_provided = 1, system_provided = 1,
) )

@ -837,7 +837,7 @@ class LSTMCell(recurrent.LSTMCell):
inputs: A 2D tensor, with shape of `[batch, feature]`. inputs: A 2D tensor, with shape of `[batch, feature]`.
states: List of 2 tensors that correspond to the cell's units. Both of states: List of 2 tensors that correspond to the cell's units. Both of
them have shape `[batch, units]`, the first tensor is the memory state them have shape `[batch, units]`, the first tensor is the memory state
from previous time step, the second tesnor is the carry state from from previous time step, the second tensor is the carry state from
previous time step. For timestep 0, the initial state provided by the user previous time step. For timestep 0, the initial state provided by the user
will be fed to the cell. will be fed to the cell.
training: Python boolean indicating whether the layer should behave in training: Python boolean indicating whether the layer should behave in

@ -632,7 +632,7 @@ class TestWholeModelSaving(test.TestCase, parameterized.TestCase):
# out of proportion. Note that it fits into the internal HDF5 # out of proportion. Note that it fits into the internal HDF5
# attribute memory limit on its own but because h5py converts # attribute memory limit on its own but because h5py converts
# the list of layer names into numpy array, which uses the same # the list of layer names into numpy array, which uses the same
# amout of memory for every item, it increases the memory # amount of memory for every item, it increases the memory
# requirements substantially. # requirements substantially.
x = keras.Input(shape=(2,), name='input_' + ('x' * (2**15))) x = keras.Input(shape=(2,), name='input_' + ('x' * (2**15)))
f = x f = x
@ -1238,7 +1238,7 @@ class TestWeightSavingAndLoadingTFFormat(test.TestCase):
self.assertEqual(44., self.evaluate(v)) self.assertEqual(44., self.evaluate(v))
@test_util.run_in_graph_and_eager_modes @test_util.run_in_graph_and_eager_modes
def test_nonexistant_prefix_directory(self): def test_nonexistent_prefix_directory(self):
m = keras.Model() m = keras.Model()
v = m.add_weight(name='v', shape=[]) v = m.add_weight(name='v', shape=[])
self.evaluate(v.assign(42.)) self.evaluate(v.assign(42.))

@ -329,7 +329,7 @@ class ScatterTest(test.TestCase):
indices = np.array([2, 0, 5]) indices = np.array([2, 0, 5])
self.evaluate(op(ref, indices, updates)) self.evaluate(op(ref, indices, updates))
# Indicies out of range should not fail. # Indices out of range should not fail.
indices = np.array([-1, 0, 5]) indices = np.array([-1, 0, 5])
self.evaluate(op(ref, indices, updates)) self.evaluate(op(ref, indices, updates))
indices = np.array([2, 0, 6]) indices = np.array([2, 0, 6])

@ -151,7 +151,7 @@ class TestModuleNaming(test_util.TensorFlowTestCase):
with self.assertRaises(ErrorModuleError): with self.assertRaises(ErrorModuleError):
# If super ctor is not called then the name scope isn't opened. We need to # If super ctor is not called then the name scope isn't opened. We need to
# ensure that this doesn't trigger an exception (e.g. the metaclass trying # ensure that this doesn't trigger an exception (e.g. the metaclass trying
# to __exit__ a non-existant name scope). # to __exit__ a non-existent name scope).
ErrorModule(call_super=False) ErrorModule(call_super=False)
self.assertEqual("", get_name_scope()) self.assertEqual("", get_name_scope())

@ -291,7 +291,7 @@ def _aggregate_across_replicas(metrics_collections, metric_value_fn, *args):
# inside a while_loop (and perhaps a TPU rewrite context). But we don't # inside a while_loop (and perhaps a TPU rewrite context). But we don't
# want the value op to be evaluated every step or on the TPU. So we # want the value op to be evaluated every step or on the TPU. So we
# create it outside so that it can be evaluated at the end on the host, # create it outside so that it can be evaluated at the end on the host,
# once the update ops have been evaluted. # once the update ops have been evaluated.
# pylint: disable=protected-access # pylint: disable=protected-access
if distribution.extended._outer_control_flow_context is None: if distribution.extended._outer_control_flow_context is None:

@ -72,7 +72,7 @@ def BuildFullModel():
return sgd_op.minimize(loss) return sgd_op.minimize(loss)
def BuildSplitableModel(): def BuildSplittableModel():
"""Build a small model that can be run partially in each step.""" """Build a small model that can be run partially in each step."""
image = array_ops.zeros([2, 6, 6, 3]) image = array_ops.zeros([2, 6, 6, 3])

@ -111,7 +111,7 @@ class ProfilerTest(test.TestCase):
opts = builder.time_and_memory(min_bytes=0) opts = builder.time_and_memory(min_bytes=0)
with session.Session() as sess: with session.Session() as sess:
r1, r2, r3 = lib.BuildSplitableModel() r1, r2, r3 = lib.BuildSplittableModel()
sess.run(variables.global_variables_initializer()) sess.run(variables.global_variables_initializer())
profiler = model_analyzer.Profiler(sess.graph) profiler = model_analyzer.Profiler(sess.graph)

@ -163,7 +163,7 @@ class UtilsTest(test.TestCase):
def testGetTensorFromInfoRaisesErrors(self): def testGetTensorFromInfoRaisesErrors(self):
expected = array_ops.placeholder(dtypes.float32, 1, name="x") expected = array_ops.placeholder(dtypes.float32, 1, name="x")
tensor_info = utils.build_tensor_info(expected) tensor_info = utils.build_tensor_info(expected)
tensor_info.name = "blah:0" # Nonexistant name. tensor_info.name = "blah:0" # Nonexistent name.
with self.assertRaises(KeyError): with self.assertRaises(KeyError):
utils.get_tensor_from_tensor_info(tensor_info) utils.get_tensor_from_tensor_info(tensor_info)
tensor_info.ClearField("name") # Malformed (missing encoding). tensor_info.ClearField("name") # Malformed (missing encoding).

@ -260,7 +260,7 @@ class MomentumOptimizerTest(test.TestCase):
self.assertAllCloseAccordingToType([[-111, -138]], self.evaluate(var0)) self.assertAllCloseAccordingToType([[-111, -138]], self.evaluate(var0))
@test_util.run_in_graph_and_eager_modes(reset_test=True) @test_util.run_in_graph_and_eager_modes(reset_test=True)
def testMinimizeWith2DIndiciesForEmbeddingLookup(self): def testMinimizeWith2DIndicesForEmbeddingLookup(self):
# This test invokes the ResourceSparseApplyMomentum operation, which # This test invokes the ResourceSparseApplyMomentum operation, which
# did not have a registered GPU kernel as of April 2018. With graph # did not have a registered GPU kernel as of April 2018. With graph
# execution, the placement algorithm notices this and automatically # execution, the placement algorithm notices this and automatically

@ -92,7 +92,7 @@ string SideString(Side s);
// Type with which intermediate computations of a blas routine are performed. // Type with which intermediate computations of a blas routine are performed.
// //
// Some blas calls can perform computations with a type that's different than // Some blas calls can perform computations with a type that's different than
// the type of their inputs/outputs. This lets you e.g. multiply two matricies // the type of their inputs/outputs. This lets you e.g. multiply two matrices
// of int8s using float32s to store the matmul's intermediate values. // of int8s using float32s to store the matmul's intermediate values.
enum class ComputationType { enum class ComputationType {
kF16, // 16-bit floating-point kF16, // 16-bit floating-point

@ -1195,7 +1195,7 @@ class CudnnRnnDescriptor : public dnn::RnnDescriptor {
namespace { namespace {
// Check if the LSTM projection is used. If yes, an additional weigth matrix // Check if the LSTM projection is used. If yes, an additional weight matrix
// (projection matrix) will be fetched to the 'weights'. Otherwise, nothing will // (projection matrix) will be fetched to the 'weights'. Otherwise, nothing will
// be done. // be done.
port::Status CheckAndFetchProjectionWeights( port::Status CheckAndFetchProjectionWeights(

@ -516,11 +516,11 @@ cudnnStatus_t CUDNNWINAPI cudnnGetConvolutionNdForwardOutputDim(
const cudnnTensorDescriptor_t inputTensorDesc, const cudnnTensorDescriptor_t inputTensorDesc,
const cudnnFilterDescriptor_t filterDesc, const cudnnFilterDescriptor_t filterDesc,
int nbDims, int nbDims,
int tensorOuputDimA[] ) { int tensorOutputDimA[] ) {
using FuncPtr = cudnnStatus_t (CUDNNWINAPI *)(const cudnnConvolutionDescriptor_t, const cudnnTensorDescriptor_t, const cudnnFilterDescriptor_t, int, int []); using FuncPtr = cudnnStatus_t (CUDNNWINAPI *)(const cudnnConvolutionDescriptor_t, const cudnnTensorDescriptor_t, const cudnnFilterDescriptor_t, int, int []);
static auto func_ptr = LoadSymbol<FuncPtr>("cudnnGetConvolutionNdForwardOutputDim"); static auto func_ptr = LoadSymbol<FuncPtr>("cudnnGetConvolutionNdForwardOutputDim");
if (!func_ptr) return GetSymbolNotFoundError(); if (!func_ptr) return GetSymbolNotFoundError();
return func_ptr(convDesc, inputTensorDesc, filterDesc, nbDims, tensorOuputDimA); return func_ptr(convDesc, inputTensorDesc, filterDesc, nbDims, tensorOutputDimA);
} }
cudnnStatus_t CUDNNWINAPI cudnnDestroyConvolutionDescriptor( cudnnStatus_t CUDNNWINAPI cudnnDestroyConvolutionDescriptor(
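
To make the forwarding stub above concrete, a hypothetical caller (descriptors assumed to be created and configured elsewhere) would query a 4-D convolution's output dimensions like this:

// On success out_dim holds the output tensor's dimensions, e.g. {N, C, H, W}.
int out_dim[4] = {0, 0, 0, 0};
cudnnStatus_t st = cudnnGetConvolutionNdForwardOutputDim(
    conv_desc, input_desc, filter_desc, /*nbDims=*/4, out_dim);
if (st != CUDNN_STATUS_SUCCESS) {
  // handle the error
}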

@ -559,11 +559,11 @@ cudnnStatus_t CUDNNWINAPI cudnnGetConvolutionNdForwardOutputDim(
const cudnnTensorDescriptor_t inputTensorDesc, const cudnnTensorDescriptor_t inputTensorDesc,
const cudnnFilterDescriptor_t filterDesc, const cudnnFilterDescriptor_t filterDesc,
int nbDims, int nbDims,
int tensorOuputDimA[] ) { int tensorOutputDimA[] ) {
using FuncPtr = cudnnStatus_t (CUDNNWINAPI *)(const cudnnConvolutionDescriptor_t, const cudnnTensorDescriptor_t, const cudnnFilterDescriptor_t, int, int []); using FuncPtr = cudnnStatus_t (CUDNNWINAPI *)(const cudnnConvolutionDescriptor_t, const cudnnTensorDescriptor_t, const cudnnFilterDescriptor_t, int, int []);
static auto func_ptr = LoadSymbol<FuncPtr>("cudnnGetConvolutionNdForwardOutputDim"); static auto func_ptr = LoadSymbol<FuncPtr>("cudnnGetConvolutionNdForwardOutputDim");
if (!func_ptr) return GetSymbolNotFoundError(); if (!func_ptr) return GetSymbolNotFoundError();
return func_ptr(convDesc, inputTensorDesc, filterDesc, nbDims, tensorOuputDimA); return func_ptr(convDesc, inputTensorDesc, filterDesc, nbDims, tensorOutputDimA);
} }
cudnnStatus_t CUDNNWINAPI cudnnDestroyConvolutionDescriptor( cudnnStatus_t CUDNNWINAPI cudnnDestroyConvolutionDescriptor(

@ -559,11 +559,11 @@ cudnnStatus_t CUDNNWINAPI cudnnGetConvolutionNdForwardOutputDim(
const cudnnTensorDescriptor_t inputTensorDesc, const cudnnTensorDescriptor_t inputTensorDesc,
const cudnnFilterDescriptor_t filterDesc, const cudnnFilterDescriptor_t filterDesc,
int nbDims, int nbDims,
int tensorOuputDimA[] ) { int tensorOutputDimA[] ) {
using FuncPtr = cudnnStatus_t (CUDNNWINAPI *)(const cudnnConvolutionDescriptor_t, const cudnnTensorDescriptor_t, const cudnnFilterDescriptor_t, int, int []); using FuncPtr = cudnnStatus_t (CUDNNWINAPI *)(const cudnnConvolutionDescriptor_t, const cudnnTensorDescriptor_t, const cudnnFilterDescriptor_t, int, int []);
static auto func_ptr = LoadSymbol<FuncPtr>("cudnnGetConvolutionNdForwardOutputDim"); static auto func_ptr = LoadSymbol<FuncPtr>("cudnnGetConvolutionNdForwardOutputDim");
if (!func_ptr) return GetSymbolNotFoundError(); if (!func_ptr) return GetSymbolNotFoundError();
return func_ptr(convDesc, inputTensorDesc, filterDesc, nbDims, tensorOuputDimA); return func_ptr(convDesc, inputTensorDesc, filterDesc, nbDims, tensorOutputDimA);
} }
cudnnStatus_t CUDNNWINAPI cudnnDestroyConvolutionDescriptor( cudnnStatus_t CUDNNWINAPI cudnnDestroyConvolutionDescriptor(

@ -557,11 +557,11 @@ cudnnGetConvolutionNdForwardOutputDim(const cudnnConvolutionDescriptor_t convDes
const cudnnTensorDescriptor_t inputTensorDesc, const cudnnTensorDescriptor_t inputTensorDesc,
const cudnnFilterDescriptor_t filterDesc, const cudnnFilterDescriptor_t filterDesc,
int nbDims, int nbDims,
int tensorOuputDimA[]) { int tensorOutputDimA[]) {
using FuncPtr = cudnnStatus_t (CUDNNWINAPI *)(const cudnnConvolutionDescriptor_t, const cudnnTensorDescriptor_t, const cudnnFilterDescriptor_t, int, int []); using FuncPtr = cudnnStatus_t (CUDNNWINAPI *)(const cudnnConvolutionDescriptor_t, const cudnnTensorDescriptor_t, const cudnnFilterDescriptor_t, int, int []);
static auto func_ptr = LoadSymbol<FuncPtr>("cudnnGetConvolutionNdForwardOutputDim"); static auto func_ptr = LoadSymbol<FuncPtr>("cudnnGetConvolutionNdForwardOutputDim");
if (!func_ptr) return GetSymbolNotFoundError(); if (!func_ptr) return GetSymbolNotFoundError();
return func_ptr(convDesc, inputTensorDesc, filterDesc, nbDims, tensorOuputDimA); return func_ptr(convDesc, inputTensorDesc, filterDesc, nbDims, tensorOutputDimA);
} }
cudnnStatus_t CUDNNWINAPI cudnnStatus_t CUDNNWINAPI

@ -557,11 +557,11 @@ cudnnGetConvolutionNdForwardOutputDim(const cudnnConvolutionDescriptor_t convDes
const cudnnTensorDescriptor_t inputTensorDesc, const cudnnTensorDescriptor_t inputTensorDesc,
const cudnnFilterDescriptor_t filterDesc, const cudnnFilterDescriptor_t filterDesc,
int nbDims, int nbDims,
int tensorOuputDimA[]) { int tensorOutputDimA[]) {
using FuncPtr = cudnnStatus_t (CUDNNWINAPI *)(const cudnnConvolutionDescriptor_t, const cudnnTensorDescriptor_t, const cudnnFilterDescriptor_t, int, int []); using FuncPtr = cudnnStatus_t (CUDNNWINAPI *)(const cudnnConvolutionDescriptor_t, const cudnnTensorDescriptor_t, const cudnnFilterDescriptor_t, int, int []);
static auto func_ptr = LoadSymbol<FuncPtr>("cudnnGetConvolutionNdForwardOutputDim"); static auto func_ptr = LoadSymbol<FuncPtr>("cudnnGetConvolutionNdForwardOutputDim");
if (!func_ptr) return GetSymbolNotFoundError(); if (!func_ptr) return GetSymbolNotFoundError();
return func_ptr(convDesc, inputTensorDesc, filterDesc, nbDims, tensorOuputDimA); return func_ptr(convDesc, inputTensorDesc, filterDesc, nbDims, tensorOutputDimA);
} }
cudnnStatus_t CUDNNWINAPI cudnnStatus_t CUDNNWINAPI

@ -702,11 +702,11 @@ cudnnGetConvolutionNdForwardOutputDim(const cudnnConvolutionDescriptor_t convDes
const cudnnTensorDescriptor_t inputTensorDesc, const cudnnTensorDescriptor_t inputTensorDesc,
const cudnnFilterDescriptor_t filterDesc, const cudnnFilterDescriptor_t filterDesc,
int nbDims, int nbDims,
int tensorOuputDimA[]) { int tensorOutputDimA[]) {
using FuncPtr = cudnnStatus_t (CUDNNWINAPI *)(const cudnnConvolutionDescriptor_t, const cudnnTensorDescriptor_t, const cudnnFilterDescriptor_t, int, int []); using FuncPtr = cudnnStatus_t (CUDNNWINAPI *)(const cudnnConvolutionDescriptor_t, const cudnnTensorDescriptor_t, const cudnnFilterDescriptor_t, int, int []);
static auto func_ptr = LoadSymbol<FuncPtr>("cudnnGetConvolutionNdForwardOutputDim"); static auto func_ptr = LoadSymbol<FuncPtr>("cudnnGetConvolutionNdForwardOutputDim");
if (!func_ptr) return GetSymbolNotFoundError(); if (!func_ptr) return GetSymbolNotFoundError();
return func_ptr(convDesc, inputTensorDesc, filterDesc, nbDims, tensorOuputDimA); return func_ptr(convDesc, inputTensorDesc, filterDesc, nbDims, tensorOutputDimA);
} }
cudnnStatus_t CUDNNWINAPI cudnnStatus_t CUDNNWINAPI

@ -4887,7 +4887,7 @@ cusparseStatus_t CUSPARSEAPI cusparseDcsr2csr_compress(
int m, // number of rows int m, // number of rows
int n, const cusparseMatDescr_t descra, int n, const cusparseMatDescr_t descra,
const double *csrValA, // csr values array-the elements which are below a const double *csrValA, // csr values array-the elements which are below a
// certain tolerance will be remvoed // certain tolerance will be removed
const int *csrColIndA, const int *csrColIndA,
const int *csrRowPtrA, // corresponding input noncompressed row pointer const int *csrRowPtrA, // corresponding input noncompressed row pointer
int nnzA, const int *nnzPerRow, double *csrValC, int *csrColIndC, int nnzA, const int *nnzPerRow, double *csrValC, int *csrColIndC,
@ -4907,7 +4907,7 @@ cusparseStatus_t CUSPARSEAPI cusparseCcsr2csr_compress(
int m, // number of rows int m, // number of rows
int n, const cusparseMatDescr_t descra, int n, const cusparseMatDescr_t descra,
const cuComplex *csrValA, // csr values array-the elements which are below const cuComplex *csrValA, // csr values array-the elements which are below
// a certain tolerance will be remvoed // a certain tolerance will be removed
const int *csrColIndA, const int *csrColIndA,
const int *csrRowPtrA, // corresponding input noncompressed row pointer const int *csrRowPtrA, // corresponding input noncompressed row pointer
int nnzA, const int *nnzPerRow, cuComplex *csrValC, int *csrColIndC, int nnzA, const int *nnzPerRow, cuComplex *csrValC, int *csrColIndC,
@ -4927,7 +4927,7 @@ cusparseStatus_t CUSPARSEAPI cusparseZcsr2csr_compress(
int m, // number of rows int m, // number of rows
int n, const cusparseMatDescr_t descra, int n, const cusparseMatDescr_t descra,
const cuDoubleComplex *csrValA, // csr values array-the elements which are const cuDoubleComplex *csrValA, // csr values array-the elements which are
// below a certain tolerance will be remvoed // below a certain tolerance will be removed
const int *csrColIndA, const int *csrColIndA,
const int *csrRowPtrA, // corresponding input noncompressed row pointer const int *csrRowPtrA, // corresponding input noncompressed row pointer
int nnzA, const int *nnzPerRow, cuDoubleComplex *csrValC, int *csrColIndC, int nnzA, const int *nnzPerRow, cuDoubleComplex *csrValC, int *csrColIndC,
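
As a concrete, purely illustrative example of the compression these comments describe: if one row of csrValA holds [0.8, 1e-9, 2.0] and the tolerance is 1e-6, the compressed row in csrValC keeps only [0.8, 2.0], csrColIndC keeps the two surviving column indices, and the corresponding nnzPerRow entry (computed by the companion nnz_compress call) is 2.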

@ -137,7 +137,7 @@ bool ThreadDimOk(const DeviceDescription &device_description,
thread_dim.z <= limit.z; thread_dim.z <= limit.z;
if (!ok) { if (!ok) {
VLOG(2) << "thread dim " << thread_dim.ToString() VLOG(2) << "thread dim " << thread_dim.ToString()
<< " exceeds limit contraints of " << limit.ToString(); << " exceeds limit constraints of " << limit.ToString();
} }
return ok; return ok;
} }

@ -109,7 +109,7 @@ class DeviceMemoryBase {
private: private:
void *opaque_; // Platform-dependent value representing allocated memory. void *opaque_; // Platform-dependent value representing allocated memory.
uint64 size_; // Size in bytes of this allocation. uint64 size_; // Size in bytes of this allocation.
uint64 payload_ = 0; // Payload data associtated with this allocation. uint64 payload_ = 0; // Payload data associated with this allocation.
}; };
// Typed wrapper around "void *"-like DeviceMemoryBase. // Typed wrapper around "void *"-like DeviceMemoryBase.

@ -2148,7 +2148,7 @@ class DnnSupport {
// max_seq_length: the max length of the sequences. // max_seq_length: the max length of the sequences.
// batch_size: the size of a minibatch. // batch_size: the size of a minibatch.
// data_size: the size of the state. // data_size: the size of the state.
// seq_lenghs: the lengths of sequences in a batch. // seq_lengths: the lengths of sequences in a batch.
// data_type: an enum to specify the type for the underlying data. // data_type: an enum to specify the type for the underlying data.
virtual port::StatusOr<std::unique_ptr<dnn::RnnSequenceTensorDescriptor>> virtual port::StatusOr<std::unique_ptr<dnn::RnnSequenceTensorDescriptor>>
createRnnSequenceTensorDescriptor(int max_seq_length, int batch_size, createRnnSequenceTensorDescriptor(int max_seq_length, int batch_size,

View File

@@ -40,7 +40,7 @@ namespace stream_executor {
namespace gpu { namespace gpu {
// CUDA-platform implementation of the platform-agnostic // CUDA-platform implementation of the platform-agnostic
// StreamExecutorInferface. // StreamExecutorInterface.
class GpuExecutor : public internal::StreamExecutorInterface { class GpuExecutor : public internal::StreamExecutorInterface {
public: public:
// sub_platform indicates the subplatform used in this executor; it must // sub_platform indicates the subplatform used in this executor; it must
@@ -328,10 +328,10 @@ class GpuExecutor : public internal::StreamExecutorInterface {
// for use in getting device metadata. Immutable post-initialization. // for use in getting device metadata. Immutable post-initialization.
int device_ordinal_; int device_ordinal_;
// The major verion of the compute capability for device_. // The major version of the compute capability for device_.
int cc_major_; int cc_major_;
// The minor verion of the compute capability for device_. // The minor version of the compute capability for device_.
int cc_minor_; int cc_minor_;
// GPU ISA version for device_. // GPU ISA version for device_.

View File

@@ -30,7 +30,7 @@ class GpuExecutor;
class GpuStream; class GpuStream;
// Wraps a pair of GpuEventHandles in order to satisfy the platform-independent // Wraps a pair of GpuEventHandles in order to satisfy the platform-independent
// TimerInferface -- both a start and a stop event are present which may be // TimerInterface -- both a start and a stop event are present which may be
// recorded in a stream. // recorded in a stream.
class GpuTimer : public internal::TimerInterface { class GpuTimer : public internal::TimerInterface {
public: public:

View File

@@ -116,7 +116,7 @@ class MultiPlatformManager {
static port::StatusOr<Platform*> InitializePlatformWithId( static port::StatusOr<Platform*> InitializePlatformWithId(
const Platform::Id& id, const std::map<string, string>& options); const Platform::Id& id, const std::map<string, string>& options);
// Retrives the platforms satisfying the given filter, i.e. returns true. // Retrieves the platforms satisfying the given filter, i.e. returns true.
// Returned Platforms are always initialized. // Returned Platforms are always initialized.
static port::StatusOr<std::vector<Platform*>> PlatformsWithFilter( static port::StatusOr<std::vector<Platform*>> PlatformsWithFilter(
const std::function<bool(const Platform*)>& filter); const std::function<bool(const Platform*)>& filter);
@@ -134,7 +134,7 @@ class MultiPlatformManager {
// during allocation of such Platforms, to avoid spurious reporting at program // during allocation of such Platforms, to avoid spurious reporting at program
// exit. // exit.
// Interface for a listener that gets notfied at certain events. // Interface for a listener that gets notified at certain events.
class Listener { class Listener {
public: public:
virtual ~Listener() = default; virtual ~Listener() = default;

File diff suppressed because it is too large

View File

@@ -110,7 +110,7 @@ class ROCMBlas : public blas::BlasSupport {
/*err_on_failure=*/false, args...); /*err_on_failure=*/false, args...);
} }
// A helper allocation funciton to convert raw pointers memory layout to // A helper allocation function to convert raw pointers memory layout to
// strided flavor // strided flavor
template <typename T> template <typename T>
port::Status AllocateStridedBuffer( port::Status AllocateStridedBuffer(

View File

@@ -2633,7 +2633,7 @@ void* MIOpenAllocatorCallback(void* ctx, size_t size_in_bytes) {
} }
void MIOpenDeallocatorCallback(void* ctx, void* mem) { void MIOpenDeallocatorCallback(void* ctx, void* mem) {
// Don't need dealloactor since the TensorFlow heap will automatically reclaim // Don't need deallocator since the TensorFlow heap will automatically reclaim
// the memory // the memory
} }
@@ -3910,7 +3910,7 @@ bool MIOpenSupport::DoPoolBackward(
return false; return false;
} }
} else { } else {
LOG(ERROR) << "Failed to calcuate tensor size to chain forward and " LOG(ERROR) << "Failed to calculate tensor size to chain forward and "
"backward pooling"; "backward pooling";
} }
@@ -4006,7 +4006,7 @@ bool MIOpenSupport::DoPoolBackward(
return false; return false;
} }
} else { } else {
LOG(ERROR) << "Failed to calcuate tensor size to chain forward and " LOG(ERROR) << "Failed to calculate tensor size to chain forward and "
"backward pooling"; "backward pooling";
} }
@@ -4144,7 +4144,7 @@ bool MIOpenSupport::DoNormalizeBackwardWithDimensions(
} }
} else { } else {
LOG(ERROR) LOG(ERROR)
<< "Failed to calcuate tensor size to chain forward and backward LRN"; << "Failed to calculate tensor size to chain forward and backward LRN";
} }
status = wrap::miopenLRNForward(miopen.handle(), normalize.handle(), &alpha, status = wrap::miopenLRNForward(miopen.handle(), normalize.handle(), &alpha,

View File

@@ -298,14 +298,14 @@ port::Status ROCMFftPlan::Initialize(
if (ret != HIPFFT_SUCCESS) { if (ret != HIPFFT_SUCCESS) {
LOG(ERROR) << "failed to create rocFFT batched plan:" << ret; LOG(ERROR) << "failed to create rocFFT batched plan:" << ret;
return port::Status{port::error::INTERNAL, return port::Status{port::error::INTERNAL,
"Failed to create rocFFT bacthed plan."}; "Failed to create rocFFT batched plan."};
} }
} else { } else {
auto ret = wrap::hipfftCreate(parent, &plan_); auto ret = wrap::hipfftCreate(parent, &plan_);
if (ret != HIPFFT_SUCCESS) { if (ret != HIPFFT_SUCCESS) {
LOG(ERROR) << "failed to create rocFFT batched plan:" << ret; LOG(ERROR) << "failed to create rocFFT batched plan:" << ret;
return port::Status{port::error::INTERNAL, return port::Status{port::error::INTERNAL,
"Failed to create rocFFT bacthed plan."}; "Failed to create rocFFT batched plan."};
} }
ret = wrap::hipfftSetAutoAllocation(parent, plan_, 0); ret = wrap::hipfftSetAutoAllocation(parent, plan_, 0);
if (ret != HIPFFT_SUCCESS) { if (ret != HIPFFT_SUCCESS) {
@@ -313,7 +313,7 @@ port::Status ROCMFftPlan::Initialize(
<< ret; << ret;
return port::Status{ return port::Status{
port::error::INTERNAL, port::error::INTERNAL,
"Failed to set auto allocation for rocFFT bacthed plan."}; "Failed to set auto allocation for rocFFT batched plan."};
} }
size_t size_in_bytes; size_t size_in_bytes;
ret = wrap::hipfftMakePlanMany( ret = wrap::hipfftMakePlanMany(
@@ -324,7 +324,7 @@ port::Status ROCMFftPlan::Initialize(
if (ret != HIPFFT_SUCCESS) { if (ret != HIPFFT_SUCCESS) {
LOG(ERROR) << "failed to make rocFFT batched plan:" << ret; LOG(ERROR) << "failed to make rocFFT batched plan:" << ret;
return port::Status{port::error::INTERNAL, return port::Status{port::error::INTERNAL,
"Failed to make rocFFT bacthed plan."}; "Failed to make rocFFT batched plan."};
} }
if (size_in_bytes != 0) { if (size_in_bytes != 0) {
auto allocated = scratch_allocator->AllocateBytes(size_in_bytes); auto allocated = scratch_allocator->AllocateBytes(size_in_bytes);
@@ -338,7 +338,7 @@ port::Status ROCMFftPlan::Initialize(
if (ret != HIPFFT_SUCCESS) { if (ret != HIPFFT_SUCCESS) {
LOG(ERROR) << "failed to set work area for rocFFT batched plan:" << ret; LOG(ERROR) << "failed to set work area for rocFFT batched plan:" << ret;
return port::Status{port::error::INTERNAL, return port::Status{port::error::INTERNAL,
"Failed to set work area for rocFFT bacthed plan."}; "Failed to set work area for rocFFT batched plan."};
} }
} }
} }

View File

@@ -31,7 +31,7 @@ class Stream;
// buffers it has allocated at destruction. Returned memory pointers are not // buffers it has allocated at destruction. Returned memory pointers are not
// owning. // owning.
// //
// Used by stream operations (e.g. Stream::ThenConvolveWithScratch) to optonally // Used by stream operations (e.g. Stream::ThenConvolveWithScratch) to optionally
// request scratch space to speed up the operation. // request scratch space to speed up the operation.
class ScratchAllocator { class ScratchAllocator {
public: public:

View File

@@ -685,7 +685,7 @@ class StreamExecutor {
std::unique_ptr<rng::RngSupport> rng_ GUARDED_BY(mu_); std::unique_ptr<rng::RngSupport> rng_ GUARDED_BY(mu_);
// Slot to cache the owned DeviceDescription for the underlying device // Slot to cache the owned DeviceDescription for the underlying device
// once it has been quieried from DeviceDescription(). // once it has been queried from DeviceDescription().
mutable std::unique_ptr<DeviceDescription> device_description_ mutable std::unique_ptr<DeviceDescription> device_description_
GUARDED_BY(mu_); GUARDED_BY(mu_);

View File

@@ -15,7 +15,7 @@
# ============================================================================== # ==============================================================================
# #
# Script to create a centos6 docker image. # Script to create a centos6 docker image.
# Before running, copy tensorrt into /tmp after downlading it from: # Before running, copy tensorrt into /tmp after downloading it from:
# https://developer.nvidia.com/nvidia-tensorrt-5x-download # https://developer.nvidia.com/nvidia-tensorrt-5x-download
# #
# TODO(klimek): once there are downloadable images for tensorrt for centos6 # TODO(klimek): once there are downloadable images for tensorrt for centos6

View File

@@ -15,7 +15,7 @@
# ============================================================================== # ==============================================================================
# #
# Script to create a centos6 docker image. # Script to create a centos6 docker image.
# Before running, copy tensorrt into /tmp after downlading it from: # Before running, copy tensorrt into /tmp after downloading it from:
# https://developer.nvidia.com/nvidia-tensorrt-5x-download # https://developer.nvidia.com/nvidia-tensorrt-5x-download
# #
# TODO(klimek): once there are downloadable images for tensorrt for centos6 # TODO(klimek): once there are downloadable images for tensorrt for centos6

View File

@@ -75,7 +75,7 @@ fi
BASE_DIR=$(upsearch "${DOCKERFILE}") BASE_DIR=$(upsearch "${DOCKERFILE}")
if [[ -z "${BASE_DIR}" ]]; then if [[ -z "${BASE_DIR}" ]]; then
die "FAILED: Unable to find the base directory where the dockerfile "\ die "FAILED: Unable to find the base directory where the dockerfile "\
"${DOCKERFFILE} resides" "${DOCKERFILE} resides"
fi fi
echo "Base directory: ${BASE_DIR}" echo "Base directory: ${BASE_DIR}"

View File

@@ -30,7 +30,7 @@
# #
# TF_BUILD_INSTALL_EXTRA_PIP_PACKAGES overrides the default extra pip packages # TF_BUILD_INSTALL_EXTRA_PIP_PACKAGES overrides the default extra pip packages
# to be installed in virtualenv before run_pip_tests.sh is called. Multiple # to be installed in virtualenv before run_pip_tests.sh is called. Multiple
# pakcage names are separated with spaces. # package names are separated with spaces.
# #
# If NO_TEST_ON_INSTALL has any non-empty and non-0 value, the test-on-install # If NO_TEST_ON_INSTALL has any non-empty and non-0 value, the test-on-install
# part will be skipped. # part will be skipped.

View File

@@ -72,7 +72,7 @@
# GIT_TAG_OVERRIDE: Values for `--git_tag_override`. This flag gets passed # GIT_TAG_OVERRIDE: Values for `--git_tag_override`. This flag gets passed
# in as `--action_env` for bazel build and tests. # in as `--action_env` for bazel build and tests.
# TF_BUILD_INSTALL_EXTRA_PIP_PACKAGES: # TF_BUILD_INSTALL_EXTRA_PIP_PACKAGES:
# Additonal pip packages to be installed. # Additional pip packages to be installed.
# Caveat: pip version needs to be checked prior. # Caveat: pip version needs to be checked prior.
# #
# ============================================================================== # ==============================================================================

View File

@@ -196,7 +196,7 @@ else
"/usr/local/cuda/lib and /usr/local/cuda/lib64" "/usr/local/cuda/lib and /usr/local/cuda/lib64"
fi fi
echo "Found CUDA library diretory at: ${CUDA_LIB_DIR}" echo "Found CUDA library directory at: ${CUDA_LIB_DIR}"
echo "" echo ""
# USER_OP_SO=$(basename $(echo "${OP_KERNEL_CC}" | sed -e 's/\.cc/\.so/')) # USER_OP_SO=$(basename $(echo "${OP_KERNEL_CC}" | sed -e 's/\.cc/\.so/'))

View File

@@ -42,7 +42,7 @@ if [[ "$MODE" == "eigen" ]]; then
else else
CONFIG="--config=mkl" CONFIG="--config=mkl"
# Setting OMP_THREADS for low performing benchmarks. # Setting OMP_THREADS for low performing benchmarks.
# Default value(=core count) degrades perfrmance of some banchmark cases. # Default value(=core count) degrades performance of some benchmark cases.
# Optimal thread count is case specific. # Optimal thread count is case specific.
# An argument can be passed to script, the value of which is used if given. # An argument can be passed to script, the value of which is used if given.
# Otherwise OMP_NUM_THREADS is set to 10 # Otherwise OMP_NUM_THREADS is set to 10

View File

@@ -612,7 +612,7 @@ addons_symbol_mappings = {
"tf.contrib.image.angles_to_projective_transforms": "tf.contrib.image.angles_to_projective_transforms":
"tfa.image.angles_to_projective_transforms", "tfa.image.angles_to_projective_transforms",
"tf.contrib.image.matrices_to_flat_transforms": "tf.contrib.image.matrices_to_flat_transforms":
"tfa.image.matricies_to_flat_transforms", "tfa.image.matrices_to_flat_transforms",
"tf.contrib.image.rotate": "tf.contrib.image.rotate":
"tfa.image.rotate", "tfa.image.rotate",
"tf.contrib.image.transform": "tf.contrib.image.transform":

View File

@@ -1992,7 +1992,7 @@ def _pool_seed_transformer(parent, node, full_name, name, logs):
def _extract_glimpse_transformer(parent, node, full_name, name, logs): def _extract_glimpse_transformer(parent, node, full_name, name, logs):
def _replace_uniform_noise_node(parent, old_value): def _replace_uniform_noise_node(parent, old_value):
"""Replaces old_value with 'uniform' or 'guassian'.""" """Replaces old_value with 'uniform' or 'gaussian'."""
uniform = ast.Str(s="uniform") uniform = ast.Str(s="uniform")
gaussian = ast.Str(s="gaussian") gaussian = ast.Str(s="gaussian")
new_value = ast.IfExp(body=uniform, test=old_value, orelse=gaussian) new_value = ast.IfExp(body=uniform, test=old_value, orelse=gaussian)
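The transformer above replaces a boolean expression with the string 'uniform' or 'gaussian' by wrapping the original expression in an `ast.IfExp`. A minimal standalone sketch of the same construction is below; it uses `ast.Constant` and `ast.unparse` (Python 3.9+) instead of the deprecated `ast.Str`, and `flags.uniform_noise` is only a placeholder expression.

```python
import ast

# Placeholder for the expression that was originally passed as the boolean argument.
old_value = ast.parse("flags.uniform_noise", mode="eval").body

# Build: 'uniform' if <old_value> else 'gaussian'
uniform = ast.Constant(value="uniform")
gaussian = ast.Constant(value="gaussian")
new_value = ast.IfExp(body=uniform, test=old_value, orelse=gaussian)

print(ast.unparse(new_value))  # 'uniform' if flags.uniform_noise else 'gaussian'
```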

View File

@@ -449,7 +449,7 @@ bazel-bin/tensorflow/tools/compatibility/update/generate_v2_reorders_map
_, _, _, new_text = self._upgrade(text) _, _, _, new_text = self._upgrade(text)
self.assertEqual("tf.compat.v1." + ns_prefix + v + "(a, b)", new_text) self.assertEqual("tf.compat.v1." + ns_prefix + v + "(a, b)", new_text)
def testIntializers(self): def testInitializers(self):
initializers = [ initializers = [
"zeros", "zeros",
"ones", "ones",

View File

@@ -135,7 +135,7 @@ def do_not_doc_inheritable(obj):
# method2 # method2
``` ```
When generating docs for a class's arributes, the `__mro__` is searched and When generating docs for a class's attributes, the `__mro__` is searched and
the attribute will be skipped if this decorator is detected on the attribute the attribute will be skipped if this decorator is detected on the attribute
on any class in the `__mro__`. on any class in the `__mro__`.
@@ -178,7 +178,7 @@ def for_subclass_implementers(obj):
Works on method, or other class-attributes. Works on method, or other class-attributes.
When generating docs for a class's arributes, the `__mro__` is searched and When generating docs for a class's attributes, the `__mro__` is searched and
the attribute will be skipped if this decorator is detected on the attribute the attribute will be skipped if this decorator is detected on the attribute
on any **parent** class in the `__mro__`. on any **parent** class in the `__mro__`.
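Both decorators work the same way: they attach a marker to the decorated attribute, and the doc generator then searches the class's `__mro__` for that marker when deciding whether to document the attribute. A self-contained sketch of that lookup, using placeholder names rather than the real `doc_controls` internals:

```python
_DO_NOT_DOC = "_do_not_doc_inheritable"  # placeholder marker name, not the real attribute

def do_not_doc_inheritable(obj):
    """Mark an attribute so that neither this class nor its subclasses document it."""
    setattr(obj, _DO_NOT_DOC, True)
    return obj

def should_skip(cls, attr_name):
    """Search the MRO; skip the attribute if any class in it carries the marker."""
    for parent in cls.__mro__:
        attr = parent.__dict__.get(attr_name)
        if attr is not None and getattr(attr, _DO_NOT_DOC, False):
            return True
    return False

class Base(object):
    @do_not_doc_inheritable
    def method2(self):
        pass

class Child(Base):
    def method2(self):  # still skipped: the marker is found on Base in the MRO
        pass

print(should_skip(Child, "method2"))  # True
```

The only difference between the two decorators described above is whether a marker found on the class itself counts, or only one found on a parent class.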

View File

@@ -166,7 +166,7 @@ class DocGeneratorVisitor(object):
This function is meant to be used as the `key` to the `sorted` function. This function is meant to be used as the `key` to the `sorted` function.
This sorting in order: This sorting in order:
Prefers names refering to the defining class, over a subclass. Prefers names referring to the defining class, over a subclass.
Prefers names that are not in "contrib". Prefers names that are not in "contrib".
prefers submodules to the root namespace. prefers submodules to the root namespace.
Prefers short names `tf.thing` over `tf.a.b.c.thing` Prefers short names `tf.thing` over `tf.a.b.c.thing`

View File

@@ -46,7 +46,7 @@ def is_free_function(py_object, full_name, index):
index: The {full_name:py_object} dictionary for the public API. index: The {full_name:py_object} dictionary for the public API.
Returns: Returns:
True if the obeject is a stand-alone function, and not part of a class True if the object is a stand-alone function, and not part of a class
definition. definition.
""" """
if not tf_inspect.isfunction(py_object): if not tf_inspect.isfunction(py_object):
@@ -235,7 +235,7 @@ class ReferenceResolver(object):
return cls(doc_index=doc_index, **json_dict) return cls(doc_index=doc_index, **json_dict)
def to_json_file(self, filepath): def to_json_file(self, filepath):
"""Converts the RefenceResolver to json and writes it to the specified file. """Converts the ReferenceResolver to json and writes it to the specified file.
Args: Args:
filepath: The file path to write the json to. filepath: The file path to write the json to.

View File

@@ -32,7 +32,7 @@ from tensorflow.tools.docs import doc_controls
from tensorflow.tools.docs import parser from tensorflow.tools.docs import parser
# The test needs a real module. `types.ModuleType()` doesn't work, as the result # The test needs a real module. `types.ModuleType()` doesn't work, as the result
# is a `builtin` module. Using "parser" here is arbitraty. The tests don't # is a `builtin` module. Using "parser" here is arbitrary. The tests don't
# depend on the module contents. At this point in the process the public api # depend on the module contents. At this point in the process the public api
# has already been extracted. # has already been extracted.
test_module = parser test_module = parser

View File

@@ -18,7 +18,7 @@
The adjacent `parser` module creates `PageInfo` objects, containing all data The adjacent `parser` module creates `PageInfo` objects, containing all data
necessary to document an element of the TensorFlow API. necessary to document an element of the TensorFlow API.
This module contains one public function, which handels the conversion of these This module contains one public function, which handles the conversion of these
`PageInfo` objects into a markdown string: `PageInfo` objects into a markdown string:
md_page = build_md_page(page_info) md_page = build_md_page(page_info)

View File

@@ -19,7 +19,7 @@ limitations under the License.
namespace tensorflow { namespace tensorflow {
namespace graph_transforms { namespace graph_transforms {
// Remove control depdencies in preparation for inference. // Remove control dependencies in preparation for inference.
// In the tensorflow graph, control dependencies are represented as extra // In the tensorflow graph, control dependencies are represented as extra
// inputs which are referenced with "^tensor_name". // inputs which are referenced with "^tensor_name".
// See node_def.proto for more details. // See node_def.proto for more details.
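Because a control dependency is just an ordinary entry in a node's input list carrying a `^` prefix, removing control dependencies amounts to filtering those entries out. A small illustration (in Python for brevity; the actual transform operates on `NodeDef` protos in C++):

```python
def strip_control_inputs(node_inputs):
    """Drop control-dependency inputs, which are the entries prefixed with '^'."""
    return [inp for inp in node_inputs if not inp.startswith("^")]

print(strip_control_inputs(["conv/kernel", "^global_init", "conv/bias"]))
# ['conv/kernel', 'conv/bias']
```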

View File

@@ -596,7 +596,7 @@ Status GetInOutTypes(const NodeDef& node_def, DataTypeVector* inputs,
Status TensorShapeFromString(const string& shape_string, TensorShape* result) { Status TensorShapeFromString(const string& shape_string, TensorShape* result) {
if (shape_string.empty()) { if (shape_string.empty()) {
return errors::InvalidArgument("Specificed shape is empty."); return errors::InvalidArgument("Specified shape is empty.");
} }
std::vector<string> dims_as_str = str_util::Split(shape_string, ","); std::vector<string> dims_as_str = str_util::Split(shape_string, ",");
std::vector<int64> dims; std::vector<int64> dims;
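The function shown splits a comma-separated shape string into integer dimensions and rejects the empty string. A rough Python equivalent of that logic, as a sketch rather than the TensorFlow implementation:

```python
def tensor_shape_from_string(shape_string):
    """Parse a spec such as '1,224,224,3' into a list of integer dimensions."""
    if not shape_string:
        raise ValueError("Specified shape is empty.")
    return [int(dim) for dim in shape_string.split(",")]

print(tensor_shape_from_string("1,224,224,3"))  # [1, 224, 224, 3]
```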

View File

@@ -456,7 +456,7 @@ TEST(CreateProtoDebugStringLibTest, Enums) {
EXPECT_PARSE_SUCCESS("", "optional_nested_enum: -0"); EXPECT_PARSE_SUCCESS("", "optional_nested_enum: -0");
// TODO(amauryfa): restore the line below when protobuf::TextFormat also // TODO(amauryfa): restore the line below when protobuf::TextFormat also
// supports unknonwn enum values. // supports unknown enum values.
// EXPECT_PARSE_SUCCESS("optional_nested_enum: 6", "optional_nested_enum: 6"); // EXPECT_PARSE_SUCCESS("optional_nested_enum: 6", "optional_nested_enum: 6");
EXPECT_PARSE_FAILURE("optional_nested_enum: 2147483648"); // > INT32_MAX EXPECT_PARSE_FAILURE("optional_nested_enum: 2147483648"); // > INT32_MAX
EXPECT_PARSE_FAILURE("optional_nested_enum: BARNONE"); EXPECT_PARSE_FAILURE("optional_nested_enum: BARNONE");

View File

@@ -117,7 +117,7 @@ def _get_func_name():
class ConfigCompatChecker(object): class ConfigCompatChecker(object):
"""Class that checks configuration versions and depencency compatibilities. """Class that checks configuration versions and dependency compatibilities.
`ConfigCompatChecker` checks a given set of configurations and their versions `ConfigCompatChecker` checks a given set of configurations and their versions
against supported versions and dependency rules defined in `.ini` config file. against supported versions and dependency rules defined in `.ini` config file.
@@ -180,7 +180,7 @@ class ConfigCompatChecker(object):
"""Prints a requirement and its components. """Prints a requirement and its components.
Returns: Returns:
String that has concantenated information about a requirement. String that has concatenated information about a requirement.
""" """
info = { info = {
"section": self._section, "section": self._section,
@@ -200,7 +200,7 @@ class ConfigCompatChecker(object):
req_str += "Range: {range}\n" req_str += "Range: {range}\n"
req_str += "Exclude: {exclude}\n" req_str += "Exclude: {exclude}\n"
req_str += "Include: {include}\n" req_str += "Include: {include}\n"
req_str += "Initilalized: {init}\n\n" req_str += "Initialized: {init}\n\n"
return req_str.format(**info) return req_str.format(**info)
@@ -214,7 +214,7 @@ class ConfigCompatChecker(object):
[1] String that includes `range` indicating range syntax for defining [1] String that includes `range` indicating range syntax for defining
a requirement. a requirement.
e.g. `range(1.0, 2.0) include(3.0) exclude(1.5)` e.g. `range(1.0, 2.0) include(3.0) exclude(1.5)`
[2] List that includes inidividual supported versions or items. [2] List that includes individual supported versions or items.
e.g. [`1.0`, `3.0`, `7.1`] e.g. [`1.0`, `3.0`, `7.1`]
For a list type requirement, it directly stores the list to For a list type requirement, it directly stores the list to
@@ -380,7 +380,7 @@ class ConfigCompatChecker(object):
parser.read(self.req_file) parser.read(self.req_file)
if not parser.sections(): if not parser.sections():
err_msg = "[Error] Empty confie file. " err_msg = "[Error] Empty config file. "
err_msg += "(file = %s, " % str(self.req_file) err_msg += "(file = %s, " % str(self.req_file)
err_msg += "parser sectons = %s)" % str(parser.sections()) err_msg += "parser sectons = %s)" % str(parser.sections())
self.error_msg.append(err_msg) self.error_msg.append(err_msg)
@@ -427,7 +427,7 @@ class ConfigCompatChecker(object):
self.warning_msg.append(warn_msg) self.warning_msg.append(warn_msg)
# Last dependency item may only or not have `]` depending # Last dependency item may only or not have `]` depending
# on the identation style in the config (.ini) file. # on the indentation style in the config (.ini) file.
# If it has `[`, then either skip or remove from string. # If it has `[`, then either skip or remove from string.
if spec_split[-1] == "]": if spec_split[-1] == "]":
spec_split = spec_split[:-1] spec_split = spec_split[:-1]
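As described above, a requirement can be spelled either as a range expression with optional `include`/`exclude` clauses or as a plain list of supported versions. A self-contained sketch of decomposing such a spec string (illustrative only; the real checker's parsing, field names, and error handling differ):

```python
import re

def parse_requirement(spec):
    """Decompose a requirement spec into range/include/exclude, or a plain list."""
    spec = spec.strip()
    if spec.startswith("["):  # e.g. "[1.0, 3.0, 7.1]"
        return {"list": [item.strip(" `") for item in spec.strip("[]").split(",")]}
    req = {"range": None, "include": [], "exclude": []}
    range_match = re.search(r"range\(([^)]*)\)", spec)
    if range_match:
        low, high = (v.strip() for v in range_match.group(1).split(","))
        req["range"] = (low, high)
    req["include"] = re.findall(r"include\(([^)]*)\)", spec)
    req["exclude"] = re.findall(r"exclude\(([^)]*)\)", spec)
    return req

print(parse_requirement("range(1.0, 2.0) include(3.0) exclude(1.5)"))
# {'range': ('1.0', '2.0'), 'include': ['3.0'], 'exclude': ['1.5']}
```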

View File

@@ -327,7 +327,7 @@ def get_cuda_version_all():
def get_cuda_version_default(): def get_cuda_version_default():
"""Retrieves default CUDA version. """Retrieves default CUDA version.
Default verion is the version found in `/usr/local/cuda/` installation. Default version is the version found in `/usr/local/cuda/` installation.
stderr is silenced by default. Setting FLAGS.debug mode will not enable it. stderr is silenced by default. Setting FLAGS.debug mode will not enable it.
Remove `2> /dev/null` command from `cmds_linux['cuda_ver_dflt']` to enable Remove `2> /dev/null` command from `cmds_linux['cuda_ver_dflt']` to enable

View File

@@ -15,8 +15,8 @@ def _cc_clang_autoconf(repo_ctx):
return return
download_clang(repo_ctx, out_folder = "extra_tools") download_clang(repo_ctx, out_folder = "extra_tools")
overriden_tools = {"gcc": "extra_tools/bin/clang"} overridden_tools = {"gcc": "extra_tools/bin/clang"}
cc_autoconf_impl(repo_ctx, overriden_tools) cc_autoconf_impl(repo_ctx, overridden_tools)
cc_download_clang_toolchain = repository_rule( cc_download_clang_toolchain = repository_rule(
environ = [ environ = [

View File

@@ -17,7 +17,7 @@ def flatbuffer_library_public(
include_paths = [], include_paths = [],
flatc_args = DEFAULT_FLATC_ARGS, flatc_args = DEFAULT_FLATC_ARGS,
reflection_name = "", reflection_name = "",
reflection_visiblity = None, reflection_visibility = None,
output_to_bindir = False): output_to_bindir = False):
"""Generates code files for reading/writing the given flatbuffers in the requested language using the public compiler. """Generates code files for reading/writing the given flatbuffers in the requested language using the public compiler.
@@ -101,7 +101,7 @@ def flatbuffer_library_public(
# entries = [ # entries = [
# native.FilesetEntry(files = reflection_outs), # native.FilesetEntry(files = reflection_outs),
# ], # ],
# visibility = reflection_visiblity, # visibility = reflection_visibility,
# ) # )
def flatbuffer_cc_library( def flatbuffer_cc_library(
@@ -191,7 +191,7 @@ def flatbuffer_cc_library(
include_paths = include_paths, include_paths = include_paths,
flatc_args = flatc_args, flatc_args = flatc_args,
reflection_name = reflection_name, reflection_name = reflection_name,
reflection_visiblity = visibility, reflection_visibility = visibility,
) )
native.cc_library( native.cc_library(
name = name, name = name,

View File

@@ -117,7 +117,7 @@ def InvokeNvcc(argv, log=False):
out_file = [ f for f in argv if f.startswith('/Fo') ] out_file = [ f for f in argv if f.startswith('/Fo') ]
if len(out_file) != 1: if len(out_file) != 1:
raise Error('Please sepecify exactly one output file for cuda compilation.') raise Error('Please specify exactly one output file for cuda compilation.')
out = ['-o', out_file[0][len('/Fo'):]] out = ['-o', out_file[0][len('/Fo'):]]
nvcc_compiler_options, argv = GetNvccOptions(argv) nvcc_compiler_options, argv = GetNvccOptions(argv)
@@ -136,7 +136,7 @@ def InvokeNvcc(argv, log=False):
undefines, argv = GetOptionValue(argv, 'U') undefines, argv = GetOptionValue(argv, 'U')
undefines = ['-U' + define for define in undefines] undefines = ['-U' + define for define in undefines]
# The rest of the unrecongized options should be passed to host compiler # The rest of the unrecognized options should be passed to host compiler
host_compiler_options = [option for option in argv if option not in (src_files + out_file)] host_compiler_options = [option for option in argv if option not in (src_files + out_file)]
m_options = ["-m64"] m_options = ["-m64"]
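The wrapper mostly routes arguments: it requires exactly one MSVC-style `/Fo<path>` output (turned into nvcc's `-o`), collects preprocessor undefines, and forwards the options nvcc does not consume to the host compiler. A simplified sketch of that routing; the source-file detection and the `/U` spelling are simplifying assumptions:

```python
def route_nvcc_args(argv):
    """Split wrapper arguments into nvcc output, sources, undefines, and host flags."""
    out_file = [f for f in argv if f.startswith('/Fo')]
    if len(out_file) != 1:
        raise RuntimeError('Please specify exactly one output file for cuda compilation.')
    out = ['-o', out_file[0][len('/Fo'):]]

    src_files = [f for f in argv if f.endswith(('.cu', '.cc', '.cpp'))]
    undefines = ['-U' + f[2:] for f in argv if f.startswith(('-U', '/U'))]
    host_compiler_options = [
        f for f in argv
        if f not in (src_files + out_file) and not f.startswith(('-U', '/U'))
    ]
    return out, src_files, undefines, host_compiler_options

print(route_nvcc_args(['/Foobj\\kernel.obj', 'kernel.cu', '-UDEBUG', '-nologo']))
# (['-o', 'obj\\kernel.obj'], ['kernel.cu'], ['-UDEBUG'], ['-nologo'])
```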

View File

@@ -110,7 +110,7 @@ def InvokeNvcc(argv, log=False):
out_file = [ f for f in argv if f.startswith('/Fo') ] out_file = [ f for f in argv if f.startswith('/Fo') ]
if len(out_file) != 1: if len(out_file) != 1:
raise Error('Please sepecify exactly one output file for cuda compilation.') raise Error('Please specify exactly one output file for cuda compilation.')
out = ['-o', out_file[0][len('/Fo'):]] out = ['-o', out_file[0][len('/Fo'):]]
nvcc_compiler_options, argv = GetNvccOptions(argv) nvcc_compiler_options, argv = GetNvccOptions(argv)
@@ -129,7 +129,7 @@ def InvokeNvcc(argv, log=False):
undefines, argv = GetOptionValue(argv, 'U') undefines, argv = GetOptionValue(argv, 'U')
undefines = ['-U' + define for define in undefines] undefines = ['-U' + define for define in undefines]
# The rest of the unrecongized options should be passed to host compiler # The rest of the unrecognized options should be passed to host compiler
host_compiler_options = [option for option in argv if option not in (src_files + out_file)] host_compiler_options = [option for option in argv if option not in (src_files + out_file)]
m_options = ["-m64"] m_options = ["-m64"]

View File

@@ -114,7 +114,7 @@ def InvokeNvcc(argv, log=False):
out_file = [f for f in argv if f.startswith('/Fo')] out_file = [f for f in argv if f.startswith('/Fo')]
if len(out_file) != 1: if len(out_file) != 1:
raise RuntimeError('Please sepecify exactly one output file for cuda compilation.') raise RuntimeError('Please specify exactly one output file for cuda compilation.')
out = ['-o', out_file[0][len('/Fo'):]] out = ['-o', out_file[0][len('/Fo'):]]
nvcc_compiler_options, argv = GetNvccOptions(argv) nvcc_compiler_options, argv = GetNvccOptions(argv)
@@ -133,7 +133,7 @@ def InvokeNvcc(argv, log=False):
undefines, argv = GetOptionValue(argv, 'U') undefines, argv = GetOptionValue(argv, 'U')
undefines = ['-U' + define for define in undefines] undefines = ['-U' + define for define in undefines]
# The rest of the unrecongized options should be passed to host compiler # The rest of the unrecognized options should be passed to host compiler
host_compiler_options = [ host_compiler_options = [
option for option in argv if option not in (src_files + out_file) option for option in argv if option not in (src_files + out_file)
] ]

View File

@@ -110,7 +110,7 @@ def InvokeNvcc(argv, log=False):
out_file = [ f for f in argv if f.startswith('/Fo') ] out_file = [ f for f in argv if f.startswith('/Fo') ]
if len(out_file) != 1: if len(out_file) != 1:
raise Error('Please sepecify exactly one output file for cuda compilation.') raise Error('Please specify exactly one output file for cuda compilation.')
out = ['-o', out_file[0][len('/Fo'):]] out = ['-o', out_file[0][len('/Fo'):]]
nvcc_compiler_options, argv = GetNvccOptions(argv) nvcc_compiler_options, argv = GetNvccOptions(argv)
@@ -129,7 +129,7 @@ def InvokeNvcc(argv, log=False):
undefines, argv = GetOptionValue(argv, 'U') undefines, argv = GetOptionValue(argv, 'U')
undefines = ['-U' + define for define in undefines] undefines = ['-U' + define for define in undefines]
# The rest of the unrecongized options should be passed to host compiler # The rest of the unrecognized options should be passed to host compiler
host_compiler_options = [option for option in argv if option not in (src_files + out_file)] host_compiler_options = [option for option in argv if option not in (src_files + out_file)]
m_options = ["-m64"] m_options = ["-m64"]

View File

@@ -117,7 +117,7 @@ def InvokeNvcc(argv, log=False):
out_file = [ f for f in argv if f.startswith('/Fo') ] out_file = [ f for f in argv if f.startswith('/Fo') ]
if len(out_file) != 1: if len(out_file) != 1:
raise Error('Please sepecify exactly one output file for cuda compilation.') raise Error('Please specify exactly one output file for cuda compilation.')
out = ['-o', out_file[0][len('/Fo'):]] out = ['-o', out_file[0][len('/Fo'):]]
nvcc_compiler_options, argv = GetNvccOptions(argv) nvcc_compiler_options, argv = GetNvccOptions(argv)
@@ -136,7 +136,7 @@ def InvokeNvcc(argv, log=False):
undefines, argv = GetOptionValue(argv, 'U') undefines, argv = GetOptionValue(argv, 'U')
undefines = ['-U' + define for define in undefines] undefines = ['-U' + define for define in undefines]
# The rest of the unrecongized options should be passed to host compiler # The rest of the unrecognized options should be passed to host compiler
host_compiler_options = [option for option in argv if option not in (src_files + out_file)] host_compiler_options = [option for option in argv if option not in (src_files + out_file)]
m_options = ["-m64"] m_options = ["-m64"]