Internal change.

PiperOrigin-RevId: 234153216
A. Unique TensorFlower 2019-02-15 09:00:18 -08:00 committed by TensorFlower Gardener
parent 1258797b11
commit 44985cbb76
14 changed files with 285 additions and 234 deletions

View File

@@ -40,7 +40,6 @@ tensorflow/core/lib/wav/wav_io.cc
tensorflow/core/platform/cpu_info.cc
tensorflow/core/platform/default/logging.cc
tensorflow/core/platform/default/mutex.cc
tensorflow/core/platform/default/protobuf.cc
tensorflow/core/platform/default/tracing.cc
tensorflow/core/platform/denormal.cc
tensorflow/core/platform/env.cc
@@ -53,6 +52,7 @@ tensorflow/core/platform/posix/error.cc
tensorflow/core/platform/posix/load_library.cc
tensorflow/core/platform/posix/port.cc
tensorflow/core/platform/posix/posix_file_system.cc
tensorflow/core/platform/protobuf.cc
tensorflow/core/platform/protobuf_util.cc
tensorflow/core/platform/setround.cc
tensorflow/core/platform/tensor_coding.cc

View File

@@ -128,7 +128,6 @@ load(
"tf_additional_libdevice_srcs",
"tf_additional_minimal_lib_srcs",
"tf_additional_mpi_lib_defines",
"tf_additional_proto_compiler_hdrs",
"tf_additional_proto_hdrs",
"tf_additional_proto_srcs",
"tf_additional_test_deps",
@@ -418,9 +417,8 @@ cc_library(
name = "platform_protobuf",
srcs = tf_platform_hdrs([
"protobuf.h",
]) + tf_platform_srcs([
"protobuf.cc",
]) + [
"platform/protobuf.cc",
"platform/protobuf_util.cc",
"lib/core/status.h",
],
@@ -664,7 +662,7 @@ cc_library(
name = "lib_proto_compiler",
hdrs = [
"platform/protobuf_compiler.h",
] + tf_additional_proto_compiler_hdrs(),
],
copts = tf_copts(),
deps = tf_lib_proto_compiler_deps() + [
":lib_proto_parsing",
@@ -1049,13 +1047,13 @@ cc_library(
"platform/default/integral_types.h",
"platform/default/logging.h",
"platform/default/mutex.h",
"platform/default/protobuf.h",
"platform/default/thread_annotations.h",
"platform/dynamic_annotations.h",
"platform/macros.h",
"platform/mutex.h",
"platform/platform.h",
"platform/prefetch.h",
"platform/protobuf.h",
"platform/thread_annotations.h",
"platform/types.h",
"platform/cpu_info.h",
@@ -2317,6 +2315,7 @@ cc_library(
"platform/**/logging.cc",
"platform/**/human_readable_json.cc",
"platform/abi.cc",
"platform/protobuf.cc",
],
) + tf_additional_lib_srcs(
exclude = [

View File

@@ -21,6 +21,7 @@ limitations under the License.
#include <vector>
#include "tensorflow/core/framework/allocator.h"
#include "tensorflow/core/platform/macros.h"
#include "tensorflow/core/platform/mutex.h"
#include "tensorflow/core/platform/numa.h"

View File

@@ -530,19 +530,13 @@ def tf_additional_proto_hdrs():
return [
"platform/default/integral_types.h",
"platform/default/logging.h",
"platform/default/protobuf.h",
] + if_windows([
"platform/windows/integral_types.h",
])
def tf_additional_proto_compiler_hdrs():
return [
"platform/default/protobuf_compiler.h",
]
def tf_additional_proto_srcs():
return [
"platform/default/protobuf.cc",
"platform/protobuf.cc",
]
def tf_additional_human_readable_json_deps():

View File

@@ -46,7 +46,7 @@ Status HumanReadableJsonToProto(const string& str, protobuf::Message* proto) {
return errors::Internal("Cannot parse JSON protos on Android");
#else
proto->Clear();
auto status = google::protobuf::util::JsonStringToMessage(str, proto);
auto status = protobuf::util::JsonStringToMessage(str, proto);
if (!status.ok()) {
// Convert error_msg google::protobuf::StringPiece to
// tensorflow::StringPiece.
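For context, a minimal usage sketch of the function this hunk touches (not part of the diff). The header path and the choice of ConfigProto as the target message are assumptions for illustration only:

#include "tensorflow/core/platform/human_readable_json.h"  // assumed location
#include "tensorflow/core/protobuf/config.pb.h"            // assumed example proto

namespace tensorflow {

// Hypothetical caller: fills a ConfigProto from a JSON string. On non-Android
// builds this routes through protobuf::util::JsonStringToMessage, now reached
// via the ::tensorflow::protobuf alias rather than ::google::protobuf.
Status ConfigFromJson(const string& json, ConfigProto* config) {
  return HumanReadableJsonToProto(json, config);
}

}  // namespace tensorflow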

View File

@@ -1,47 +0,0 @@
/* Copyright 2015 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#ifndef TENSORFLOW_CORE_PLATFORM_DEFAULT_PROTOBUF_H_
#define TENSORFLOW_CORE_PLATFORM_DEFAULT_PROTOBUF_H_
// IWYU pragma: private, include "third_party/tensorflow/core/platform/protobuf.h"
// IWYU pragma: friend third_party/tensorflow/core/platform/protobuf.h
#ifndef TENSORFLOW_LITE_PROTOS
#include "google/protobuf/descriptor.h"
#include "google/protobuf/descriptor.pb.h"
#include "google/protobuf/dynamic_message.h"
#include "google/protobuf/io/tokenizer.h"
#include "google/protobuf/text_format.h"
#include "google/protobuf/util/json_util.h"
#include "google/protobuf/util/type_resolver_util.h"
#endif
#include "google/protobuf/arena.h"
#include "google/protobuf/io/coded_stream.h"
#include "google/protobuf/io/zero_copy_stream.h"
#include "google/protobuf/io/zero_copy_stream_impl_lite.h"
#include "google/protobuf/map.h"
#include "google/protobuf/repeated_field.h"
namespace tensorflow {
namespace protobuf = ::google::protobuf;
using protobuf_int64 = ::google::protobuf::int64;
using protobuf_uint64 = ::google::protobuf::uint64;
extern const char* kProtobufInt64Typename;
extern const char* kProtobufUint64Typename;
} // namespace tensorflow
#endif // TENSORFLOW_CORE_PLATFORM_DEFAULT_PROTOBUF_H_

View File

@@ -1,25 +0,0 @@
/* Copyright 2015 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#ifndef TENSORFLOW_CORE_PLATFORM_DEFAULT_PROTOBUF_COMPILER_H_
#define TENSORFLOW_CORE_PLATFORM_DEFAULT_PROTOBUF_COMPILER_H_
// IWYU pragma: private, include "third_party/tensorflow/core/platform/protobuf_compiler.h"
// IWYU pragma: friend third_party/tensorflow/core/platform/protobuf_compiler.h
#include "google/protobuf/compiler/importer.h"
#include "tensorflow/core/platform/default/protobuf.h"
#endif // TENSORFLOW_CORE_PLATFORM_DEFAULT_PROTOBUF_COMPILER_H_

View File

@@ -1,30 +0,0 @@
/* Copyright 2018 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#include "tensorflow/core/platform/default/string_coding.h"
namespace tensorflow {
namespace port {
std::unique_ptr<StringListEncoder> NewStringListEncoder(string* out) {
return std::unique_ptr<StringListEncoder>(new StringListEncoder(out));
}
std::unique_ptr<StringListDecoder> NewStringListDecoder(const string& in) {
return std::unique_ptr<StringListDecoder>(new StringListDecoder(in));
}
} // namespace port
} // namespace tensorflow

View File

@@ -1,98 +0,0 @@
/* Copyright 2018 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#ifndef TENSORFLOW_CORE_PLATFORM_DEFAULT_STRING_CODING_H_
#define TENSORFLOW_CORE_PLATFORM_DEFAULT_STRING_CODING_H_
// IWYU pragma: private, include "third_party/tensorflow/core/platform/tensor_coding.h"
// IWYU pragma: friend third_party/tensorflow/core/platform/tensor_coding.h
#include "tensorflow/core/lib/core/coding.h"
#include "tensorflow/core/lib/strings/strcat.h"
#include "tensorflow/core/platform/protobuf.h"
#include "tensorflow/core/platform/types.h"
namespace tensorflow {
namespace port {
// Encodes sequences of strings and serialized protocol buffers into a string.
// Normal usage consists of zero or more calls to Append() and a single call to
// Finalize().
class StringListEncoder {
public:
explicit StringListEncoder(string* out) : out_(out) {}
// Encodes the given protocol buffer. This may not be called after Finalize().
void Append(const protobuf::MessageLite& m) {
core::PutVarint32(out_, m.ByteSize());
m.AppendToString(&rest_);
}
// Encodes the given string. This may not be called after Finalize().
void Append(const string& s) {
core::PutVarint32(out_, s.length());
strings::StrAppend(&rest_, s);
}
// Signals end of the encoding process. No other calls are allowed after this.
void Finalize() { strings::StrAppend(out_, rest_); }
private:
string* out_;
string rest_;
};
// Decodes a string into sequences of strings (which may represent serialized
// protocol buffers). Normal usage involves a single call to ReadSizes() in
// order to retrieve the length of all the strings in the sequence. For each
// size returned a call to Data() is expected and will return the actual
// string.
class StringListDecoder {
public:
explicit StringListDecoder(const string& in) : reader_(in) {}
// Populates the given vector with the lengths of each string in the sequence
// being decoded. Upon returning the vector is guaranteed to contain as many
// elements as there are strings in the sequence.
bool ReadSizes(std::vector<uint32>* sizes) {
int64 total = 0;
for (auto& size : *sizes) {
if (!core::GetVarint32(&reader_, &size)) return false;
total += size;
}
if (total != static_cast<int64>(reader_.size())) {
return false;
}
return true;
}
// Returns a pointer to the next string in the sequence, then prepares for the
// next call by advancing 'size' characters in the sequence.
const char* Data(uint32 size) {
const char* data = reader_.data();
reader_.remove_prefix(size);
return data;
}
private:
StringPiece reader_;
};
std::unique_ptr<StringListEncoder> NewStringListEncoder(string* out);
std::unique_ptr<StringListDecoder> NewStringListDecoder(const string& in);
} // namespace port
} // namespace tensorflow
#endif // TENSORFLOW_CORE_PLATFORM_DEFAULT_STRING_CODING_H_

View File

@@ -1,4 +1,4 @@
/* Copyright 2016 The TensorFlow Authors. All Rights Reserved.
/* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
@@ -13,11 +13,11 @@ See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#include "tensorflow/core/platform/default/protobuf.h"
#include "tensorflow/core/platform/protobuf.h"
namespace tensorflow {
const char* kProtobufInt64Typename = "::google::protobuf::int64";
const char* kProtobufUint64Typename = "::google::protobuf::uint64";
const char* kProtobufInt64Typename = "::tensorflow::protobuf_int64";
const char* kProtobufUint64Typename = "::tensorflow::protobuf_uint64";
} // namespace tensorflow

View File

@@ -25,13 +25,31 @@ limitations under the License.
// TensorFlow code should use the ::tensorflow::protobuf namespace to
// refer to all protobuf APIs.
#if defined(PLATFORM_GOOGLE) && !defined(USE_DEFAULT_PROTOBUF)
#include "tensorflow/core/platform/google/protobuf.h"
#else
#include "tensorflow/core/platform/default/protobuf.h"
#ifndef TENSORFLOW_LITE_PROTOS
#include "google/protobuf/io/tokenizer.h"
#include "google/protobuf/descriptor.pb.h"
#include "google/protobuf/descriptor.h"
#include "google/protobuf/dynamic_message.h"
#include "google/protobuf/text_format.h"
#include "google/protobuf/util/json_util.h"
#include "google/protobuf/util/type_resolver_util.h"
#endif
#include "google/protobuf/io/coded_stream.h"
#include "google/protobuf/io/zero_copy_stream.h"
#include "google/protobuf/io/zero_copy_stream_impl_lite.h"
#include "google/protobuf/arena.h"
#include "google/protobuf/map.h"
#include "google/protobuf/repeated_field.h"
namespace tensorflow {
namespace protobuf = ::google::protobuf;
using protobuf_int64 = ::google::protobuf::int64;
using protobuf_uint64 = ::google::protobuf::uint64;
extern const char* kProtobufInt64Typename;
extern const char* kProtobufUint64Typename;
// Parses a protocol buffer contained in a string in the binary wire format.
// Returns true on success. Note: Unlike protobuf's builtin ParseFromString,
// this function has no size restrictions on the total size of the encoded
@@ -47,9 +65,20 @@ inline const string& ProtobufStringToString(const string& s) { return s; }
// Set <dest> to <src>. Swapping is allowed, as <src> does not need to be
// preserved.
inline void SetProtobufStringSwapAllowed(string* src, string* dest) {
dest->swap(*src);
*dest = std::move(*src);
}
#if defined(TENSORFLOW_PROTOBUF_USES_CORD)
// These versions of ProtobufStringToString and SetProtobufString get used by
// tools/proto_text's generated code. They have the same name as the versions
// in core/platform/protobuf.h, so the generation code doesn't need to determine
// if the type is Cord or string at generation time.
inline string ProtobufStringToString(const Cord& s) { return s.ToString(); }
inline void SetProtobufStringSwapAllowed(string* src, Cord* dest) {
dest->CopyFrom(*src);
}
#endif // defined(TENSORFLOW_PROTOBUF_USES_CORD)
} // namespace tensorflow
#endif // TENSORFLOW_CORE_PLATFORM_PROTOBUF_H_
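For context, a sketch of how TensorFlow code is expected to reach protobuf through the aliases this header now defines directly (not part of the diff; assumes a full, non-lite protobuf build so TextFormat is available, and the helper names are hypothetical):

#include "tensorflow/core/platform/protobuf.h"
#include "tensorflow/core/platform/types.h"

namespace tensorflow {

// Hypothetical helper: client code names protobuf only through the
// ::tensorflow::protobuf namespace alias, never ::google::protobuf directly.
bool ParseTextProto(const string& text, protobuf::Message* msg) {
  return protobuf::TextFormat::ParseFromString(text, msg);
}

// The width-stable 64-bit aliases are re-exported alongside the namespace alias.
protobuf_int64 ToSigned(protobuf_uint64 v) {
  return static_cast<protobuf_int64>(v);
}

}  // namespace tensorflow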

View File

@@ -16,10 +16,6 @@ limitations under the License.
#ifndef TENSORFLOW_PLATFORM_PROTOBUF_COMPILER_H_
#define TENSORFLOW_PLATFORM_PROTOBUF_COMPILER_H_
#if defined(PLATFORM_GOOGLE) && !defined(USE_DEFAULT_PROTOBUF)
#include "tensorflow/core/platform/google/protobuf_compiler.h"
#else
#include "tensorflow/core/platform/default/protobuf_compiler.h"
#endif
#include "google/protobuf/compiler/importer.h"
#endif // TENSORFLOW_PLATFORM_PROTOBUF_COMPILER_H_

View File

@@ -19,6 +19,12 @@ limitations under the License.
#include "tensorflow/core/lib/core/coding.h"
#include "tensorflow/core/lib/core/stringpiece.h"
#include "tensorflow/core/lib/strings/strcat.h"
#include "tensorflow/core/platform/protobuf.h"
#if defined(TENSORFLOW_PROTOBUF_USES_CORD)
#include "strings/cord_varint.h"
#endif // defined(TENSORFLOW_PROTOBUF_USES_CORD)
namespace tensorflow {
namespace port {
@@ -66,5 +72,174 @@ void CopyFromArray(string* s, const char* base, size_t bytes) {
s->assign(base, bytes);
}
class StringListEncoderImpl : public StringListEncoder {
public:
explicit StringListEncoderImpl(string* out) : out_(out) {}
~StringListEncoderImpl() override = default;
void Append(const protobuf::MessageLite& m) override {
core::PutVarint32(out_, m.ByteSizeLong());
tensorflow::string serialized_message;
m.AppendToString(&serialized_message);
strings::StrAppend(&rest_, serialized_message);
}
void Append(const string& s) override {
core::PutVarint32(out_, s.length());
strings::StrAppend(&rest_, s);
}
void Finalize() override { strings::StrAppend(out_, rest_); }
private:
string* out_;
string rest_;
};
class StringListDecoderImpl : public StringListDecoder {
public:
explicit StringListDecoderImpl(const string& in) : reader_(in) {}
~StringListDecoderImpl() override = default;
bool ReadSizes(std::vector<uint32>* sizes) override {
int64 total = 0;
for (auto& size : *sizes) {
if (!core::GetVarint32(&reader_, &size)) return false;
total += size;
}
if (total != static_cast<int64>(reader_.size())) {
return false;
}
return true;
}
const char* Data(uint32 size) override {
const char* data = reader_.data();
reader_.remove_prefix(size);
return data;
}
private:
StringPiece reader_;
};
std::unique_ptr<StringListEncoder> NewStringListEncoder(string* out) {
return std::unique_ptr<StringListEncoder>(new StringListEncoderImpl(out));
}
std::unique_ptr<StringListDecoder> NewStringListDecoder(const string& in) {
return std::unique_ptr<StringListDecoder>(new StringListDecoderImpl(in));
}
#if defined(TENSORFLOW_PROTOBUF_USES_CORD)
void AssignRefCounted(StringPiece src, core::RefCounted* obj, Cord* out) {
obj->Ref();
out->Clear();
// Defines a lambda to unref "obj" when Cord deletes this piece of
// memory. +[] converts the lambda to a C style function pointer.
auto cleanup = +[](absl::string_view donotcare, void* obj) {
reinterpret_cast<core::RefCounted*>(obj)->Unref();
};
out->AppendExternalMemory(absl::string_view(src.data(), src.size()), obj,
cleanup);
}
void EncodeStringList(const string* strings, int64 n, Cord* out) {
out->Clear();
for (int i = 0; i < n; ++i) {
::strings::CordAppendVarint(strings[i].size(), out);
}
for (int i = 0; i < n; ++i) {
out->Append(strings[i]);
}
}
bool DecodeStringList(const Cord& src, string* strings, int64 n) {
std::vector<uint32> sizes(n);
CordReader reader(src);
int64 tot = 0;
for (auto& v : sizes) {
if (!::strings::CordReaderReadVarint(&reader, &v)) return false;
tot += v;
}
if (tot != reader.Available()) {
return false;
}
string* data = strings;
for (int i = 0; i < n; ++i, ++data) {
auto size = sizes[i];
if (size > reader.Available()) {
return false;
}
gtl::STLStringResizeUninitialized(data, size);
reader.ReadN(size, gtl::string_as_array(data));
}
return true;
}
void CopyFromArray(Cord* c, const char* base, size_t bytes) {
c->CopyFrom(base, bytes);
}
class CordStringListEncoderImpl : public StringListEncoder {
public:
explicit CordStringListEncoderImpl(Cord* out) : out_(out) {}
~CordStringListEncoderImpl() override = default;
void Append(const protobuf::MessageLite& m) override {
::strings::CordAppendVarint(m.ByteSizeLong(), out_);
m.AppendToString(&rest_);
}
void Append(const string& s) override {
::strings::CordAppendVarint(s.length(), out_);
rest_.append(s.data(), s.size());
}
void Finalize() override { out_->Append(rest_); }
private:
Cord* out_;
string rest_;
};
class CordStringListDecoderImpl : public StringListDecoder {
public:
explicit CordStringListDecoderImpl(const Cord& in) : reader_(in) {}
~CordStringListDecoderImpl() override = default;
bool ReadSizes(std::vector<uint32>* sizes) override {
int64 total = 0;
for (auto& size : *sizes) {
if (!::strings::CordReaderReadVarint(&reader_, &size)) return false;
total += size;
}
if (total != static_cast<int64>(reader_.Available())) {
return false;
}
return true;
}
const char* Data(uint32 size) override {
tmp_.resize(size);
reader_.ReadN(size, tmp_.data());
return tmp_.data();
}
private:
CordReader reader_;
std::vector<char> tmp_;
};
std::unique_ptr<StringListEncoder> NewStringListEncoder(Cord* out) {
return std::unique_ptr<StringListEncoder>(new CordStringListEncoderImpl(out));
}
std::unique_ptr<StringListDecoder> NewStringListDecoder(const Cord& in) {
return std::unique_ptr<StringListDecoder>(new CordStringListDecoderImpl(in));
}
#endif // defined(TENSORFLOW_PROTOBUF_USES_CORD)
} // namespace port
} // namespace tensorflow

View File

@@ -21,14 +21,9 @@ limitations under the License.
#include "tensorflow/core/lib/core/refcount.h"
#include "tensorflow/core/lib/core/stringpiece.h"
#include "tensorflow/core/platform/platform.h"
#include "tensorflow/core/platform/protobuf.h"
#include "tensorflow/core/platform/types.h"
#ifdef PLATFORM_GOOGLE
#include "tensorflow/core/platform/google/cord_coding.h"
#else
#include "tensorflow/core/platform/default/string_coding.h"
#endif
namespace tensorflow {
namespace port {
@@ -52,6 +47,68 @@ bool DecodeStringList(const string& src, string* strings, int64 n);
// Assigns base[0..bytes-1] to *s
void CopyFromArray(string* s, const char* base, size_t bytes);
// Encodes sequences of strings and serialized protocol buffers into a string.
// Normal usage consists of zero or more calls to Append() and a single call to
// Finalize().
class StringListEncoder {
public:
virtual ~StringListEncoder() = default;
// Encodes the given protocol buffer. This may not be called after Finalize().
virtual void Append(const protobuf::MessageLite& m) = 0;
// Encodes the given string. This may not be called after Finalize().
virtual void Append(const string& s) = 0;
// Signals end of the encoding process. No other calls are allowed after this.
virtual void Finalize() = 0;
};
// Decodes a string into sequences of strings (which may represent serialized
// protocol buffers). Normal usage involves a single call to ReadSizes() in
// order to retrieve the length of all the strings in the sequence. For each
// size returned a call to Data() is expected and will return the actual
// string.
class StringListDecoder {
public:
virtual ~StringListDecoder() = default;
// Populates the given vector with the lengths of each string in the sequence
// being decoded. Upon returning the vector is guaranteed to contain as many
// elements as there are strings in the sequence.
virtual bool ReadSizes(std::vector<uint32>* sizes) = 0;
// Returns a pointer to the next string in the sequence, then prepares for the
// next call by advancing 'size' characters in the sequence.
virtual const char* Data(uint32 size) = 0;
};
std::unique_ptr<StringListEncoder> NewStringListEncoder(string* out);
std::unique_ptr<StringListDecoder> NewStringListDecoder(const string& in);
#if defined(TENSORFLOW_PROTOBUF_USES_CORD)
// Store src contents in *out. If backing memory for src is shared with *out,
// will ref obj during the call and will arrange to unref obj when no
// longer needed.
void AssignRefCounted(StringPiece src, core::RefCounted* obj, Cord* out);
// TODO(kmensah): Macro guard this with a check for Cord support.
inline void CopyToArray(const Cord& src, char* dst) { src.CopyToArray(dst); }
// Store encoding of strings[0..n-1] in *out.
void EncodeStringList(const string* strings, int64 n, Cord* out);
// Decode n strings from src and store in strings[0..n-1].
// Returns true if successful, false on parse error.
bool DecodeStringList(const Cord& src, string* strings, int64 n);
// Assigns base[0..bytes-1] to *c
void CopyFromArray(Cord* c, const char* base, size_t bytes);
std::unique_ptr<StringListEncoder> NewStringListEncoder(Cord* out);
std::unique_ptr<StringListDecoder> NewStringListDecoder(const Cord& in);
#endif // defined(TENSORFLOW_PROTOBUF_USES_CORD)
} // namespace port
} // namespace tensorflow
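For context, a round-trip sketch of the StringListEncoder/StringListDecoder interfaces declared above (not part of the diff; the include path, the function name, and the sample strings are assumptions for illustration):

#include <memory>
#include <vector>

#include "tensorflow/core/platform/tensor_coding.h"  // assumed location

namespace tensorflow {

// Hypothetical round trip: encode two strings into a flat buffer, then decode
// them back. ReadSizes() expects the caller to size the vector to the number
// of strings that were appended.
bool RoundTripTwoStrings() {
  string buf;
  std::unique_ptr<port::StringListEncoder> enc =
      port::NewStringListEncoder(&buf);
  enc->Append(string("hello"));
  enc->Append(string("tensor"));
  enc->Finalize();  // No further Append() calls are allowed after this.

  std::unique_ptr<port::StringListDecoder> dec =
      port::NewStringListDecoder(buf);
  std::vector<uint32> sizes(2);  // Caller supplies the expected string count.
  if (!dec->ReadSizes(&sizes)) return false;
  string first(dec->Data(sizes[0]), sizes[0]);   // "hello"
  string second(dec->Data(sizes[1]), sizes[1]);  // "tensor"
  return first == "hello" && second == "tensor";
}

}  // namespace tensorflow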