Fix tfprof to measure the actual kernel execution time.
Change: 151166174
This commit is contained in:
parent 45dbb0a02d
commit 404db0358a
@ -52,6 +52,7 @@ sys.stdout.write('total_params: %d\n' % param_stats.total_parameters)
# also requires complete shape information. It is common that shape is unknown
# statically. To complete the shape, provide run-time shape information with
# tf.RunMetadata to the API (See next example on how to provide RunMetadata).
#
tf.contrib.tfprof.model_analyzer.print_model_analysis(
    tf.get_default_graph(),
    tfprof_options=tf.contrib.tfprof.model_analyzer.FLOAT_OPS_OPTIONS)
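# (Illustrative sketch, not part of this change.) When shapes are only known
# at run time, the same analysis can be fed a tf.RunMetadata proto so that
# float_ops can still be computed. `run_metadata` is assumed to have been
# collected by a traced sess.run() call as in the next example; the run_meta
# keyword is assumed from the tf.contrib.tfprof.model_analyzer API.
tf.contrib.tfprof.model_analyzer.print_model_analysis(
    tf.get_default_graph(),
    run_meta=run_metadata,
    tfprof_options=tf.contrib.tfprof.model_analyzer.FLOAT_OPS_OPTIONS)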
@ -64,8 +65,16 @@ compute the memory and timing statistics.

```python
# Generate the meta information for the model that contains the memory usage
# and timing information.
#
# Note: When run on GPU, a kernel is first scheduled (enqueued) and then
# executed asynchronously. tfprof only tracks the execution time, which
# comes from the proto field CostGraphDef::Node::compute_cost.
# In addition, a substantial amount of time might be spent between Python and
# the TensorFlow runtime, which is also not tracked by tfprof.
#
config = tf.ConfigProto(graph_options=tf.GraphOptions(build_cost_model=1))
run_metadata = tf.RunMetadata()
with tf.Session() as sess:
with tf.Session(config=config) as sess:
  _ = sess.run(train_op,
               options=tf.RunOptions(trace_level=tf.RunOptions.FULL_TRACE),
               run_metadata=run_metadata)
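# (Illustrative sketch, not part of this change.) Once run_metadata has been
# collected with build_cost_model=1 and FULL_TRACE, it can be handed back to
# the analyzer to report the kernel compute times and memory introduced by
# this fix. The PRINT_ALL_TIMING_MEMORY option constant and the run_meta
# keyword are assumed from tf.contrib.tfprof.model_analyzer.
tf.contrib.tfprof.model_analyzer.print_model_analysis(
    tf.get_default_graph(),
    run_meta=run_metadata,
    tfprof_options=tf.contrib.tfprof.model_analyzer.PRINT_ALL_TIMING_MEMORY)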
@ -102,7 +102,9 @@ tf_cc_test(
    name = "tfprof_show_test",
    srcs = ["tfprof_show_test.cc"],
    data = [
        "testdata/ckpt",
        "testdata/ckpt.data-00000-of-00001",
        "testdata/ckpt.index",
        "testdata/ckpt.meta",
        "testdata/graph.pbtxt",
        "testdata/run_meta",
        "testdata/tfprof_log",
@ -176,7 +178,9 @@ tf_cc_test(
    name = "tfprof_stats_test",
    srcs = ["tfprof_stats_test.cc"],
    data = [
        "testdata/ckpt",
        "testdata/ckpt.data-00000-of-00001",
        "testdata/ckpt.index",
        "testdata/ckpt.meta",
        "testdata/graph.pbtxt",
        "testdata/run_meta",
        "testdata/tfprof_log",
@ -212,7 +216,9 @@ tf_cc_test(
    name = "tfprof_tensor_test",
    srcs = ["tfprof_tensor_test.cc"],
    data = [
        "testdata/ckpt",
        "testdata/ckpt.data-00000-of-00001",
        "testdata/ckpt.index",
        "testdata/ckpt.meta",
        "testdata/graph.pbtxt",
    ],
    deps = [
BIN  tensorflow/tools/tfprof/internal/testdata/ckpt (vendored; binary file not shown)
BIN  tensorflow/tools/tfprof/internal/testdata/ckpt.data-00000-of-00001 (vendored; normal file; binary file not shown)
BIN  tensorflow/tools/tfprof/internal/testdata/ckpt.index (vendored; normal file; binary file not shown)
BIN  tensorflow/tools/tfprof/internal/testdata/ckpt.meta (vendored; normal file; binary file not shown)
1451 tensorflow/tools/tfprof/internal/testdata/graph.pbtxt (vendored; file diff suppressed because it is too large)
BIN  tensorflow/tools/tfprof/internal/testdata/run_meta (vendored; binary file not shown)
@ -1,9 +1,17 @@
[Binary proto testdata diff; the raw bytes are not reproducible as text. Node names visible in the encoded content include Conv2D, Conv2D_1, DW_trainable_variables, DW2_trainable_variables, conv2d/BiasAdd, conv2d/convolution, conv2d/bias_trainable_variables, conv2d/kernel_trainable_variables, conv2d_1/bias_trainable_variables, conv2d_1/kernel_trainable_variables, conv2d_2/BiasAdd, and conv2d_2/convolution.]
@ -29,7 +29,7 @@ void TFNode::AddStepStat(const string& device, const NodeExecStats* step_stat) {

  op_start_micros_ = step_stat_->all_start_micros();
  if (step_stat_->op_end_rel_micros() && step_stat_->op_start_rel_micros()) {
    op_exec_micros_ =
    op_schedule_micros_ =
        step_stat_->op_end_rel_micros() - step_stat_->op_start_rel_micros();
  }
  all_spent_micros_ = step_stat_->all_end_rel_micros();
@ -43,5 +43,9 @@ void TFNode::AddStepStat(const string& device, const NodeExecStats* step_stat) {
    }
  }
}

void TFNode::AddNodeStat(const CostGraphDef::Node* cost_node) {
  kernel_compute_micros_ = cost_node->compute_cost();
}
}  // namespace tfprof
}  // namespace tensorflow
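For reference, the compute_cost field that AddNodeStat reads here is also visible from the Python side of the trace. A minimal sketch (not part of this change), assuming `run_metadata` was populated with build_cost_model=1 as in the README example above:

```python
# Sketch: inspect the CostGraphDef carried in run_metadata. compute_cost is
# the per-node kernel compute time (in microseconds) that tfprof now reports
# as exec_micros.
for cost_node in run_metadata.cost_graph.node:
  if cost_node.compute_cost > 0:
    print('%s: %d us' % (cost_node.name, cost_node.compute_cost))
```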
@ -23,6 +23,7 @@ limitations under the License.

#include "tensorflow/core/framework/allocation_description.pb.h"
#include "tensorflow/core/framework/attr_value.pb.h"
#include "tensorflow/core/framework/cost_graph.pb.h"
#include "tensorflow/core/framework/node_def.pb.h"
#include "tensorflow/core/framework/step_stats.pb.h"
#include "tensorflow/core/framework/tensor_description.pb.h"
@ -39,7 +40,8 @@ class TFNode {
      : node_(node),
        step_stat_(nullptr),
        op_start_micros_(0),
        op_exec_micros_(0),
        op_schedule_micros_(0),
        kernel_compute_micros_(0),
        all_spent_micros_(0),
        requested_bytes_(0),
        float_ops_(0) {
@ -76,12 +78,19 @@ class TFNode {

  void AddStepStat(const string& device, const NodeExecStats* step_stat);

  // Add CostGraphDef::Node.
  void AddNodeStat(const CostGraphDef::Node* cost_node);

  void AddFloatOps(int64 float_ops) { float_ops_ = float_ops; }

  const NodeDef* node_def() { return node_; }
  const std::map<string, TFNode*>& inputs() { return inputs_; }
  int64 op_start_micros() { return op_start_micros_; }
  int64 op_exec_micros() { return op_exec_micros_; }
  // This is time spent in Op::Compute(), which is GPU kernel schedule time.
  // Currently not used.
  int64 op_schedule_micros() { return op_schedule_micros_; }
  // This is time spent in kernel execution.
  int64 kernel_compute_micros() { return kernel_compute_micros_; }
  int64 all_spent_micros() { return all_spent_micros_; }
  int64 requested_byptes() { return requested_bytes_; }
  int64 float_ops() { return float_ops_; }
@ -101,7 +110,8 @@ class TFNode {

  std::set<string> op_types_;
  string device_;
  int64 op_start_micros_;
  int64 op_exec_micros_;
  int64 op_schedule_micros_;
  int64 kernel_compute_micros_;
  int64 all_spent_micros_;
  int64 requested_bytes_;
  int64 float_ops_;
@ -30,7 +30,7 @@ ShowNode::ShowNode(TFNode* node) : node(node), account(true) {
  if (!node->device().empty()) {
    mutable_proto()->set_device(node->device());
  }
  mutable_proto()->set_exec_micros(node->op_exec_micros());
  mutable_proto()->set_exec_micros(node->kernel_compute_micros());
  mutable_proto()->set_requested_bytes(node->requested_byptes());
  mutable_proto()->set_float_ops(node->float_ops());
@ -72,7 +72,7 @@ class TFProfShowTest : public ::testing::Test {

TEST_F(TFProfShowTest, DumpScopeMode) {
  string dump_file = io::JoinPath(testing::TmpDir(), "dump");
  Options opts(5, 0, 0, 0, 0, {".*"}, "name",
               {"Variable"},  // account_type_regexes
               {"VariableV2"},  // account_type_regexes
               {".*"}, {""}, {".*"}, {""}, false,
               {"params", "bytes", "micros", "float_ops", "num_hidden_ops"},
               false, dump_file);
@ -81,9 +81,12 @@ TEST_F(TFProfShowTest, DumpScopeMode) {
  string dump_str;
  TF_CHECK_OK(ReadFileToString(Env::Default(), dump_file, &dump_str));
  EXPECT_EQ(
      "_TFProfRoot (--/450 params, --/0 flops, --/1.80KB, --/0us)\n DW "
      "(3x3x3x6, 162/162 params, 0/0 flops, 648B/648B, 0us/0us)\n DW2 "
      "(2x2x6x12, 288/288 params, 0/0 flops, 1.15KB/1.15KB, 0us/0us)\n",
      "_TFProfRoot (--/370 params, --/0 flops, --/1.48KB, --/5us)\n "
      "conv2d/bias (5, 5/5 params, 0/0 flops, 20B/20B, 1us/1us)\n "
      "conv2d/kernel (3x3x3x5, 135/135 params, 0/0 flops, 540B/540B, "
      "1us/1us)\n conv2d_1/bias (5, 5/5 params, 0/0 flops, 20B/20B, "
      "1us/1us)\n conv2d_1/kernel (3x3x5x5, 225/225 params, 0/0 flops, "
      "900B/900B, 2us/2us)\n",
      dump_str);
}
@ -125,6 +125,20 @@ void TFStats::ParseRunMeta() {
      node->second.AddStepStat(dev_stat.device(), &node_stat);
    }
  }

  if (!run_meta_->has_cost_graph()) {
    fprintf(stderr,
            "Missing CostGraphDef in RunMetadata.\nMaybe you forgot to "
            "pass tf.ConfigProto(graph_options=tf.GraphOptions("
            "build_cost_model=1)) to Session()\n");
  }
  for (const auto& node_pb : run_meta_->cost_graph().node()) {
    auto node = nodes_map_.find(node_pb.name());
    if (node == nodes_map_.end()) {
      continue;
    }
    node->second.AddNodeStat(&node_pb);
  }
}
}  // namespace tfprof
}  // namespace tensorflow
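On the Python side, the condition that triggers this warning can be checked before handing the metadata to tfprof. A minimal sketch (not part of this change), assuming `run_metadata` came from a traced sess.run():

```python
# Sketch: the cost graph is only populated when build_cost_model is enabled
# in the session config; without it, tfprof has no kernel compute times.
if not run_metadata.HasField('cost_graph'):
  print('RunMetadata has no CostGraphDef; set '
        'tf.ConfigProto(graph_options=tf.GraphOptions(build_cost_model=1))')
```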
@ -81,16 +81,24 @@ TEST_F(TFProfStatsTest, CustomOpType) {
|
||||
TFProfNode expected;
|
||||
CHECK(protobuf::TextFormat::ParseFromString(
|
||||
"name: \"_TFProfRoot\"\nexec_micros: 0\nrequested_bytes: "
|
||||
"0\ntotal_exec_micros: 0\ntotal_requested_bytes: 1800\ntotal_parameters: "
|
||||
"450\nchildren {\n name: \"DW\"\n exec_micros: 0\n requested_bytes: "
|
||||
"648\n parameters: 162\n total_exec_micros: 0\n "
|
||||
"total_requested_bytes: 648\n total_parameters: 162\n device: "
|
||||
"0\ntotal_exec_micros: 5\ntotal_requested_bytes: 1480\ntotal_parameters: "
|
||||
"370\nchildren {\n name: \"conv2d/bias\"\n exec_micros: 1\n "
|
||||
"requested_bytes: 20\n parameters: 5\n total_exec_micros: 1\n "
|
||||
"total_requested_bytes: 20\n total_parameters: 5\n device: "
|
||||
"\"/job:localhost/replica:0/task:0/cpu:0\"\n float_ops: 0\n "
|
||||
"total_float_ops: 0\n}\nchildren {\n name: \"DW2\"\n exec_micros: 0\n "
|
||||
"requested_bytes: 1152\n parameters: 288\n total_exec_micros: 0\n "
|
||||
"total_requested_bytes: 1152\n total_parameters: 288\n device: "
|
||||
"\"/job:localhost/replica:0/task:0/cpu:0\"\n float_ops: 0\n "
|
||||
"total_float_ops: 0\n}\nfloat_ops: 0\ntotal_float_ops: 0\n",
|
||||
"total_float_ops: 0\n}\nchildren {\n name: \"conv2d/kernel\"\n "
|
||||
"exec_micros: 1\n requested_bytes: 540\n parameters: 135\n "
|
||||
"total_exec_micros: 1\n total_requested_bytes: 540\n total_parameters: "
|
||||
"135\n device: \"/job:localhost/replica:0/task:0/cpu:0\"\n float_ops: "
|
||||
"0\n total_float_ops: 0\n}\nchildren {\n name: \"conv2d_1/bias\"\n "
|
||||
"exec_micros: 1\n requested_bytes: 20\n parameters: 5\n "
|
||||
"total_exec_micros: 1\n total_requested_bytes: 20\n total_parameters: "
|
||||
"5\n device: \"/job:localhost/replica:0/task:0/cpu:0\"\n float_ops: "
|
||||
"0\n total_float_ops: 0\n}\nchildren {\n name: \"conv2d_1/kernel\"\n "
|
||||
"exec_micros: 2\n requested_bytes: 900\n parameters: 225\n "
|
||||
"total_exec_micros: 2\n total_requested_bytes: 900\n total_parameters: "
|
||||
"225\n device: \"/job:localhost/replica:0/task:0/cpu:0\"\n float_ops: "
|
||||
"0\n total_float_ops: 0\n}\nfloat_ops: 0\ntotal_float_ops: 0\n",
|
||||
&expected));
|
||||
EXPECT_EQ(expected.DebugString(), root.DebugString());
|
||||
}
|
||||
@ -105,16 +113,24 @@ TEST_F(TFProfStatsTest, CheckPointOpType) {
|
||||
TFProfNode expected;
|
||||
CHECK(protobuf::TextFormat::ParseFromString(
|
||||
"name: \"_TFProfRoot\"\nexec_micros: 0\nrequested_bytes: "
|
||||
"0\ntotal_exec_micros: 0\ntotal_requested_bytes: 1800\ntotal_parameters: "
|
||||
"450\nchildren {\n name: \"DW\"\n exec_micros: 0\n requested_bytes: "
|
||||
"648\n parameters: 162\n total_exec_micros: 0\n "
|
||||
"total_requested_bytes: 648\n total_parameters: 162\n device: "
|
||||
"0\ntotal_exec_micros: 5\ntotal_requested_bytes: 1480\ntotal_parameters: "
|
||||
"370\nchildren {\n name: \"conv2d/bias\"\n exec_micros: 1\n "
|
||||
"requested_bytes: 20\n parameters: 5\n total_exec_micros: 1\n "
|
||||
"total_requested_bytes: 20\n total_parameters: 5\n device: "
|
||||
"\"/job:localhost/replica:0/task:0/cpu:0\"\n float_ops: 0\n "
|
||||
"total_float_ops: 0\n}\nchildren {\n name: \"DW2\"\n exec_micros: 0\n "
|
||||
"requested_bytes: 1152\n parameters: 288\n total_exec_micros: 0\n "
|
||||
"total_requested_bytes: 1152\n total_parameters: 288\n device: "
|
||||
"\"/job:localhost/replica:0/task:0/cpu:0\"\n float_ops: 0\n "
|
||||
"total_float_ops: 0\n}\nfloat_ops: 0\ntotal_float_ops: 0\n",
|
||||
"total_float_ops: 0\n}\nchildren {\n name: \"conv2d/kernel\"\n "
|
||||
"exec_micros: 1\n requested_bytes: 540\n parameters: 135\n "
|
||||
"total_exec_micros: 1\n total_requested_bytes: 540\n total_parameters: "
|
||||
"135\n device: \"/job:localhost/replica:0/task:0/cpu:0\"\n float_ops: "
|
||||
"0\n total_float_ops: 0\n}\nchildren {\n name: \"conv2d_1/bias\"\n "
|
||||
"exec_micros: 1\n requested_bytes: 20\n parameters: 5\n "
|
||||
"total_exec_micros: 1\n total_requested_bytes: 20\n total_parameters: "
|
||||
"5\n device: \"/job:localhost/replica:0/task:0/cpu:0\"\n float_ops: "
|
||||
"0\n total_float_ops: 0\n}\nchildren {\n name: \"conv2d_1/kernel\"\n "
|
||||
"exec_micros: 2\n requested_bytes: 900\n parameters: 225\n "
|
||||
"total_exec_micros: 2\n total_requested_bytes: 900\n total_parameters: "
|
||||
"225\n device: \"/job:localhost/replica:0/task:0/cpu:0\"\n float_ops: "
|
||||
"0\n total_float_ops: 0\n}\nfloat_ops: 0\ntotal_float_ops: 0\n",
|
||||
&expected));
|
||||
EXPECT_EQ(expected.DebugString(), root.DebugString());
|
||||
}
|
||||
@ -144,16 +160,24 @@ TEST_F(TFProfStatsTest, TestFloatOps) {
|
||||
TFProfNode expected;
|
||||
CHECK(protobuf::TextFormat::ParseFromString(
|
||||
"name: \"_TFProfRoot\"\nexec_micros: 0\nrequested_bytes: "
|
||||
"0\ntotal_exec_micros: 11\ntotal_requested_bytes: "
|
||||
"5280\ntotal_parameters: 450\nchildren {\n name: \"Conv2D\"\n "
|
||||
"exec_micros: 0\n requested_bytes: 432\n total_exec_micros: 0\n "
|
||||
"total_requested_bytes: 432\n total_parameters: 0\n device: "
|
||||
"\"/job:localhost/replica:0/task:0/cpu:0\"\n float_ops: 5832\n "
|
||||
"total_float_ops: 5832\n}\nchildren {\n name: \"Conv2D_1\"\n "
|
||||
"exec_micros: 10\n requested_bytes: 384\n total_exec_micros: 10\n "
|
||||
"total_requested_bytes: 384\n total_parameters: 0\n device: "
|
||||
"\"/job:localhost/replica:0/task:0/cpu:0\"\n float_ops: 4608\n "
|
||||
"total_float_ops: 4608\n}\nfloat_ops: 0\ntotal_float_ops: 10440\n",
|
||||
"0\ntotal_exec_micros: 96\ntotal_requested_bytes: "
|
||||
"8656\ntotal_parameters: 370\nchildren {\n name: \"conv2d/BiasAdd\"\n "
|
||||
"exec_micros: 12\n requested_bytes: 1440\n total_exec_micros: 12\n "
|
||||
"total_requested_bytes: 1440\n total_parameters: 0\n device: "
|
||||
"\"/job:localhost/replica:0/task:0/cpu:0\"\n float_ops: 360\n "
|
||||
"total_float_ops: 360\n}\nchildren {\n name: \"conv2d/convolution\"\n "
|
||||
"exec_micros: 60\n requested_bytes: 1440\n total_exec_micros: 60\n "
|
||||
"total_requested_bytes: 1440\n total_parameters: 0\n device: "
|
||||
"\"/job:localhost/replica:0/task:0/cpu:0\"\n float_ops: 19440\n "
|
||||
"total_float_ops: 19440\n}\nchildren {\n name: \"conv2d_2/BiasAdd\"\n "
|
||||
"exec_micros: 2\n requested_bytes: 640\n total_exec_micros: 2\n "
|
||||
"total_requested_bytes: 640\n total_parameters: 0\n device: "
|
||||
"\"/job:localhost/replica:0/task:0/cpu:0\"\n float_ops: 160\n "
|
||||
"total_float_ops: 160\n}\nchildren {\n name: \"conv2d_2/convolution\"\n "
|
||||
" exec_micros: 13\n requested_bytes: 640\n total_exec_micros: 13\n "
|
||||
"total_requested_bytes: 640\n total_parameters: 0\n device: "
|
||||
"\"/job:localhost/replica:0/task:0/cpu:0\"\n float_ops: 14400\n "
|
||||
"total_float_ops: 14400\n}\nfloat_ops: 0\ntotal_float_ops: 34360\n",
|
||||
&expected));
|
||||
EXPECT_EQ(expected.DebugString(), root.DebugString());
|
||||
}
|
||||
@ -183,8 +207,8 @@ TEST_F(TFProfStatsTest, TestShowTensorValue) {

  TFProfNode expected;
  CHECK(protobuf::TextFormat::ParseFromString(
      "name: \"_TFProfRoot\"\nexec_micros: 0\nrequested_bytes: "
      "0\ntotal_exec_micros: 11\ntotal_requested_bytes: "
      "5280\ntotal_parameters: 450\nfloat_ops: 0\ntotal_float_ops: 10440\n",
      "0\ntotal_exec_micros: 96\ntotal_requested_bytes: "
      "8656\ntotal_parameters: 370\nfloat_ops: 0\ntotal_float_ops: 34360\n",
      &expected));
  EXPECT_EQ(expected.DebugString(), root.DebugString());
}
@ -55,7 +55,7 @@ class TFProfTensorTest : public ::testing::Test {
};

TEST_F(TFProfTensorTest, Basics) {
  Options opts(3, 0, 0, 0, 0, {".*"}, "name", {"Variable"}, {".*"}, {""},
  Options opts(3, 0, 0, 0, 0, {".*"}, "name", {"VariableV2"}, {".*"}, {""},
               {".*"}, {""}, false, {"tensor_value"},  // show the tensor value.
               false);
  const TFProfNode& root = tf_stats_->PrintGraph("scope", opts);
@ -64,239 +64,176 @@ TEST_F(TFProfTensorTest, Basics) {
|
||||
CHECK(protobuf::TextFormat::ParseFromString(
|
||||
"name: \"_TFProfRoot\"\nexec_micros: 0\nrequested_bytes: "
|
||||
"0\ntotal_exec_micros: 0\ntotal_requested_bytes: 0\ntotal_parameters: "
|
||||
"450\nchildren {\n name: \"DW\"\n exec_micros: 0\n requested_bytes: "
|
||||
"0\n parameters: 162\n total_exec_micros: 0\n total_requested_bytes: "
|
||||
"0\n total_parameters: 162\n float_ops: 0\n total_float_ops: 0\n "
|
||||
"tensor_value {\n dtype: DT_FLOAT\n value_double: -0.00117808\n "
|
||||
"value_double: -0.000709941\n value_double: -0.00174816\n "
|
||||
"value_double: -0.000495372\n value_double: 0.000243039\n "
|
||||
"value_double: -0.000126313\n value_double: -0.000663929\n "
|
||||
"value_double: -0.000495198\n value_double: -0.000893934\n "
|
||||
"value_double: -0.00179659\n value_double: 0.000408874\n "
|
||||
"value_double: -0.00120166\n value_double: -0.00109484\n "
|
||||
"value_double: -0.000200362\n value_double: 0.000726721\n "
|
||||
"value_double: -0.000277568\n value_double: 0.00180584\n "
|
||||
"value_double: 0.000997271\n value_double: -0.00185987\n "
|
||||
"value_double: -0.00113401\n value_double: -0.000528852\n "
|
||||
"value_double: -0.000197412\n value_double: 1.32871e-05\n "
|
||||
"value_double: -0.000285896\n value_double: -0.000428898\n "
|
||||
"value_double: -0.000424633\n value_double: 2.15488e-05\n "
|
||||
"value_double: 0.00149753\n value_double: -0.000884576\n "
|
||||
"value_double: -0.0013795\n value_double: -0.000650125\n "
|
||||
"value_double: 0.00191612\n value_double: 4.71838e-05\n "
|
||||
"value_double: 0.000400201\n value_double: 0.00239555\n "
|
||||
"value_double: -0.00177706\n value_double: -0.000781899\n "
|
||||
"value_double: -0.00145247\n value_double: 0.0020025\n "
|
||||
"value_double: 0.000597419\n value_double: 0.00135456\n "
|
||||
"value_double: 0.0015876\n value_double: -0.000993568\n "
|
||||
"value_double: 0.0006509\n value_double: -0.000894533\n "
|
||||
"value_double: -0.00129322\n value_double: 0.0003859\n "
|
||||
"value_double: 0.000415186\n value_double: -0.000439212\n "
|
||||
"value_double: 0.000442138\n value_double: 0.00212353\n "
|
||||
"value_double: 0.000702953\n value_double: 0.000713424\n "
|
||||
"value_double: -0.000304877\n value_double: -9.17046e-05\n "
|
||||
"value_double: -0.000801103\n value_double: 0.000304854\n "
|
||||
"value_double: -0.00070527\n value_double: -0.00106408\n "
|
||||
"value_double: -0.000909906\n value_double: -4.49183e-05\n "
|
||||
"value_double: 0.000104172\n value_double: -0.000438067\n "
|
||||
"value_double: -0.000317689\n value_double: -0.000769914\n "
|
||||
"value_double: -0.00157729\n value_double: 0.000220733\n "
|
||||
"value_double: 0.00107268\n value_double: -0.000186449\n "
|
||||
"value_double: -0.000807328\n value_double: 0.000456308\n "
|
||||
"value_double: -0.000593729\n value_double: -0.000954873\n "
|
||||
"value_double: -0.000268676\n value_double: 9.06328e-05\n "
|
||||
"value_double: -0.000323473\n value_double: -0.000628768\n "
|
||||
"value_double: 0.000664985\n value_double: 0.0020999\n "
|
||||
"value_double: -0.000932228\n value_double: -0.00203203\n "
|
||||
"value_double: 0.000565405\n value_double: 0.000167899\n "
|
||||
"value_double: 0.00054897\n value_double: 0.000612407\n "
|
||||
"value_double: -0.000619301\n value_double: 0.00169361\n "
|
||||
"value_double: -0.000188057\n value_double: 0.000267652\n "
|
||||
"value_double: -0.00127341\n value_double: -0.000218836\n "
|
||||
"value_double: -0.000431722\n value_double: 5.41867e-05\n "
|
||||
"value_double: 0.000296628\n value_double: 0.000819415\n "
|
||||
"value_double: -0.000758993\n value_double: -0.000114477\n "
|
||||
"value_double: 6.29219e-05\n value_double: 0.000726988\n "
|
||||
"value_double: -0.00135974\n value_double: 2.28447e-05\n "
|
||||
"value_double: 0.00120547\n value_double: -0.00136907\n "
|
||||
"value_double: -0.00140188\n value_double: 0.000201145\n "
|
||||
"value_double: -0.000774109\n value_double: 0.000798465\n "
|
||||
"value_double: -0.00131861\n value_double: 3.08996e-05\n "
|
||||
"value_double: -0.000637026\n value_double: 0.00228975\n "
|
||||
"value_double: -0.000633757\n value_double: -0.00116047\n "
|
||||
"value_double: 7.66039e-05\n value_double: 2.09167e-06\n "
|
||||
"value_double: -0.000296448\n value_double: 0.000206795\n "
|
||||
"value_double: 0.000674405\n value_double: -0.000722742\n "
|
||||
"value_double: -9.32443e-05\n value_double: -0.00170917\n "
|
||||
"value_double: -0.000505279\n value_double: 0.000628132\n "
|
||||
"value_double: -0.00145929\n value_double: 0.00106077\n "
|
||||
"value_double: -0.000796743\n value_double: 0.000498275\n "
|
||||
"value_double: -0.0002914\n value_double: -0.00230622\n "
|
||||
"value_double: -9.42872e-05\n value_double: 0.000200359\n "
|
||||
"value_double: -0.00305027\n value_double: -0.0016218\n "
|
||||
"value_double: 0.00137126\n value_double: -0.00215436\n "
|
||||
"value_double: -0.000743827\n value_double: -0.00090007\n "
|
||||
"value_double: -0.000762207\n value_double: -0.000149951\n "
|
||||
"value_double: -0.0013102\n value_double: 0.00165781\n "
|
||||
"value_double: 0.000343809\n value_double: -0.000826069\n "
|
||||
"value_double: -4.67404e-05\n value_double: 0.0023931\n "
|
||||
"value_double: 0.00165338\n value_double: -0.00050529\n "
|
||||
"value_double: 0.000178771\n value_double: -0.000858287\n "
|
||||
"value_double: -0.00157031\n value_double: -0.00165846\n "
|
||||
"value_double: -0.000713672\n value_double: 0.00014357\n "
|
||||
"value_double: 0.00203632\n value_double: -0.0010973\n "
|
||||
"value_double: -9.89852e-05\n value_double: 0.000558808\n "
|
||||
"value_double: 0.00087211\n value_double: 0.000661239\n "
|
||||
"value_double: 0.000389605\n value_double: 0.00060653\n "
|
||||
"value_double: -0.000330104\n }\n}\nchildren {\n name: \"DW2\"\n "
|
||||
"exec_micros: 0\n requested_bytes: 0\n parameters: 288\n "
|
||||
"370\nchildren {\n name: \"conv2d/bias\"\n exec_micros: 0\n "
|
||||
"requested_bytes: 0\n parameters: 5\n total_exec_micros: 0\n "
|
||||
"total_requested_bytes: 0\n total_parameters: 5\n float_ops: 0\n "
|
||||
"total_float_ops: 0\n tensor_value {\n dtype: DT_FLOAT\n "
|
||||
"value_double: 0\n value_double: 0\n value_double: 0\n "
|
||||
"value_double: 0\n value_double: 0\n }\n}\nchildren {\n name: "
|
||||
"\"conv2d/kernel\"\n exec_micros: 0\n requested_bytes: 0\n "
|
||||
"parameters: 135\n total_exec_micros: 0\n total_requested_bytes: 0\n "
|
||||
"total_parameters: 135\n float_ops: 0\n total_float_ops: 0\n "
|
||||
"tensor_value {\n dtype: DT_FLOAT\n value_double: -0.113138\n "
|
||||
"value_double: 0.261431\n value_double: 0.215777\n value_double: "
|
||||
"0.24135\n value_double: -0.113195\n value_double: -0.212639\n "
|
||||
"value_double: -0.0907301\n value_double: 0.0221634\n "
|
||||
"value_double: 0.21821\n value_double: 0.22715\n value_double: "
|
||||
"-0.108698\n value_double: 0.240911\n value_double: -0.138626\n "
|
||||
"value_double: -0.144752\n value_double: -0.00962037\n "
|
||||
"value_double: 0.0971008\n value_double: 0.00264764\n "
|
||||
"value_double: -0.272929\n value_double: 0.0129845\n value_double: "
|
||||
"0.0466554\n value_double: -0.229184\n value_double: 0.153576\n "
|
||||
"value_double: -0.169218\n value_double: -0.112991\n value_double: "
|
||||
"0.205739\n value_double: 0.257844\n value_double: 0.107455\n "
|
||||
"value_double: -0.207914\n value_double: 0.15211\n value_double: "
|
||||
"0.277932\n value_double: 0.145986\n value_double: -0.0883989\n "
|
||||
"value_double: 0.167506\n value_double: 0.10237\n value_double: "
|
||||
"0.0542143\n value_double: 0.0334378\n value_double: 0.159489\n "
|
||||
"value_double: 0.246583\n value_double: 0.0154283\n value_double: "
|
||||
"0.0872411\n value_double: -0.25732\n value_double: 0.0499355\n "
|
||||
"value_double: 0.0266221\n value_double: 0.088801\n value_double: "
|
||||
"-0.0794552\n value_double: -0.00383255\n value_double: "
|
||||
"-0.165267\n value_double: 0.0271328\n value_double: 0.0729822\n "
|
||||
" value_double: 0.200795\n value_double: 0.100276\n value_double: "
|
||||
"0.285254\n value_double: -0.171945\n value_double: -0.0187411\n "
|
||||
" value_double: -0.218729\n value_double: 0.233753\n value_double: "
|
||||
"0.109184\n value_double: 0.247875\n value_double: -0.224632\n "
|
||||
"value_double: 0.0940739\n value_double: 0.00663087\n "
|
||||
"value_double: -0.075786\n value_double: -0.179992\n value_double: "
|
||||
"-0.276016\n value_double: 0.261207\n value_double: -0.0658191\n "
|
||||
" value_double: -0.0747132\n value_double: -0.0839638\n "
|
||||
"value_double: -0.0825393\n value_double: 0.0915958\n "
|
||||
"value_double: -0.195425\n value_double: -0.255836\n value_double: "
|
||||
"-0.08745\n value_double: -0.181623\n value_double: -0.235936\n "
|
||||
"value_double: 0.0205423\n value_double: 0.185447\n value_double: "
|
||||
"-0.0691599\n value_double: -0.0451089\n value_double: -0.153922\n "
|
||||
" value_double: -0.0279411\n value_double: 0.148915\n "
|
||||
"value_double: -0.018026\n value_double: -0.144903\n value_double: "
|
||||
"0.0370046\n value_double: 0.0764987\n value_double: 0.0586488\n "
|
||||
" value_double: -0.222919\n value_double: 0.0238447\n "
|
||||
"value_double: -0.106012\n value_double: -0.102202\n value_double: "
|
||||
"-0.159347\n value_double: -0.0232876\n value_double: 0.109855\n "
|
||||
" value_double: -0.141833\n value_double: 0.1376\n value_double: "
|
||||
"-0.12413\n value_double: -0.208968\n value_double: 0.0758635\n "
|
||||
"value_double: -0.217672\n value_double: -0.20153\n value_double: "
|
||||
"-0.195414\n value_double: -0.18549\n value_double: 0.00298014\n "
|
||||
" value_double: -0.279283\n value_double: 0.200084\n value_double: "
|
||||
"-0.0968328\n value_double: -0.243\n value_double: 0.239319\n "
|
||||
"value_double: -0.236288\n value_double: 0.169477\n value_double: "
|
||||
"0.126673\n value_double: 0.182215\n value_double: -0.028243\n "
|
||||
"value_double: 0.282762\n value_double: -0.165548\n value_double: "
|
||||
"-0.0641245\n value_double: -0.186382\n value_double: 0.0329038\n "
|
||||
" value_double: 0.271848\n value_double: 0.084653\n value_double: "
|
||||
"-0.108163\n value_double: 0.247094\n value_double: 0.192687\n "
|
||||
"value_double: 0.171922\n value_double: -0.187649\n value_double: "
|
||||
"0.251253\n value_double: 0.272077\n value_double: 0.19068\n "
|
||||
"value_double: 0.220352\n value_double: -0.255741\n value_double: "
|
||||
"0.110853\n value_double: 0.146625\n value_double: 0.167754\n "
|
||||
"value_double: 0.249554\n }\n}\nchildren {\n name: \"conv2d_1/bias\"\n "
|
||||
" exec_micros: 0\n requested_bytes: 0\n parameters: 5\n "
|
||||
"total_exec_micros: 0\n total_requested_bytes: 0\n total_parameters: "
|
||||
"288\n float_ops: 0\n total_float_ops: 0\n tensor_value {\n dtype: "
|
||||
"DT_FLOAT\n value_double: 0.000704577\n value_double: "
|
||||
"0.000127421\n value_double: 0.00105952\n value_double: "
|
||||
"0.000423765\n value_double: -0.00025461\n value_double: "
|
||||
"-0.000857203\n value_double: 0.000693494\n value_double: "
|
||||
"0.000282214\n value_double: 0.00106185\n value_double: "
|
||||
"-0.000836552\n value_double: -0.00116766\n value_double: "
|
||||
"0.000733674\n value_double: -0.000669601\n value_double: "
|
||||
"-0.000275175\n value_double: -0.000428215\n value_double: "
|
||||
"-0.000495715\n value_double: -0.000125887\n value_double: "
|
||||
"-0.000715204\n value_double: -0.00108936\n value_double: "
|
||||
"0.000738267\n value_double: 0.000376081\n value_double: "
|
||||
"0.00191442\n value_double: 0.001423\n value_double: -0.00093811\n "
|
||||
" value_double: -5.91421e-05\n value_double: -0.000221507\n "
|
||||
"value_double: -0.000104555\n value_double: -0.00069682\n "
|
||||
"value_double: -0.000278325\n value_double: -0.00122748\n "
|
||||
"value_double: -0.00112411\n value_double: -0.000440511\n "
|
||||
"value_double: -0.000392247\n value_double: -0.000419606\n "
|
||||
"value_double: -0.00167063\n value_double: -0.000988578\n "
|
||||
"value_double: -0.00040159\n value_double: 0.00238918\n "
|
||||
"value_double: -0.000892898\n value_double: -0.000875976\n "
|
||||
"value_double: 0.00154401\n value_double: -0.000719911\n "
|
||||
"value_double: 0.000753941\n value_double: -0.000119961\n "
|
||||
"value_double: -0.000305115\n value_double: 9.97947e-05\n "
|
||||
"value_double: -0.00128908\n value_double: -0.000584184\n "
|
||||
"value_double: -0.000734685\n value_double: -0.00146612\n "
|
||||
"value_double: 0.000670802\n value_double: 0.000924219\n "
|
||||
"value_double: -0.000154409\n value_double: 0.000198231\n "
|
||||
"value_double: -0.000340742\n value_double: -0.00159646\n "
|
||||
"value_double: -1.19382e-05\n value_double: 0.00165203\n "
|
||||
"value_double: 0.0017085\n value_double: -0.000199614\n "
|
||||
"value_double: 0.000529526\n value_double: 0.000769364\n "
|
||||
"value_double: 0.00135369\n value_double: 0.00132873\n "
|
||||
"value_double: 0.000451174\n value_double: 0.000255218\n "
|
||||
"value_double: 0.00102891\n value_double: -0.00160068\n "
|
||||
"value_double: 0.000324269\n value_double: -0.000492347\n "
|
||||
"value_double: 0.000925301\n value_double: 0.00281998\n "
|
||||
"value_double: -0.000826404\n value_double: -0.000602903\n "
|
||||
"value_double: 0.00126559\n value_double: 0.000924364\n "
|
||||
"value_double: -9.19827e-05\n value_double: -5.59275e-05\n "
|
||||
"value_double: 0.00107971\n value_double: -9.91756e-05\n "
|
||||
"value_double: 0.000864708\n value_double: 0.00121747\n "
|
||||
"value_double: 0.00146338\n value_double: 0.000186883\n "
|
||||
"value_double: -0.00168195\n value_double: -0.00062029\n "
|
||||
"value_double: 0.000658127\n value_double: 0.00115682\n "
|
||||
"value_double: -0.00178359\n value_double: 0.000685606\n "
|
||||
"value_double: -0.000503373\n value_double: -0.000312999\n "
|
||||
"value_double: 0.000335383\n value_double: -1.08597e-05\n "
|
||||
"value_double: -8.2499e-05\n value_double: -0.000469726\n "
|
||||
"value_double: -0.00170868\n value_double: 0.000118957\n "
|
||||
"value_double: -0.000460736\n value_double: -5.56372e-05\n "
|
||||
"value_double: -0.00110148\n value_double: 0.00059123\n "
|
||||
"value_double: 0.000386339\n value_double: -0.00139967\n "
|
||||
"value_double: -0.000835664\n value_double: 0.00103421\n "
|
||||
"value_double: -0.00104296\n value_double: -0.000687497\n "
|
||||
"value_double: 1.1338e-05\n value_double: 0.00176484\n "
|
||||
"value_double: 0.000531523\n value_double: -0.000986387\n "
|
||||
"value_double: -0.00114152\n value_double: 0.000256744\n "
|
||||
"value_double: 0.000228425\n value_double: 0.00116583\n "
|
||||
"value_double: 0.0002726\n value_double: -0.00100828\n "
|
||||
"value_double: -0.000950376\n value_double: -0.00229074\n "
|
||||
"value_double: -0.000348272\n value_double: -0.000526032\n "
|
||||
"value_double: -0.000133703\n value_double: 0.000310979\n "
|
||||
"value_double: -0.00199278\n value_double: -0.000874469\n "
|
||||
"value_double: -0.000631466\n value_double: 0.0010534\n "
|
||||
"value_double: 0.00134646\n value_double: -0.00172743\n "
|
||||
"value_double: 0.00131031\n value_double: -0.000697506\n "
|
||||
"value_double: 0.000286747\n value_double: 0.000140759\n "
|
||||
"value_double: 0.000568707\n value_double: 0.000108177\n "
|
||||
"value_double: -0.00207337\n value_double: -0.00138146\n "
|
||||
"value_double: 0.000483162\n value_double: -0.00167096\n "
|
||||
"value_double: -0.000465813\n value_double: 0.00067724\n "
|
||||
"value_double: 2.08388e-05\n value_double: -0.00203279\n "
|
||||
"value_double: 7.8429e-05\n value_double: 0.00161337\n "
|
||||
"value_double: -0.000269005\n value_double: 0.000217822\n "
|
||||
"value_double: 0.000599886\n value_double: 0.000317549\n "
|
||||
"value_double: 0.00146597\n value_double: -0.00210947\n "
|
||||
"value_double: -0.000823917\n value_double: -6.83766e-05\n "
|
||||
"value_double: 0.000656085\n value_double: 0.000117134\n "
|
||||
"value_double: -0.000390405\n value_double: 2.39565e-05\n "
|
||||
"value_double: 0.00104837\n value_double: -0.000563671\n "
|
||||
"value_double: 0.000634073\n value_double: -0.000554531\n "
|
||||
"value_double: 0.000677971\n value_double: -0.000596207\n "
|
||||
"value_double: -0.00103335\n value_double: 0.000645199\n "
|
||||
"value_double: 0.00162195\n value_double: 0.000239246\n "
|
||||
"value_double: 0.00113519\n value_double: 0.000787431\n "
|
||||
"value_double: -0.000471688\n value_double: -0.000216625\n "
|
||||
"value_double: -0.000537156\n value_double: 0.000551816\n "
|
||||
"value_double: 0.00094337\n value_double: -0.000708127\n "
|
||||
"value_double: 0.000956955\n value_double: -0.000904936\n "
|
||||
"value_double: -0.000424413\n value_double: 0.000106455\n "
|
||||
"value_double: -0.000443952\n value_double: 0.000185436\n "
|
||||
"value_double: 0.000944397\n value_double: -0.000760572\n "
|
||||
"value_double: 0.000560002\n value_double: 4.09886e-05\n "
|
||||
"value_double: -0.00075076\n value_double: -0.000701856\n "
|
||||
"value_double: -0.000234851\n value_double: -0.000131515\n "
|
||||
"value_double: -0.000761718\n value_double: -0.000267808\n "
|
||||
"value_double: -0.00039682\n value_double: 0.000542953\n "
|
||||
"value_double: -0.000817685\n value_double: 0.00103851\n "
|
||||
"value_double: -0.000427176\n value_double: 0.000517784\n "
|
||||
"value_double: -0.000823552\n value_double: -0.000742637\n "
|
||||
"value_double: 0.000529213\n value_double: -0.000372805\n "
|
||||
"value_double: 1.85745e-05\n value_double: 0.00139891\n "
|
||||
"value_double: -0.000128417\n value_double: -0.000404316\n "
|
||||
"value_double: -0.000671571\n value_double: 0.000490311\n "
|
||||
"value_double: -0.00118493\n value_double: -0.000897118\n "
|
||||
"value_double: 0.000939601\n value_double: 0.000376399\n "
|
||||
"value_double: 0.0014709\n value_double: 0.000134806\n "
|
||||
"value_double: -0.000294469\n value_double: -0.000569142\n "
|
||||
"value_double: 0.00127266\n value_double: -0.00140936\n "
|
||||
"value_double: 0.000870083\n value_double: 0.000287246\n "
|
||||
"value_double: 0.000537685\n value_double: 0.000125569\n "
|
||||
"value_double: 0.000360276\n value_double: -0.000186268\n "
|
||||
"value_double: 0.0011141\n value_double: -0.000605185\n "
|
||||
"value_double: -0.0016281\n value_double: -0.000552758\n "
|
||||
"value_double: -0.000196755\n value_double: -0.00265188\n "
|
||||
"value_double: 0.000480997\n value_double: 0.00018776\n "
|
||||
"value_double: -0.00199234\n value_double: 0.000959982\n "
|
||||
"value_double: 0.00040334\n value_double: -0.000693596\n "
|
||||
"value_double: 0.00157678\n value_double: -0.00134499\n "
|
||||
"value_double: 0.00121909\n value_double: -0.000328734\n "
|
||||
"value_double: 0.000148554\n value_double: -0.000209509\n "
|
||||
"value_double: -0.000266303\n value_double: -0.00134084\n "
|
||||
"value_double: 5.21371e-05\n value_double: 0.0005329\n "
|
||||
"value_double: -0.000168858\n value_double: -0.00074875\n "
|
||||
"value_double: 0.000959397\n value_double: -0.00159476\n "
|
||||
"value_double: -0.000368838\n value_double: 0.0006077\n "
|
||||
"value_double: -0.00117243\n value_double: -0.00146013\n "
|
||||
"value_double: 0.00031519\n value_double: -0.000167911\n "
|
||||
"value_double: 0.000482571\n value_double: -0.000752268\n "
|
||||
"value_double: -0.00042363\n value_double: 0.00121219\n "
|
||||
"value_double: -0.000208159\n value_double: 0.000128531\n "
|
||||
"value_double: -0.000406308\n value_double: -0.000242663\n "
|
||||
"value_double: -3.96673e-05\n value_double: 0.00144854\n "
|
||||
"value_double: -0.000787328\n value_double: -0.000401958\n "
|
||||
"value_double: 0.00114091\n value_double: -0.000739546\n "
|
||||
"value_double: 0.000483236\n value_double: -0.000916945\n "
|
||||
"value_double: -0.00129577\n value_double: -0.00186504\n "
|
||||
"value_double: 0.000806804\n value_double: -0.000152251\n "
|
||||
"value_double: 0.000662576\n value_double: -0.000533236\n "
|
||||
"value_double: 0.00151019\n value_double: 0.00127805\n "
|
||||
"value_double: 0.00115399\n value_double: -0.00130876\n "
|
||||
"value_double: 2.99457e-06\n value_double: 0.000820777\n "
|
||||
"value_double: 0.000878393\n value_double: -0.000562642\n "
|
||||
"value_double: -0.00070442\n value_double: -0.00066277\n "
|
||||
"}\n}\nfloat_ops: 0\ntotal_float_ops: 0\n",
|
||||
"5\n float_ops: 0\n total_float_ops: 0\n tensor_value {\n dtype: "
|
||||
"DT_FLOAT\n value_double: 0\n value_double: 0\n value_double: "
|
||||
"0\n value_double: 0\n value_double: 0\n }\n}\nchildren {\n "
|
||||
"name: \"conv2d_1/kernel\"\n exec_micros: 0\n requested_bytes: 0\n "
|
||||
"parameters: 225\n total_exec_micros: 0\n total_requested_bytes: 0\n "
|
||||
"total_parameters: 225\n float_ops: 0\n total_float_ops: 0\n "
|
||||
"tensor_value {\n dtype: DT_FLOAT\n value_double: -0.00170514\n "
|
||||
"value_double: 0.138601\n value_double: -0.224822\n value_double: "
|
||||
"-0.0848449\n value_double: 0.170551\n value_double: 0.147666\n "
|
||||
"value_double: -0.0570606\n value_double: -0.132805\n "
|
||||
"value_double: -0.172013\n value_double: 0.249707\n value_double: "
|
||||
"0.149734\n value_double: 0.0365986\n value_double: -0.0923146\n "
|
||||
" value_double: -0.17745\n value_double: -0.169978\n value_double: "
|
||||
"-0.173298\n value_double: -0.110407\n value_double: 0.1469\n "
|
||||
"value_double: 0.0419576\n value_double: 0.0391093\n value_double: "
|
||||
"-0.137381\n value_double: 0.212642\n value_double: -0.067034\n "
|
||||
"value_double: -0.0727709\n value_double: -0.0276531\n "
|
||||
"value_double: 0.218212\n value_double: 0.0596479\n value_double: "
|
||||
"-0.0468102\n value_double: -0.0250467\n value_double: -0.20391\n "
|
||||
" value_double: -0.233801\n value_double: 0.135615\n "
|
||||
"value_double: -0.182124\n value_double: 0.254205\n value_double: "
|
||||
"0.0819146\n value_double: -0.146696\n value_double: -0.20095\n "
|
||||
"value_double: -0.250555\n value_double: -0.226406\n value_double: "
|
||||
"0.0421331\n value_double: 0.0361264\n value_double: -0.188558\n "
|
||||
" value_double: -0.0222711\n value_double: -0.128226\n "
|
||||
"value_double: -0.148305\n value_double: -0.137598\n value_double: "
|
||||
"-0.041647\n value_double: -0.0574933\n value_double: 0.122506\n "
|
||||
" value_double: 0.0415936\n value_double: 0.244957\n value_double: "
|
||||
"0.00372121\n value_double: -0.139939\n value_double: 0.250411\n "
|
||||
" value_double: -0.23848\n value_double: -0.0717569\n "
|
||||
"value_double: -0.00884159\n value_double: 0.135616\n "
|
||||
"value_double: -0.0493895\n value_double: 0.254308\n value_double: "
|
||||
"-0.181419\n value_double: -0.114829\n value_double: -0.172638\n "
|
||||
" value_double: 0.06984\n value_double: -0.086704\n value_double: "
|
||||
"0.168515\n value_double: -0.152275\n value_double: -0.230775\n "
|
||||
"value_double: -0.254366\n value_double: -0.115397\n value_double: "
|
||||
"0.0418207\n value_double: -0.199607\n value_double: -0.167001\n "
|
||||
" value_double: -0.187238\n value_double: 0.0196097\n "
|
||||
"value_double: 0.201653\n value_double: -0.143758\n value_double: "
|
||||
"0.167187\n value_double: -0.129141\n value_double: 0.230154\n "
|
||||
"value_double: -0.119968\n value_double: -0.121843\n value_double: "
|
||||
"-0.0118565\n value_double: 0.0285747\n value_double: -0.0593699\n "
|
||||
" value_double: -0.175214\n value_double: -0.211524\n "
|
||||
"value_double: 0.167042\n value_double: -0.216357\n value_double: "
|
||||
"-0.0218886\n value_double: -0.244211\n value_double: 0.175301\n "
|
||||
" value_double: 0.0654932\n value_double: -0.0419763\n "
|
||||
"value_double: -0.103275\n value_double: -0.0848433\n "
|
||||
"value_double: -0.0845421\n value_double: -0.00269318\n "
|
||||
"value_double: -0.145978\n value_double: -0.217061\n value_double: "
|
||||
"-0.0937043\n value_double: 0.235796\n value_double: -0.0893372\n "
|
||||
" value_double: 0.000827968\n value_double: 0.0172743\n "
|
||||
"value_double: -0.234205\n value_double: -0.0867703\n "
|
||||
"value_double: 0.131704\n value_double: 0.134143\n value_double: "
|
||||
"-0.162257\n value_double: -0.129706\n value_double: 0.0763288\n "
|
||||
" value_double: 0.156988\n value_double: 0.220033\n value_double: "
|
||||
"-0.179884\n value_double: 0.066697\n value_double: 0.212322\n "
|
||||
"value_double: -0.0961226\n value_double: -0.11223\n value_double: "
|
||||
"0.249944\n value_double: 0.115673\n value_double: -0.100203\n "
|
||||
"value_double: 0.125645\n value_double: -0.256104\n value_double: "
|
||||
"0.0996534\n value_double: 0.167306\n value_double: -0.00700775\n "
|
||||
" value_double: 0.242145\n value_double: 0.088406\n value_double: "
|
||||
"0.0975334\n value_double: -0.0309525\n value_double: -0.0422794\n "
|
||||
" value_double: 0.20739\n value_double: 0.113992\n value_double: "
|
||||
"0.253818\n value_double: -0.0857835\n value_double: 0.223902\n "
|
||||
"value_double: 0.10291\n value_double: 0.103091\n value_double: "
|
||||
"-0.177502\n value_double: -0.0258242\n value_double: -0.130567\n "
|
||||
" value_double: -0.15999\n value_double: -0.101484\n "
|
||||
"value_double: 0.0188813\n value_double: 0.160626\n value_double: "
|
||||
"0.0467491\n value_double: 0.193634\n value_double: -0.0910993\n "
|
||||
" value_double: 0.0440249\n value_double: -0.255389\n "
|
||||
"value_double: -0.240244\n value_double: -0.213171\n value_double: "
|
||||
"0.175978\n value_double: -0.0251202\n value_double: 0.0943941\n "
|
||||
" value_double: -0.196194\n value_double: 0.163395\n value_double: "
|
||||
"-0.010777\n value_double: -0.0626751\n value_double: -0.246234\n "
|
||||
" value_double: 0.0662063\n value_double: 0.120589\n "
|
||||
"value_double: 0.237322\n value_double: 0.0849243\n value_double: "
|
||||
"-0.066591\n value_double: 0.0512236\n value_double: -0.144309\n "
|
||||
" value_double: -0.235415\n value_double: -0.0565311\n "
|
||||
"value_double: 0.0882529\n value_double: -0.215923\n value_double: "
|
||||
"-0.0873292\n value_double: -0.0691103\n value_double: "
|
||||
"-0.00238678\n value_double: 0.147789\n value_double: -0.124451\n "
|
||||
" value_double: 0.205044\n value_double: -0.0596834\n "
|
||||
"value_double: 0.0268479\n value_double: 0.0857448\n value_double: "
|
||||
"-0.0923855\n value_double: -0.0960547\n value_double: 0.169869\n "
|
||||
" value_double: 0.16988\n value_double: -0.032271\n value_double: "
|
||||
"-0.120731\n value_double: -0.199086\n value_double: 0.181199\n "
|
||||
"value_double: 0.00897732\n value_double: -0.257469\n "
|
||||
"value_double: -0.135556\n value_double: -0.149663\n value_double: "
|
||||
"-0.00990398\n value_double: 0.221165\n value_double: 0.0327134\n "
|
||||
" value_double: -0.0392821\n value_double: -0.0614503\n "
|
||||
"value_double: 0.246602\n value_double: -0.171692\n value_double: "
|
||||
"-0.150835\n value_double: -0.13854\n value_double: -0.244668\n "
|
||||
"value_double: 0.0790781\n value_double: 0.212678\n value_double: "
|
||||
"0.0782059\n value_double: -0.177888\n value_double: -0.165914\n "
|
||||
" value_double: -0.164251\n value_double: 0.165007\n value_double: "
|
||||
"0.239615\n value_double: -0.217642\n value_double: -0.219843\n "
|
||||
"value_double: 0.0828398\n value_double: 0.00272235\n "
|
||||
"value_double: -0.0323662\n value_double: -0.255953\n "
|
||||
"value_double: 0.237298\n value_double: -0.0896481\n value_double: "
|
||||
"-0.0605349\n value_double: 0.231679\n value_double: -0.123842\n "
|
||||
" value_double: 0.0858642\n value_double: 0.23111\n value_double: "
|
||||
"0.0491742\n }\n}\nfloat_ops: 0\ntotal_float_ops: 0\n",
|
||||
&expected));
|
||||
EXPECT_EQ(expected.DebugString(), root.DebugString());
|
||||
}
|
||||
|