Skip to content

Commit 9592468

Browse files
authored
Fix gcc4.9 (#6442)
* Fix compile errors with gcc 4.9. * Refine the check of C++ compiler flags in paddle/api/CMakeLists.txt.
1 parent 74ead9d commit 9592468

File tree

11 files changed

+118
-71
lines changed

11 files changed

+118
-71
lines changed

paddle/api/CMakeLists.txt

Lines changed: 11 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -25,8 +25,18 @@ FILE(GLOB PY_PADDLE_PYTHON_FILES ${PADDLE_SOURCE_DIR}/paddle/py_paddle/*.py)
2525

2626
SET_SOURCE_FILES_PROPERTIES(Paddle.i PROPERTIES CPLUSPLUS ON)
2727

28+
SET(SWIG_NEED_FLAGS
29+
-ftls-model=global-dynamic
30+
-Wno-parentheses-equality
31+
-Wno-self-assign
32+
-Wno-maybe-uninitialized
33+
-Wno-missing-field-initializers)
34+
FOREACH(flag ${SWIG_NEED_FLAGS})
35+
safe_set_cxxflag(SWIG_CXX_FLAGS ${flag})
36+
ENDFOREACH()
37+
2838
SET(CMAKE_SWIG_OUTDIR ${CMAKE_CURRENT_BINARY_DIR})
29-
SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-parentheses-equality -Wno-missing-field-initializers -Wno-self-assign -ftls-model=global-dynamic")
39+
SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${SWIG_CXX_FLAGS}")
3040

3141
SET(SWIG_MODULE_swig_paddle_EXTRA_DEPS
3242
paddle_parameter

paddle/framework/backward.cc

Lines changed: 11 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -190,8 +190,9 @@ static std::unique_ptr<OperatorBase> BackwardRecursive(
190190
// collect all the offset for each alias,
191191
// insert a sum operator to add all aliases to output
192192
insert_position.push_back(
193-
{dup_op.back(), OpRegistry::CreateOp("sum", {{"X", dup_outputs}},
194-
{{"Out", {name}}}, {})});
193+
{dup_op.back(),
194+
OpRegistry::CreateOp("sum", {{"X", dup_outputs}}, {{"Out", {name}}},
195+
AttributeMap{})});
195196
}
196197

197198
// make sure the inserted `sum` ops follow the BFS order.
@@ -216,7 +217,8 @@ static std::unique_ptr<OperatorBase> BackwardRecursive(
216217
// If part of input gradient of that operator is not calculated, fill
217218
// zero variables to that input gradient.
218219
net->AppendOp(OpRegistry::CreateOp("fill_zeros_like", {{"X", {prefix}}},
219-
{{"Y", {grad_input}}}, {}));
220+
{{"Y", {grad_input}}},
221+
AttributeMap{}));
220222
}
221223
return false;
222224
});
@@ -392,8 +394,9 @@ std::vector<std::unique_ptr<OpDescBind>> MakeOpGrad(
392394
0, in_name.size() - sizeof(kGradVarSuffix) / sizeof(char) + 1);
393395
std::string new_name = prefix + kZeroVarSuffix;
394396
desc->Rename(in_name, new_name);
395-
std::unique_ptr<OpDescBind> fill_zeros_op(new OpDescBind(
396-
"fill_zeros_like", {{"X", {prefix}}}, {{"Y", {new_name}}}, {}));
397+
std::unique_ptr<OpDescBind> fill_zeros_op(
398+
new OpDescBind("fill_zeros_like", {{"X", {prefix}}},
399+
{{"Y", {new_name}}}, AttributeMap{}));
397400
pending_fill_zeros_ops.push_back(std::move(fill_zeros_op));
398401
}
399402
}
@@ -483,8 +486,9 @@ std::vector<std::unique_ptr<OpDescBind>> MakeBlockBackward(
483486
sum_op_inputs.emplace_back(new_name);
484487
next_g_name = sum_op_inputs.back();
485488
}
486-
std::unique_ptr<OpDescBind> sum_op(new OpDescBind(
487-
"sum", {{"X", sum_op_inputs}}, {{"Out", {out_name}}}, {}));
489+
std::unique_ptr<OpDescBind> sum_op(
490+
new OpDescBind("sum", {{"X", sum_op_inputs}}, {{"Out", {out_name}}},
491+
AttributeMap{}));
488492
pending_sum_ops.push_back({dup_op.back(), std::move(sum_op)});
489493
}
490494
}

paddle/framework/backward_test.cc

Lines changed: 40 additions & 30 deletions
Original file line numberDiff line numberDiff line change
@@ -106,15 +106,15 @@ class FcOp : public operators::NetOp {
106106
FcOp(const std::string &type, const VariableNameMap &inputs,
107107
const VariableNameMap &outputs, const AttributeMap &attrs)
108108
: NetOp(type, inputs, outputs, attrs) {
109-
AppendOp(OpRegistry::CreateOp("mul",
110-
{{"X", {Input("X")}}, {"Y", {Input("W")}}},
111-
{{"Out", {Output("mul_result")}}}, {}));
109+
AppendOp(OpRegistry::CreateOp(
110+
"mul", {{"X", {Input("X")}}, {"Y", {Input("W")}}},
111+
{{"Out", {Output("mul_result")}}}, AttributeMap{}));
112112
auto input_b = Inputs("b");
113113
std::string before_act = "mul_result";
114114
if (input_b.size() != 0) {
115115
AppendOp(OpRegistry::CreateOp(
116116
"rowwise_add", {{"X", {Output("mul_result")}}, {"b", {input_b[0]}}},
117-
{{"Out", {Output("add_result")}}}, {}));
117+
{{"Out", {Output("add_result")}}}, AttributeMap{}));
118118
before_act = "add_result";
119119
} else {
120120
auto out_varname = Output("add_result");
@@ -124,7 +124,7 @@ class FcOp : public operators::NetOp {
124124
}
125125

126126
AppendOp(OpRegistry::CreateOp("sigmoid", {{"X", {Output(before_act)}}},
127-
{{"Out", {Output("Out")}}}, {}));
127+
{{"Out", {Output("Out")}}}, AttributeMap{}));
128128
CompleteAddOp(false);
129129
}
130130
};
@@ -278,8 +278,9 @@ REGISTER_OPERATOR(scale, f::NoneOp);
278278
REGISTER_OP_CPU_KERNEL(scale, f::NoneKernel<paddle::platform::CPUPlace, float>);
279279

280280
TEST(Backward, simple_op_not_need_grad) {
281-
auto fwd = f::OpRegistry::CreateOp(
282-
"rowwise_add", {{"X", {"x"}}, {"b", {"b"}}}, {{"Out", {"out"}}}, {});
281+
auto fwd =
282+
f::OpRegistry::CreateOp("rowwise_add", {{"X", {"x"}}, {"b", {"b"}}},
283+
{{"Out", {"out"}}}, f::AttributeMap{});
283284
ASSERT_NE(fwd, nullptr);
284285
auto gop = f::Backward(*fwd, {"x"});
285286
ASSERT_EQ(gop->Output(f::GradVarName("X")), f::kEmptyVarName);
@@ -296,9 +297,10 @@ TEST(Backward, net_fc_backward_normal) {
296297
{{"mul_result", {"mul_res"}},
297298
{"add_result", {"add_re"}},
298299
{"Out", {"out"}}},
299-
{});
300+
f::AttributeMap{});
300301
ASSERT_NE(fwd, nullptr);
301-
std::shared_ptr<f::OperatorBase> gop = f::Backward(*fwd, {});
302+
std::shared_ptr<f::OperatorBase> gop =
303+
f::Backward(*fwd, std::unordered_set<std::string>{});
302304
ASSERT_TRUE(gop->IsNetOp());
303305
auto net = static_cast<ops::NetOp *>(gop.get());
304306

@@ -322,9 +324,10 @@ TEST(Backward, net_fc_backward_not_have_b) {
322324
{{"mul_result", {"mul_res"}},
323325
{"add_result", {"add_res"}},
324326
{"Out", {"tmp"}}},
325-
{});
327+
f::AttributeMap{});
326328
ASSERT_NE(fwd, nullptr);
327-
std::shared_ptr<f::OperatorBase> gop = f::Backward(*fwd, {});
329+
std::shared_ptr<f::OperatorBase> gop =
330+
f::Backward(*fwd, std::unordered_set<std::string>{});
328331
ASSERT_TRUE(gop->IsNetOp());
329332
auto net = static_cast<ops::NetOp *>(gop.get());
330333

@@ -346,13 +349,13 @@ TEST(Backward, net_input_of_network_not_need_grad) {
346349
{{"mul_result", {"mul_tmp_0"}},
347350
{"add_result", {"add_tmp_0"}},
348351
{"Out", {"hidden0"}}},
349-
{}));
352+
f::AttributeMap{}));
350353
net.AppendOp(f::OpRegistry::CreateOp(
351354
"fc", {{"X", {"hidden0"}}, {"W", {"W2"}}, {"b", {"b2"}}},
352355
{{"mul_result", {"mul_tmp_1"}},
353356
{"add_result", {"add_tmp_1"}},
354357
{"Out", {"hidden1"}}},
355-
{}));
358+
f::AttributeMap{}));
356359
net.CompleteAddOp();
357360
auto bwd = Backward(net, {"x"}); // x@GRAD is not need.
358361
ASSERT_TRUE(bwd->IsNetOp());
@@ -381,39 +384,43 @@ TEST(Backward, net_input_of_network_not_need_grad) {
381384
TEST(Backward, net_shared_weight) {
382385
ops::NetOp net;
383386
net.AppendOp(f::OpRegistry::CreateOp("mul", {{"X", {"x"}}, {"Y", {"w"}}},
384-
{{"Out", {"out"}}}, {}));
387+
{{"Out", {"out"}}}, f::AttributeMap{}));
385388
net.AppendOp(f::OpRegistry::CreateOp("mul", {{"X", {"out"}}, {"Y", {"w"}}},
386-
{{"Out", {"FinalOut"}}}, {}));
389+
{{"Out", {"FinalOut"}}},
390+
f::AttributeMap{}));
387391
net.CompleteAddOp();
388392

389-
auto bwd = f::Backward(net, {});
393+
auto bwd = f::Backward(net, std::unordered_set<std::string>{});
390394
ASSERT_TRUE(bwd->IsNetOp());
391395
auto bwd_net = static_cast<ops::NetOp *>(bwd.get());
392396
ASSERT_EQ(3UL, bwd_net->ops_.size());
393397
ASSERT_EQ("sum", bwd_net->ops_[2]->Type());
394398
}
395399

396400
TEST(Backward, op_all_input_are_not_need) {
397-
auto fwd = f::OpRegistry::CreateOp(
398-
"rowwise_add", {{"X", {"x"}}, {"b", {"b"}}}, {{"Out", {"out"}}}, {});
401+
auto fwd =
402+
f::OpRegistry::CreateOp("rowwise_add", {{"X", {"x"}}, {"b", {"b"}}},
403+
{{"Out", {"out"}}}, f::AttributeMap{});
399404
auto backward = f::Backward(*fwd, {"x", "b"});
400405
ASSERT_TRUE(backward->IsNetOp());
401406
auto net = static_cast<ops::NetOp *>(backward.get());
402407
ASSERT_TRUE(net->ops_.empty());
403408
}
404409

405410
TEST(Backward, op_all_output_are_not_need) {
406-
auto fwd = f::OpRegistry::CreateOp(
407-
"rowwise_add", {{"X", {"x"}}, {"b", {"b"}}}, {{"Out", {"out"}}}, {});
411+
auto fwd =
412+
f::OpRegistry::CreateOp("rowwise_add", {{"X", {"x"}}, {"b", {"b"}}},
413+
{{"Out", {"out"}}}, f::AttributeMap{});
408414
auto backward = f::Backward(*fwd, {"out"});
409415
ASSERT_TRUE(backward->IsNetOp());
410416
auto net = static_cast<ops::NetOp *>(backward.get());
411417
ASSERT_TRUE(net->ops_.empty());
412418
}
413419

414420
TEST(Backward, op_part_of_output_are_not_need) {
415-
auto fwd = f::OpRegistry::CreateOp("many_output_op", {{"x", {"X"}}},
416-
{{"y", {"Y"}}, {"z", {"Z"}}}, {});
421+
auto fwd =
422+
f::OpRegistry::CreateOp("many_output_op", {{"x", {"X"}}},
423+
{{"y", {"Y"}}, {"z", {"Z"}}}, f::AttributeMap{});
417424
auto backward = f::Backward(*fwd, {"Z"});
418425
ASSERT_TRUE(backward->IsNetOp());
419426
auto net = static_cast<ops::NetOp *>(backward.get());
@@ -437,7 +444,7 @@ TEST(Backward, op_part_of_output_are_not_need) {
437444

438445
TEST(Backward, op_part_of_input_are_not_need) {
439446
auto fwd = f::OpRegistry::CreateOp("mul", {{"X", {"a"}}, {"Y", {"b"}}},
440-
{{"Out", {"out"}}}, {});
447+
{{"Out", {"out"}}}, f::AttributeMap{});
441448
auto backward = f::Backward(*fwd, {"a"});
442449
auto &grad_mul = *backward;
443450
ASSERT_EQ(grad_mul.Type(), "mul_grad");
@@ -458,19 +465,19 @@ TEST(Backward, linear_net_intermediate_variable_has_no_grad) {
458465
{{"mul_result", {"mul_out1"}},
459466
{"add_result", {"add_out1"}},
460467
{"Out", {"out1"}}},
461-
{}));
468+
f::AttributeMap{}));
462469
net.AppendOp(f::OpRegistry::CreateOp(
463470
"fc", {{"X", {"out1"}}, {"W", {"w2"}}, {"b", {"b2"}}},
464471
{{"mul_result", {"mul_out2"}},
465472
{"add_result", {"tmp_out2"}},
466473
{"Out", {"out2"}}},
467-
{}));
474+
f::AttributeMap{}));
468475
net.AppendOp(f::OpRegistry::CreateOp(
469476
"fc", {{"X", {"out2"}}, {"W", {"w3"}}, {"b", {"b3"}}},
470477
{{"mul_result", {"mul_out3"}},
471478
{"add_result", {"tmp_out3"}},
472479
{"Out", {"out3"}}},
473-
{}));
480+
f::AttributeMap{}));
474481
net.CompleteAddOp();
475482

476483
auto backward = f::Backward(net, {"mul_out2", "tmp_out2", "out2"});
@@ -509,7 +516,8 @@ TEST(Backward, simple_single_op) {
509516

510517
auto target = f::VarDescBind("out");
511518
target.SetShape({1});
512-
auto var_to_grad = AppendBackward(program, target, {});
519+
auto var_to_grad =
520+
AppendBackward(program, target, std::unordered_set<std::string>{});
513521

514522
ASSERT_EQ(block->AllOps().size(), 3UL);
515523
f::OpDescBind *fill_op = block->AllOps()[1];
@@ -546,7 +554,7 @@ TEST(Backward, default_attribute) {
546554

547555
auto target = f::VarDescBind("out");
548556
target.SetShape({1});
549-
AppendBackward(program, target, {});
557+
AppendBackward(program, target, std::unordered_set<std::string>{});
550558

551559
ASSERT_EQ(block->AllOps().size(), 3UL);
552560
EXPECT_EQ(boost::get<int>(op->GetAttr("x_num_col_dims")), 1);
@@ -585,7 +593,8 @@ TEST(Backward, simple_mult_op) {
585593
auto target = f::VarDescBind("out3");
586594
target.SetShape({1});
587595
size_t forward_len = block->AllOps().size();
588-
auto var_to_grad = AppendBackward(program, target, {});
596+
auto var_to_grad =
597+
AppendBackward(program, target, std::unordered_set<std::string>{});
589598

590599
ASSERT_EQ(block->AllOps().size(), 6UL + 1);
591600
f::OpDescBind *fill_op = block->AllOps()[forward_len];
@@ -817,7 +826,8 @@ TEST(Backward, shared_var) {
817826
auto target = f::VarDescBind("out3");
818827
target.SetShape({1});
819828
size_t forward_len = block->AllOps().size();
820-
auto var_to_grad = AppendBackward(program, target, {});
829+
auto var_to_grad =
830+
AppendBackward(program, target, std::unordered_set<std::string>{});
821831

822832
ASSERT_EQ(block->AllOps().size(), 8UL);
823833
f::OpDescBind *fill_op = block->AllOps()[forward_len];

paddle/framework/op_desc.cc

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -316,8 +316,8 @@ static void InitInferShapeFuncs() {
316316
for (auto &kern_pair : OperatorWithKernel::AllOpKernels()) {
317317
auto op_type = kern_pair.first;
318318
auto &op_info = info_map.at(op_type);
319-
auto op =
320-
static_cast<OperatorWithKernel *>(op_info.Creator()("", {}, {}, {}));
319+
auto op = static_cast<OperatorWithKernel *>(op_info.Creator()(
320+
"", VariableNameMap{}, VariableNameMap{}, AttributeMap{}));
321321
if (op_info.infer_shape_) { // infer_shape has been registered.
322322
continue;
323323
}

paddle/framework/operator_test.cc

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -261,7 +261,9 @@ class OperatorClone : public paddle::framework::OperatorBase {
261261
};
262262

263263
TEST(Operator, Clone) {
264-
OperatorClone a("ABC", {}, {}, {});
264+
OperatorClone a("ABC", paddle::framework::VariableNameMap{},
265+
paddle::framework::VariableNameMap{},
266+
paddle::framework::AttributeMap{});
265267
auto b = a.Clone();
266268
ASSERT_EQ(a.Type(), b->Type());
267269
}

paddle/framework/prune_test.cc

Lines changed: 29 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -54,7 +54,8 @@ TEST(Prune, one_operator) {
5454
f::ProgramDescBind program;
5555
f::BlockDescBind *block = program.MutableBlock(0);
5656

57-
AddOp("one_one", {{"input", {"a"}}}, {{"output", {"b"}}}, {}, block);
57+
AddOp("one_one", {{"input", {"a"}}}, {{"output", {"b"}}}, f::AttributeMap{},
58+
block);
5859

5960
f::ProgramDesc *pdesc = program.Proto();
6061
f::ProgramDesc pruned;
@@ -71,10 +72,14 @@ TEST(Prune, forward) {
7172
f::ProgramDescBind program;
7273
f::BlockDescBind *block = program.MutableBlock(0);
7374

74-
AddOp("one_one", {{"input", {"a"}}}, {{"output", {"b"}}}, {}, block);
75-
AddOp("one_one", {{"input", {"b"}}}, {{"output", {"c"}}}, {}, block);
76-
AddOp("one_one", {{"input", {"c"}}}, {{"output", {"d"}}}, {}, block);
77-
AddOp("one_one", {{"input", {"d"}}}, {{"output", {"e"}}}, {}, block);
75+
AddOp("one_one", {{"input", {"a"}}}, {{"output", {"b"}}}, f::AttributeMap{},
76+
block);
77+
AddOp("one_one", {{"input", {"b"}}}, {{"output", {"c"}}}, f::AttributeMap{},
78+
block);
79+
AddOp("one_one", {{"input", {"c"}}}, {{"output", {"d"}}}, f::AttributeMap{},
80+
block);
81+
AddOp("one_one", {{"input", {"d"}}}, {{"output", {"e"}}}, f::AttributeMap{},
82+
block);
7883

7984
f::ProgramDesc *pdesc = program.Proto();
8085

@@ -90,11 +95,14 @@ TEST(Prune, multi_input_op) {
9095
f::ProgramDescBind program;
9196
f::BlockDescBind *block = program.MutableBlock(0);
9297

93-
AddOp("one_one", {{"input", {"a0"}}}, {{"output", {"b0"}}}, {}, block);
94-
AddOp("one_one", {{"input", {"a1"}}}, {{"output", {"b1"}}}, {}, block);
95-
AddOp("one_one", {{"input", {"a2"}}}, {{"output", {"b2"}}}, {}, block);
96-
AddOp("three_one", {{"input", {"b0", "b1", "b2"}}}, {{"output", {"c"}}}, {},
98+
AddOp("one_one", {{"input", {"a0"}}}, {{"output", {"b0"}}}, f::AttributeMap{},
99+
block);
100+
AddOp("one_one", {{"input", {"a1"}}}, {{"output", {"b1"}}}, f::AttributeMap{},
97101
block);
102+
AddOp("one_one", {{"input", {"a2"}}}, {{"output", {"b2"}}}, f::AttributeMap{},
103+
block);
104+
AddOp("three_one", {{"input", {"b0", "b1", "b2"}}}, {{"output", {"c"}}},
105+
f::AttributeMap{}, block);
98106

99107
f::ProgramDesc *pdesc = program.Proto();
100108
pdesc->mutable_blocks(0)->mutable_ops(3)->set_is_target(true);
@@ -108,9 +116,12 @@ TEST(Prune, multi_output_op) {
108116
f::ProgramDescBind program;
109117
f::BlockDescBind *block = program.MutableBlock(0);
110118

111-
AddOp("one_two", {{"input", {"a"}}}, {{"output", {"b", "c"}}}, {}, block);
112-
AddOp("one_one", {{"input", {"b"}}}, {{"output", {"b1"}}}, {}, block);
113-
AddOp("one_one", {{"input", {"c"}}}, {{"output", {"c1"}}}, {}, block);
119+
AddOp("one_two", {{"input", {"a"}}}, {{"output", {"b", "c"}}},
120+
f::AttributeMap{}, block);
121+
AddOp("one_one", {{"input", {"b"}}}, {{"output", {"b1"}}}, f::AttributeMap{},
122+
block);
123+
AddOp("one_one", {{"input", {"c"}}}, {{"output", {"c1"}}}, f::AttributeMap{},
124+
block);
114125

115126
f::ProgramDesc *pdesc = program.Proto();
116127
pdesc->mutable_blocks(0)->mutable_ops(2)->set_is_target(true);
@@ -124,9 +135,12 @@ TEST(Prune, multi_target) {
124135
f::ProgramDescBind program;
125136
f::BlockDescBind *block = program.MutableBlock(0);
126137

127-
AddOp("one_two", {{"input", {"a"}}}, {{"output", {"b", "c"}}}, {}, block);
128-
AddOp("one_one", {{"input", {"b"}}}, {{"output", {"b1"}}}, {}, block);
129-
AddOp("one_one", {{"input", {"c"}}}, {{"output", {"c1"}}}, {}, block);
138+
AddOp("one_two", {{"input", {"a"}}}, {{"output", {"b", "c"}}},
139+
f::AttributeMap{}, block);
140+
AddOp("one_one", {{"input", {"b"}}}, {{"output", {"b1"}}}, f::AttributeMap{},
141+
block);
142+
AddOp("one_one", {{"input", {"c"}}}, {{"output", {"c1"}}}, f::AttributeMap{},
143+
block);
130144

131145
f::ProgramDesc *pdesc = program.Proto();
132146
pdesc->mutable_blocks(0)->mutable_ops(1)->set_is_target(true);

paddle/operators/conditional_block_op.cc

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -142,9 +142,9 @@ class ConditionalBlockGradOp : public ConditionalOp {
142142
continue;
143143
}
144144
auto new_in_grad_name = cur_scope.Rename(in_grad_name);
145-
auto assign =
146-
framework::OpRegistry::CreateOp("assign", {{"X", {new_in_grad_name}}},
147-
{{"Out", {out_grad_name}}}, {});
145+
auto assign = framework::OpRegistry::CreateOp(
146+
"assign", {{"X", {new_in_grad_name}}}, {{"Out", {out_grad_name}}},
147+
framework::AttributeMap{});
148148
assign->Run(cur_scope, dev_ctx);
149149
cur_scope.Rename(new_in_grad_name, in_grad_name);
150150
}

paddle/operators/net_op.h

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -38,7 +38,10 @@ namespace operators {
3838
class NetOp : public framework::OperatorBase {
3939
public:
4040
static const char kAll[];
41-
NetOp() : framework::OperatorBase("plain_net", {}, {}, {}) {}
41+
NetOp()
42+
: framework::OperatorBase("plain_net", framework::VariableNameMap{},
43+
framework::VariableNameMap{},
44+
framework::AttributeMap{}) {}
4245

4346
NetOp(const std::string& type, const framework::VariableNameMap& inputs,
4447
const framework::VariableNameMap& outputs,

0 commit comments

Comments
 (0)