Skip to content

Commit ba97194

Browse files
authored
Merge pull request #10165 from JiayiFeng/fix_Clang_compile_error
fix Clang compile errors: remove unused lambda captures (`this`, `stream`, `rootNetwork`) rejected by Clang's -Wunused-lambda-capture
2 parents 2c8fe4e + 87f9191 commit ba97194

File tree

6 files changed

+19
-21
lines changed

6 files changed

+19
-21
lines changed

paddle/gserver/dataproviders/PyDataProvider2.cpp

Lines changed: 1 addition & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -390,9 +390,7 @@ class PyDataProvider2 : public DataProvider {
390390

391391
if (this->loadThread_) { // wait poolActualSize < poolSize;
392392
std::unique_lock<std::mutex> l(mtx_);
393-
pushCV_.wait(l, [this, additionalBatchSize] {
394-
return this->poolActualSize_ < poolSize_;
395-
});
393+
pushCV_.wait(l, [this] { return this->poolActualSize_ < poolSize_; });
396394
}
397395

398396
{

paddle/gserver/gradientmachines/MultiGradientMachine.cpp

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -52,7 +52,7 @@ MultiGradientMachine::MultiGradientMachine(const ModelConfig& config,
5252
} else {
5353
numDevices_ = 0;
5454
}
55-
ParamInitCallback mainParamInitCb = [this](int paramId, Parameter* para) {
55+
ParamInitCallback mainParamInitCb = [](int paramId, Parameter* para) {
5656
// only create buf for CPU parameters
5757
// GPU parameters will be created in each thread
5858
if (para->useGpu()) return;

paddle/gserver/layers/RecurrentLayerGroup.cpp

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -72,7 +72,7 @@ void RecurrentLayerGroup::initSubNetwork(
7272
setNeedGradient(true);
7373

7474
network_.reset(new RecurrentGradientMachine(config_.name(), rootNetwork));
75-
ParamInitCallback cb = [this, rootNetwork](int paramId, Parameter* para) {
75+
ParamInitCallback cb = [rootNetwork](int paramId, Parameter* para) {
7676
para->enableSharedType(
7777
PARAMETER_VALUE,
7878
rootNetwork->getParameters()[paramId]->getBuf(PARAMETER_VALUE),

paddle/parameter/Argument.cpp

Lines changed: 7 additions & 7 deletions
Original file line number | Diff line number | Diff line change
@@ -325,12 +325,12 @@ void Argument::concat(const std::vector<Argument>& args,
325325
->copyFrom(*src->subVec(srcStartRow, size), stream);
326326
};
327327

328-
auto copyStrs = [batchSize, stream](SVectorPtr& dst,
329-
const SVectorPtr& src,
330-
int desStartRow,
331-
int srcStartRow,
332-
int size,
333-
bool useGpu) {
328+
auto copyStrs = [batchSize](SVectorPtr& dst,
329+
const SVectorPtr& src,
330+
int desStartRow,
331+
int srcStartRow,
332+
int size,
333+
bool useGpu) {
334334
if (!src) {
335335
dst.reset();
336336
return;
@@ -413,7 +413,7 @@ void Argument::concat(const std::vector<Argument>& args,
413413
dst->subVec(startRow, src->getSize())->copyFrom(*src, stream);
414414
};
415415

416-
auto copyStrs = [batchSize, stream](
416+
auto copyStrs = [batchSize](
417417
SVectorPtr& dst, const SVectorPtr& src, int startRow, bool useGpu) {
418418
if (!src) {
419419
dst.reset();

paddle/parameter/AverageOptimizer.cpp

Lines changed: 6 additions & 6 deletions
Original file line number | Diff line number | Diff line change
@@ -81,9 +81,9 @@ ParameterOptimizer::TraverseCallback AverageOptimizer::needSpecialTraversal(
8181
if (numUpdates_ % kMaxNumAccumulates == 0) {
8282
// Move the sum to a different buffer to avoid loss of precision
8383
// due to too many sums.
84-
callbacks.emplace_back([this](const VectorPtr vecs[],
85-
const ParameterConfig& config,
86-
size_t sparseId) {
84+
callbacks.emplace_back([](const VectorPtr vecs[],
85+
const ParameterConfig& config,
86+
size_t sparseId) {
8787
vecs[PARAMETER_SUM2]->add(*vecs[PARAMETER_SUM1]);
8888
vecs[PARAMETER_SUM1]->zeroMem();
8989
});
@@ -94,9 +94,9 @@ ParameterOptimizer::TraverseCallback AverageOptimizer::needSpecialTraversal(
9494
if (auto callback = this->startCatchUpWith()) {
9595
callbacks.emplace_back(callback);
9696
}
97-
callbacks.emplace_back([this](const VectorPtr vecs[],
98-
const ParameterConfig& config,
99-
size_t sparseId) {
97+
callbacks.emplace_back([](const VectorPtr vecs[],
98+
const ParameterConfig& config,
99+
size_t sparseId) {
100100
vecs[PARAMETER_SUM3]->add(*vecs[PARAMETER_SUM1], *vecs[PARAMETER_SUM2]);
101101
vecs[PARAMETER_SUM1]->zeroMem();
102102
vecs[PARAMETER_SUM2]->zeroMem();

paddle/parameter/FirstOrderOptimizer.cpp

Lines changed: 3 additions & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -145,9 +145,9 @@ AdagradParameterOptimizer::needSpecialTraversal(
145145
if (numUpdates_ % kMaxNumAccumulates == 0) {
146146
// Move the sum to a different buffer to avoid loss of precision
147147
// due to too many sums.
148-
return [this](const VectorPtr vecs[],
149-
const ParameterConfig& config,
150-
size_t sparseId) {
148+
return [](const VectorPtr vecs[],
149+
const ParameterConfig& config,
150+
size_t sparseId) {
151151
vecs[PARAMETER_GRADIENT_SQURESUM]->add(
152152
*vecs[PARAMETER_GRADIENT_SQURESUM1]);
153153
vecs[PARAMETER_GRADIENT_SQURESUM1]->zeroMem();

0 commit comments

Comments (0)