Merged. Changes from all commits.
4 changes: 1 addition & 3 deletions paddle/gserver/dataproviders/PyDataProvider2.cpp
@@ -390,9 +390,7 @@ class PyDataProvider2 : public DataProvider {
 
   if (this->loadThread_) {  // wait poolActualSize < poolSize;
     std::unique_lock<std::mutex> l(mtx_);
-    pushCV_.wait(l, [this, additionalBatchSize] {
-      return this->poolActualSize_ < poolSize_;
-    });
+    pushCV_.wait(l, [this] { return this->poolActualSize_ < poolSize_; });
   }
 
   {
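The dropped capture, `additionalBatchSize`, is never read inside the wait predicate; compilers such as Clang diagnose exactly this with `-Wunused-lambda-capture`. A minimal, self-contained sketch of the pattern (the names below are stand-ins, not PaddlePaddle code):

    #include <condition_variable>
    #include <mutex>

    std::mutex mtx;
    std::condition_variable cv;
    int poolActualSize = 0;
    constexpr int poolSize = 8;

    void waitForRoom(int additionalBatchSize) {
      (void)additionalBatchSize;  // unused here, mirroring the original call site
      std::unique_lock<std::mutex> l(mtx);
      // Capturing additionalBatchSize would be flagged by Clang's
      // -Wunused-lambda-capture, because the predicate never reads it.
      cv.wait(l, [] { return poolActualSize < poolSize; });
    }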
2 changes: 1 addition & 1 deletion paddle/gserver/gradientmachines/MultiGradientMachine.cpp
@@ -52,7 +52,7 @@ MultiGradientMachine::MultiGradientMachine(const ModelConfig& config,
   } else {
     numDevices_ = 0;
   }
-  ParamInitCallback mainParamInitCb = [this](int paramId, Parameter* para) {
+  ParamInitCallback mainParamInitCb = [](int paramId, Parameter* para) {
     // only create buf for CPU parameters
     // GPU parameters will be created in each thread
     if (para->useGpu()) return;
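Here the lambda body touches only its parameters, so the `this` capture was dead weight. A sketch assuming `ParamInitCallback` is a `std::function`-style alias (the `Parameter` type below is a hypothetical stand-in):

    #include <functional>
    #include <iostream>

    // Hypothetical stand-ins for paddle's Parameter and ParamInitCallback.
    struct Parameter {
      bool useGpu() const { return false; }
    };
    using ParamInitCallback = std::function<void(int, Parameter*)>;

    int main() {
      // No members are touched, so no [this] is needed; a capture-free
      // lambda even converts to a plain function pointer.
      ParamInitCallback cb = [](int paramId, Parameter* para) {
        if (para->useGpu()) return;
        std::cout << "init CPU buffer for parameter " << paramId << "\n";
      };
      void (*fp)(int, Parameter*) = [](int, Parameter*) {};  // conversion works
      Parameter p;
      cb(0, &p);
      fp(0, &p);
    }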
2 changes: 1 addition & 1 deletion paddle/gserver/layers/RecurrentLayerGroup.cpp
@@ -72,7 +72,7 @@ void RecurrentLayerGroup::initSubNetwork(
   setNeedGradient(true);
 
   network_.reset(new RecurrentGradientMachine(config_.name(), rootNetwork));
-  ParamInitCallback cb = [this, rootNetwork](int paramId, Parameter* para) {
+  ParamInitCallback cb = [rootNetwork](int paramId, Parameter* para) {
     para->enableSharedType(
         PARAMETER_VALUE,
         rootNetwork->getParameters()[paramId]->getBuf(PARAMETER_VALUE),
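Only `this` is dropped; `rootNetwork` stays because the body dereferences it. A sketch of pruning a capture list down to exactly what the body reads (the `Network` type is hypothetical):

    #include <memory>
    #include <vector>

    // Hypothetical stand-in for the shared root network.
    struct Network {
      std::vector<int> params;
    };

    int main() {
      auto rootNetwork = std::make_shared<Network>();
      rootNetwork->params = {10, 20, 30};

      // Keep only what the body reads: rootNetwork stays, this goes.
      auto cb = [rootNetwork](int paramId) { return rootNetwork->params[paramId]; };
      return cb(1) == 20 ? 0 : 1;
    }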
14 changes: 7 additions & 7 deletions paddle/parameter/Argument.cpp
@@ -325,12 +325,12 @@ void Argument::concat(const std::vector<Argument>& args,
         ->copyFrom(*src->subVec(srcStartRow, size), stream);
   };
 
-  auto copyStrs = [batchSize, stream](SVectorPtr& dst,
-                                      const SVectorPtr& src,
-                                      int desStartRow,
-                                      int srcStartRow,
-                                      int size,
-                                      bool useGpu) {
+  auto copyStrs = [batchSize](SVectorPtr& dst,
+                              const SVectorPtr& src,
+                              int desStartRow,
+                              int srcStartRow,
+                              int size,
+                              bool useGpu) {
     if (!src) {
       dst.reset();
       return;
@@ -413,7 +413,7 @@ void Argument::concat(const std::vector<Argument>& args,
     dst->subVec(startRow, src->getSize())->copyFrom(*src, stream);
   };
 
-  auto copyStrs = [batchSize, stream](
+  auto copyStrs = [batchSize](
       SVectorPtr& dst, const SVectorPtr& src, int startRow, bool useGpu) {
     if (!src) {
       dst.reset();
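In both `copyStrs` lambdas the neighbouring `copyArg`/copy helper uses `stream`, but the string path never does. Unused by-value captures are not just noise: each one copies its object into the closure. A sketch showing the copy that dropping `stream` avoids (the `Stream` type is a hypothetical stand-in with an observable copy constructor):

    #include <cstdio>

    // A type with an observable copy, standing in for a stream handle.
    struct Stream {
      Stream() = default;
      Stream(const Stream&) { std::puts("stream copied"); }
    };

    int main() {
      Stream stream;
      int batchSize = 4;
      // The unused by-value capture still copies `stream` into the closure
      // (and Clang flags it with -Wunused-lambda-capture).
      auto wasteful = [batchSize, stream](int n) { return n * batchSize; };
      auto lean = [batchSize](int n) { return n * batchSize; };
      return wasteful(1) + lean(1) == 8 ? 0 : 1;
    }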
12 changes: 6 additions & 6 deletions paddle/parameter/AverageOptimizer.cpp
@@ -81,9 +81,9 @@ ParameterOptimizer::TraverseCallback AverageOptimizer::needSpecialTraversal(
   if (numUpdates_ % kMaxNumAccumulates == 0) {
     // Move the sum to a different buffer to avoid loss of precision
     // due to too many sums.
-    callbacks.emplace_back([this](const VectorPtr vecs[],
-                                  const ParameterConfig& config,
-                                  size_t sparseId) {
+    callbacks.emplace_back([](const VectorPtr vecs[],
+                              const ParameterConfig& config,
+                              size_t sparseId) {
       vecs[PARAMETER_SUM2]->add(*vecs[PARAMETER_SUM1]);
       vecs[PARAMETER_SUM1]->zeroMem();
     });
@@ -94,9 +94,9 @@ ParameterOptimizer::TraverseCallback AverageOptimizer::needSpecialTraversal(
   if (auto callback = this->startCatchUpWith()) {
     callbacks.emplace_back(callback);
   }
-  callbacks.emplace_back([this](const VectorPtr vecs[],
-                                const ParameterConfig& config,
-                                size_t sparseId) {
+  callbacks.emplace_back([](const VectorPtr vecs[],
+                            const ParameterConfig& config,
+                            size_t sparseId) {
     vecs[PARAMETER_SUM3]->add(*vecs[PARAMETER_SUM1], *vecs[PARAMETER_SUM2]);
     vecs[PARAMETER_SUM1]->zeroMem();
     vecs[PARAMETER_SUM2]->zeroMem();
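Note the asymmetry here: the unused `this` capture can be deleted, but the unused `config` and `sparseId` parameters must stay, because they are part of the callback signature the caller invokes. A sketch with simplified types (`Vec` and `Callback` are hypothetical stand-ins):

    #include <cstddef>
    #include <functional>
    #include <vector>

    using Vec = std::vector<double>;
    using Callback = std::function<void(Vec& sum1, Vec& sum2)>;

    int main() {
      std::vector<Callback> callbacks;
      // The capture list is empty, but the parameters remain: they belong to
      // the signature the caller uses, even if a given body ignores some.
      callbacks.emplace_back([](Vec& sum1, Vec& sum2) {
        for (std::size_t i = 0; i < sum1.size(); ++i) sum2[i] += sum1[i];
        sum1.assign(sum1.size(), 0.0);  // analogous to zeroMem()
      });
      Vec a{1, 2}, b{0, 0};
      for (auto& cb : callbacks) cb(a, b);
      return (b[0] == 1 && b[1] == 2 && a[0] == 0) ? 0 : 1;
    }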
6 changes: 3 additions & 3 deletions paddle/parameter/FirstOrderOptimizer.cpp
@@ -145,9 +145,9 @@ AdagradParameterOptimizer::needSpecialTraversal(
   if (numUpdates_ % kMaxNumAccumulates == 0) {
     // Move the sum to a different buffer to avoid loss of precision
     // due to too many sums.
-    return [this](const VectorPtr vecs[],
-                  const ParameterConfig& config,
-                  size_t sparseId) {
+    return [](const VectorPtr vecs[],
+              const ParameterConfig& config,
+              size_t sparseId) {
      vecs[PARAMETER_GRADIENT_SQURESUM]->add(
          *vecs[PARAMETER_GRADIENT_SQURESUM1]);
      vecs[PARAMETER_GRADIENT_SQURESUM1]->zeroMem();
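The same pattern applies to the returned Adagrad callback: with `[this]` gone, the lambda is capture-free and the result depends on no particular optimizer instance. A sketch of a factory returning such a callback (simplified, hypothetical types):

    #include <cstddef>
    #include <functional>
    #include <vector>

    using Vec = std::vector<double>;
    using TraverseCallback = std::function<void(Vec&, Vec&)>;

    // With the unused [this] removed, the returned lambda is capture-free,
    // so the factory no longer depends on any object state.
    TraverseCallback makeAccumulateCallback() {
      return [](Vec& squresum, Vec& squresum1) {
        for (std::size_t i = 0; i < squresum.size(); ++i)
          squresum[i] += squresum1[i];
        squresum1.assign(squresum1.size(), 0.0);  // analogous to zeroMem()
      };
    }

    int main() {
      Vec total{1.0}, recent{2.5};
      makeAccumulateCallback()(total, recent);
      return (total[0] == 3.5 && recent[0] == 0.0) ? 0 : 1;
    }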