33 changes: 33 additions & 0 deletions python/paddle/fluid/layers/nn.py
@@ -73,6 +73,7 @@
    'smooth_l1',
    'one_hot',
    'autoincreased_step_counter',
    'lrn',
]


@@ -3292,3 +3293,35 @@ def autoincreased_step_counter(counter_name=None, begin=1, step=1):
    counter.stop_gradient = True

    return counter


def lrn(input, n=5, k=2.0, alpha=1e-4, beta=0.75, name=None):
"""
This function helps create an operator to implement
the LRN layer using the configurations from the input parameters.

Args:
n(int): The number of channels to sum over
k(float): An offset (usually positive to avoid dividing by 0)
alpha(float): The scaling parameter
beta(float): The exponent
name(str): A name for this operation
Contributor
In the description, please give a brief introduction to LRN and cite a reference with a link (if one is available).
In the argument list, `input` is missing, default values are not documented, and necessary punctuation is absent.
There are other problems as well. Please refer to the API Docstring Writing Standard when writing the docstring, and also pay attention to the wording.

Contributor Author

done

"""
    helper = LayerHelper('lrn', **locals())
    dtype = helper.input_dtype()

    # MidOut holds an intermediate result reused by the backward pass;
    # gradients are stopped on it since it is not a learnable output.
    mid_out = helper.create_tmp_variable(dtype=dtype, stop_gradient=True)
    lrn_out = helper.create_tmp_variable(dtype)
    helper.append_op(
        type="lrn",
        inputs={"X": input},
        outputs={
            "Out": lrn_out,
            "MidOut": mid_out,
        },
        attrs={"n": n,
               "k": k,
               "alpha": alpha,
               "beta": beta})

    return lrn_out
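
For reference, here is a minimal sketch of how the new layer could be exercised and checked against the across-channel formula in the docstring. Neither snippet is part of this diff: `lrn_reference` is a hypothetical pure-NumPy helper written for illustration, and the usage example assumes the usual `fluid.layers.data` entry point.

```python
import numpy as np

def lrn_reference(x, n=5, k=2.0, alpha=1e-4, beta=0.75):
    """Hypothetical NumPy reference for across-channel LRN on [N, C, H, W]."""
    C = x.shape[1]
    out = np.empty_like(x)
    for i in range(C):
        lo = max(0, i - n // 2)
        hi = min(C - 1, i + n // 2)
        # Sum of squares over the local channel window centered on channel i.
        sqr_sum = (x[:, lo:hi + 1, :, :] ** 2).sum(axis=1)
        out[:, i, :, :] = x[:, i, :, :] / (k + alpha * sqr_sum) ** beta
    return out

x = np.random.rand(2, 8, 4, 4).astype('float32')
assert lrn_reference(x).shape == x.shape
```

Inside a fluid program, the layer would then be invoked along these lines:

```python
import paddle.fluid as fluid

pixel = fluid.layers.data(name='pixel', shape=[3, 224, 224], dtype='float32')
normed = fluid.layers.lrn(input=pixel, n=5, k=2.0, alpha=1e-4, beta=0.75)
```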