2 changes: 1 addition & 1 deletion paddle/py_paddle/dataprovider_converter.py
@@ -144,7 +144,7 @@ def finish_scan(self, argument):
         if len(self.__shape__) > 1:
             # The last-two dimenstions are the frame height and width.
             # For example, the layout is CHW for 3-D feature of image.
-            # The H and W are the fram height and width.
+            # The H and W are the frame height and width.
             h, w = self.__shape__[-2:]
             argument.setSlotFrameHeight(self.pos, h)
             argument.setSlotFrameWidth(self.pos, w)
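An illustrative aside (not part of the diff): with the CHW layout described in the comment, the last two entries of the shape are the frame height and width, e.g. for a hypothetical 3-channel 32x32 feature:

# Illustrative values only; not code from the diff.
shape = (3, 32, 32)  # C, H, W
h, w = shape[-2:]    # h == 32, w == 32, exactly what finish_scan extracts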
23 changes: 13 additions & 10 deletions python/paddle/v2/parameters.py
@@ -51,7 +51,7 @@ class Parameters(object):
     def __init__(self):
         self.__param_conf__ = dict()
         self.__gradient_machines__ = []
-        self.__tmp_params__ = []
+        self.__tmp_params__ = dict()

     def __append_config__(self, param_conf):
         """
@@ -128,13 +128,10 @@ def __getitem__(self, key):

         if len(self.__gradient_machines__) == 0:
             # create new parameter in python numpy.
-            if len(self.__tmp_params__) != 0:
-                ret_list = [
-                    mat for name, mat in self.__tmp_params__ if name == key
-                ]
-                if len(ret_list) == 1:
-                    return ret_list[0]
-            return np.ndarray(shape=shape, dtype=np.float32)
+            if key in self.__tmp_params__:
+                return self.__tmp_params__[key]
+            else:
+                return np.ndarray(shape=shape, dtype=np.float32)
         else:
             for each_gradient_machine in self.__gradient_machines__:
                 param = __get_parameter_in_gradient_machine__(
@@ -187,7 +184,7 @@ def __setitem__(self, key, value):
                 (shape, value.shape))

         if len(self.__gradient_machines__) == 0:
-            self.__tmp_params__.append((key, value))
Contributor Author: The append could lead to repeated keys in self.__tmp_params__. This is a bug.

+            self.__tmp_params__[key] = value
         else:
             for each_gradient_machine in self.__gradient_machines__:
                 __copy_parameter_to_gradient_machine__(each_gradient_machine,
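To illustrate the author's comment above, a minimal standalone sketch (plain ints stand in for parameter arrays; not Paddle code) of why the old list storage duplicated keys while the dict overwrites:

tmp_list = []
tmp_list.append(('param_0', 1))
tmp_list.append(('param_0', 2))   # setting the same name twice leaves both entries
assert len([v for k, v in tmp_list if k == 'param_0']) == 2

tmp_dict = dict()
tmp_dict['param_0'] = 1
tmp_dict['param_0'] = 2           # plain assignment replaces the stale value
assert tmp_dict['param_0'] == 2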
@@ -231,7 +228,7 @@ def append_gradient_machine(self, gradient_machine):
raise ValueError("gradient_machine should be api.GradientMachine")

if len(self.__tmp_params__) != 0:
for name, val in self.__tmp_params__:
for name, val in self.__tmp_params__.iteritems():
try:
__copy_parameter_to_gradient_machine__(gradient_machine,
name, val)
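Note: dict.iteritems() is Python 2 only, consistent with the rest of this codebase (xrange, cStringIO); a Python 3 port would use dict.items() here.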
@@ -302,6 +299,12 @@ def from_tar(f):
             params.deserialize(param_name, f)
         return params

+    def init_from_tar(self, f):
Collaborator: Because from_tar is a static method, maybe Parameters.from_tar is better.

Contributor Author: Done.
+        tar_param = Parameters.from_tar(f)
+        for pname in tar_param.names():
+            if pname in self.names():
+                self.set(pname, tar_param.get(pname))
+
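A possible usage sketch for the new method, assuming the usual v2 workflow; cost and the archive name are placeholders, not taken from this PR:

# Hedged sketch: initialize freshly created parameters from a saved
# archive; only names present on both sides are copied (see loop above).
import paddle.v2 as paddle

parameters = paddle.parameters.create(cost)  # cost: output of your topology
with open('params_pass_0.tar', 'rb') as f:   # archive written by to_tar()
    parameters.init_from_tar(f)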

 def __get_parameter_in_gradient_machine__(gradient_machine, name):
     """
57 changes: 52 additions & 5 deletions python/paddle/v2/tests/test_parameters.py
@@ -20,14 +20,17 @@
 import numpy


-def __rand_param_config__(name):
+def __rand_param_config__(name, psize=None):
     conf = ParameterConfig()
     conf.name = name
     size = 1
-    for i in xrange(2):
-        dim = random.randint(1, 1000)
-        conf.dims.append(dim)
-        size *= dim
+    if psize is None:
+        for i in xrange(2):
+            dim = random.randint(1, 1000)
+            conf.dims.append(dim)
+            size *= dim
+    else:
+        size = psize
     conf.size = size
     assert conf.IsInitialized()
     return conf
@@ -77,6 +80,50 @@ def initializer(name):
         expected = numpy.array([[1, 1], [1, 2], [1, 1]], numpy.float32)
         assert numpy.logical_and.reduce(numpy.reshape(val == expected, 6))

+    def test_init_from_tar(self):
+        def get_param(names, size):
+            p = parameters.Parameters()
+            for k, v in zip(names, size):
+                p.__append_config__(__rand_param_config__(k, v))
+            for name in p.names():
+                param = p.get(name)
+                param[:] = numpy.random.uniform(
+                    -1.0, 1.0, size=p.get_shape(name))
+                p.set(name, param)
+            return p
+
+        def get_parames():
+            name1 = ['param_0', 'param_1']
+            size1 = [128, 256]
+            p1 = get_param(name1, size1)
+            file1 = cStringIO.StringIO()
+            p1.to_tar(file1)
+            file1.seek(0)
+
+            name2 = ['param_0', 'param_1', 'param_2']
+            size2 = [128, 256, 288]
+            p2 = get_param(name2, size2)
+            file2 = cStringIO.StringIO()
+            p2.to_tar(file2)
+            file2.seek(0)
+            return p1, file1, p2, file2
+
+        p1, file1, p2, file2 = get_parames()
+        p2.init_from_tar(file1)
+        for name in p1.names():
+            self.assertEqual(p1.get_shape(name), p2.get_shape(name))
+            v1 = p1.get(name)
+            v2 = p2.get(name)
+            self.assertTrue(numpy.isclose(v1, v2).all())
+
+        p1, file1, p2, file2 = get_parames()
+        p1.init_from_tar(file2)
+        for name in p1.names():
+            self.assertEqual(p1.get_shape(name), p2.get_shape(name))
+            v1 = p1.get(name)
+            v2 = p2.get(name)
+            self.assertTrue(numpy.isclose(v1, v2).all())


 if __name__ == '__main__':
     unittest.main()
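The two passes in test_init_from_tar exercise both directions: p2 (three parameters) is initialized from p1's two-parameter archive, then p1 from p2's three-parameter archive; in each case only the overlapping names param_0 and param_1 are compared, and their values must match.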