
Commit f0bcd4f

Correctly expand dims of pointwise layer (#715)
* Correctly expand dims of pointwise layer
* Style rules applied
1 parent 5ac737c commit f0bcd4f

3 files changed: +114 / -62 lines

Lines changed: 38 additions & 15 deletions
@@ -1,39 +1,56 @@
-import numpy as np
 from copy import copy

-from hls4ml.model.optimizer import OptimizerPass
-from hls4ml.model.layers import register_layer
+import numpy as np
+
 from hls4ml.backends.fpga.fpga_layers import PointwiseConv1D, PointwiseConv2D
-from hls4ml.backends.quartus.passes.convolution_templates import Conv1DConfigTemplate, Conv1DFunctionTemplate, Conv2DConfigTemplate, Conv2DFunctionTemplate, conv1d_config_template, conv2d_config_template, conv_mult_config_template
+from hls4ml.backends.quartus.passes.convolution_templates import (
+    Conv1DConfigTemplate,
+    Conv1DFunctionTemplate,
+    Conv2DConfigTemplate,
+    Conv2DFunctionTemplate,
+    conv1d_config_template,
+    conv2d_config_template,
+    conv_mult_config_template,
+)
+from hls4ml.model.layers import register_layer
+from hls4ml.model.optimizer import OptimizerPass

 '''
 Custom hls4ml layer implementation for 1x1 Conv filters using im2col
 Allows lower latency andresource usage, due to less loop invocations
 '''

-pointwise_conv1d_function_template = 'nnet::pointwise_conv_1d_{data_format}<{input_t}, {output_t}, {config}>({input}, {output}, {w}, {b});'
-pointwise_conv2d_function_template = 'nnet::pointwise_conv_2d_{data_format}<{input_t}, {output_t}, {config}>({input}, {output}, {w}, {b});'
+pointwise_conv1d_function_template = (
+    'nnet::pointwise_conv_1d_{data_format}<{input_t}, {output_t}, {config}>({input}, {output}, {w}, {b});'
+)
+pointwise_conv2d_function_template = (
+    'nnet::pointwise_conv_2d_{data_format}<{input_t}, {output_t}, {config}>({input}, {output}, {w}, {b});'
+)

 sepconv1d_include_list = ['nnet_utils/nnet_conv1d.h']
 sepconv2d_include_list = ['nnet_utils/nnet_conv2d.h']

+
 class PointwiseConv1DConfigTemplate(Conv1DConfigTemplate):
     def __init__(self):
         super(Conv1DConfigTemplate, self).__init__(PointwiseConv1D)
         self.template = conv1d_config_template
         self.mult_template = conv_mult_config_template

+
 class PointwiseConv1DFunctionTemplate(Conv1DFunctionTemplate):
     def __init__(self):
         super(Conv1DFunctionTemplate, self).__init__(PointwiseConv1D, include_header=sepconv1d_include_list)
         self.template = pointwise_conv1d_function_template

+
 class PointwiseConv2DConfigTemplate(Conv2DConfigTemplate):
     def __init__(self):
         super(Conv2DConfigTemplate, self).__init__(PointwiseConv2D)
         self.template = conv2d_config_template
         self.mult_template = conv_mult_config_template

+
 class PointwiseConv2DFunctionTemplate(Conv2DFunctionTemplate):
     def __init__(self):
         super(Conv2DFunctionTemplate, self).__init__(PointwiseConv2D, include_header=sepconv2d_include_list)
@@ -54,19 +71,25 @@ def register_pointwise(backend):
     backend.register_template(PointwiseConv2DConfigTemplate)
     backend.register_template(PointwiseConv2DFunctionTemplate)

+
 class OptimizePointwiseConv(OptimizerPass):
     def match(self, node):
-        return node.class_name in ('Conv1D', 'Conv2D') and \
-            node.get_attr('filt_height', 1) == 1 and \
-            node.get_attr('filt_width') == 1 and \
-            node.model.config.get_config_value('IOType') == 'io_parallel'
+        return (
+            node.class_name in ('Conv1D', 'Conv2D')
+            and node.get_attr('filt_height', 1) == 1
+            and node.get_attr('filt_width') == 1
+            and node.model.config.get_config_value('IOType') == 'io_parallel'
+        )

     def transform(self, model, node):
-        dim = node.__class__.__name__[-2:] # '1D' or '2D'
-        pw_node = model.make_node('PointwiseConv' + dim, node.name, copy(node.attributes), node.inputs.copy(), outputs=node.outputs.copy())
-        if len(node.weights['weight'].data.shape) == 2: # This can happen if we assign weights of Dense layer to 1x1 Conv2D
-            pw_node.weights['weight'].data = np.expand_dims(node.weights['weight'].data, axis=(0,1))
+        dim = node.__class__.__name__[-2:]  # '1D' or '2D'
+        pw_node = model.make_node(
+            'PointwiseConv' + dim, node.name, copy(node.attributes), node.inputs.copy(), outputs=node.outputs.copy()
+        )
+        if len(node.weights['weight'].data.shape) == 2:  # This can happen if we assign weights of Dense layer to 1x1 Conv2D
+            expand_axis = tuple(range(int(dim[0])))
+            pw_node.weights['weight'].data = np.expand_dims(node.weights['weight'].data, axis=expand_axis)
         pw_node.weights['bias'].data = node.weights['bias'].data
         model.replace_node(node, pw_node)
-
+
         return True
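
The substance of the fix above is that the axis argument passed to np.expand_dims is now derived from the layer's dimensionality instead of being hard-coded to (0, 1). A minimal sketch of that behaviour, using an illustrative 16x32 weight matrix (the w_dense name and shape are hypothetical, not taken from the commit):

import numpy as np

w_dense = np.random.rand(16, 32)  # stand-in for a 2-D Dense-style weight matrix assigned to a 1x1 Conv

for dim in ('1D', '2D'):
    expand_axis = tuple(range(int(dim[0])))  # (0,) for '1D', (0, 1) for '2D'
    w_conv = np.expand_dims(w_dense, axis=expand_axis)
    print(dim, expand_axis, w_conv.shape)

# Prints:
# 1D (0,) (1, 16, 32)
# 2D (0, 1) (1, 1, 16, 32)
# The old hard-coded axis=(0, 1) produced a 4-D array even in the 1D case;
# the new code adds only one leading axis there.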
Lines changed: 35 additions & 14 deletions
@@ -1,34 +1,51 @@
-import numpy as np
 from copy import copy

-from hls4ml.model.optimizer import OptimizerPass
-from hls4ml.model.layers import register_layer
+import numpy as np
+
 from hls4ml.backends.fpga.fpga_layers import PointwiseConv1D, PointwiseConv2D
-from hls4ml.backends.vivado.passes.convolution_templates import Conv1DConfigTemplate, Conv1DFunctionTemplate, Conv2DConfigTemplate, Conv2DFunctionTemplate, conv1d_config_template, conv2d_config_template, conv_mult_config_template
+from hls4ml.backends.vivado.passes.convolution_templates import (
+    Conv1DConfigTemplate,
+    Conv1DFunctionTemplate,
+    Conv2DConfigTemplate,
+    Conv2DFunctionTemplate,
+    conv1d_config_template,
+    conv2d_config_template,
+    conv_mult_config_template,
+)
+from hls4ml.model.layers import register_layer
+from hls4ml.model.optimizer import OptimizerPass

-pointwise_conv1d_function_template = 'nnet::pointwise_conv_1d_{data_format}<{input_t}, {output_t}, {config}>({input}, {output}, {w}, {b});'
-pointwise_conv2d_function_template = 'nnet::pointwise_conv_2d_{data_format}<{input_t}, {output_t}, {config}>({input}, {output}, {w}, {b});'
+pointwise_conv1d_function_template = (
+    'nnet::pointwise_conv_1d_{data_format}<{input_t}, {output_t}, {config}>({input}, {output}, {w}, {b});'
+)
+pointwise_conv2d_function_template = (
+    'nnet::pointwise_conv_2d_{data_format}<{input_t}, {output_t}, {config}>({input}, {output}, {w}, {b});'
+)

 sepconv1d_include_list = ['nnet_utils/nnet_conv1d.h', 'nnet_utils/nnet_sepconv1d_stream.h']
 sepconv2d_include_list = ['nnet_utils/nnet_conv2d.h', 'nnet_utils/nnet_sepconv2d_stream.h']

+
 class PointwiseConv1DConfigTemplate(Conv1DConfigTemplate):
     def __init__(self):
         super(Conv1DConfigTemplate, self).__init__(PointwiseConv1D)
         self.template = conv1d_config_template
         self.mult_template = conv_mult_config_template

+
 class PointwiseConv1DFunctionTemplate(Conv1DFunctionTemplate):
     def __init__(self):
         super(Conv1DFunctionTemplate, self).__init__(PointwiseConv1D, include_header=sepconv1d_include_list)
         self.template = pointwise_conv1d_function_template

+
 class PointwiseConv2DConfigTemplate(Conv2DConfigTemplate):
     def __init__(self):
         super(Conv2DConfigTemplate, self).__init__(PointwiseConv2D)
         self.template = conv2d_config_template
         self.mult_template = conv_mult_config_template

+
 class PointwiseConv2DFunctionTemplate(Conv2DFunctionTemplate):
     def __init__(self):
         super(Conv2DFunctionTemplate, self).__init__(PointwiseConv2D, include_header=sepconv2d_include_list)
@@ -49,18 +66,22 @@ def register_pointwise(backend):
     backend.register_template(PointwiseConv2DConfigTemplate)
     backend.register_template(PointwiseConv2DFunctionTemplate)

+
 class OptimizePointwiseConv(OptimizerPass):
     def match(self, node):
-        return node.class_name in ('Conv1D', 'Conv2D') and \
-            node.get_attr('filt_height', 1) == 1 and \
-            node.get_attr('filt_width') == 1
+        return (
+            node.class_name in ('Conv1D', 'Conv2D')
+            and node.get_attr('filt_height', 1) == 1
+            and node.get_attr('filt_width') == 1
+        )

     def transform(self, model, node):
-        dim = node.__class__.__name__[-2:] # '1D' or '2D'
+        dim = node.__class__.__name__[-2:]  # '1D' or '2D'
         pw_node = model.make_node('PointwiseConv' + dim, node.name, copy(node.attributes), node.inputs.copy())
-        if len(node.weights['weight'].data.shape) == 2: # This can happen if we assign weights of Dense layer to 1x1 Conv2D
-            pw_node.weights['weight'].data = np.expand_dims(node.weights['weight'].data, axis=(0,1))
+        if len(node.weights['weight'].data.shape) == 2:  # This can happen if we assign weights of Dense layer to 1x1 Conv2D
+            expand_axis = tuple(range(int(dim[0])))
+            pw_node.weights['weight'].data = np.expand_dims(node.weights['weight'].data, axis=expand_axis)
         pw_node.weights['bias'].data = node.weights['bias'].data
         model.replace_node(node, pw_node)
-
-        return True
+
+        return True
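
For reference, this Vivado variant of the pass matches any 1x1 Conv1D/Conv2D regardless of IOType. A small stand-in sketch of the match condition (the _FakeNode class below is illustrative only, not hls4ml API): a Conv1D with unit filter width matches because 'filt_height' falls back to its default of 1, while a 3x3 Conv2D does not.

class _FakeNode:
    """Illustrative stand-in for an hls4ml layer node."""

    def __init__(self, class_name, attrs):
        self.class_name = class_name
        self.attributes = attrs

    def get_attr(self, key, default=None):
        # Mirrors the attribute lookup used in OptimizePointwiseConv.match()
        return self.attributes.get(key, default)


def matches(node):
    # Same condition as the Vivado pass above (no io_parallel check here)
    return (
        node.class_name in ('Conv1D', 'Conv2D')
        and node.get_attr('filt_height', 1) == 1
        and node.get_attr('filt_width') == 1
    )


print(matches(_FakeNode('Conv1D', {'filt_width': 1})))                    # True
print(matches(_FakeNode('Conv2D', {'filt_height': 3, 'filt_width': 3})))  # False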
Lines changed: 41 additions & 33 deletions
@@ -1,17 +1,21 @@
-from hls4ml.model.optimizer import OptimizerPass
-from hls4ml.model.layers import Dense
 import numpy as np

+from hls4ml.model.layers import Dense
+from hls4ml.model.optimizer import OptimizerPass
+
+
 class ReplaceMultidimensionalDenseWithConv(OptimizerPass):
     def match(self, node):
-        return isinstance(node, Dense) and \
-            len(node.get_input_variable().shape) - sum(d==1 for d in node.get_input_variable().shape) > 1
-        # The above sum checks for the number of dimensions in the Dense with size 1
-        # The subtraction allows the check to only count the number of dimensions with non-1 size
-        # For example, this prevents matching for a Dense layer with shape (1,N)
+        return (
+            isinstance(node, Dense)
+            and len(node.get_input_variable().shape) - sum(d == 1 for d in node.get_input_variable().shape) > 1
+        )
+        # The above sum checks for the number of dimensions in the Dense with size 1
+        # The subtraction allows the check to only count the number of dimensions with non-1 size
+        # For example, this prevents matching for a Dense layer with shape (1,N)

     def transform(self, model, node):
-        dim = len(node.get_input_variable().shape) - 1
+        dim = len(node.get_input_variable().shape) - 1
         input_shape = node.get_input_variable().shape

         pointwise_attrs = {
@@ -22,37 +26,41 @@ def transform(self, model, node):
         }

         if dim == 1:
-            pointwise_attrs.update({
-                'in_width': input_shape[0],
-                'out_width': input_shape[0],
-                'filt_width': 1,
-                'stride_width': 1,
-                'pad_left': 0,
-                'pad_right': 0,
-            })
+            pointwise_attrs.update(
+                {
+                    'in_width': input_shape[0],
+                    'out_width': input_shape[0],
+                    'filt_width': 1,
+                    'stride_width': 1,
+                    'pad_left': 0,
+                    'pad_right': 0,
+                }
+            )
         elif dim == 2:
-            pointwise_attrs.update({
-                'in_height': input_shape[0],
-                'in_width': input_shape[1],
-                'out_height': input_shape[0],
-                'out_width': input_shape[1],
-                'filt_height': 1,
-                'filt_width': 1,
-                'stride_height': 1,
-                'stride_width': 1,
-                'pad_top': 0,
-                'pad_bottom': 0,
-                'pad_left': 0,
-                'pad_right': 0,
-            })
+            pointwise_attrs.update(
+                {
+                    'in_height': input_shape[0],
+                    'in_width': input_shape[1],
+                    'out_height': input_shape[0],
+                    'out_width': input_shape[1],
+                    'filt_height': 1,
+                    'filt_width': 1,
+                    'stride_height': 1,
+                    'stride_width': 1,
+                    'pad_top': 0,
+                    'pad_bottom': 0,
+                    'pad_left': 0,
+                    'pad_right': 0,
+                }
+            )
         else:
             raise Exception('Cannot replace Dense over {dim}D tensor with Conv{dim}D.'.format(dim=dim))

         class_name = 'PointwiseConv' + str(dim) + 'D'
         pw_node = model.make_node(class_name, node.name, pointwise_attrs, node.inputs.copy())
-        if len(node.weights['weight'].data.shape) == 2: # This can happen if we assign weights of Dense layer to 1x1 Conv2D
-            pw_node.weights['weight'].data = np.expand_dims(node.weights['weight'].data, axis=(0,1))
+        if len(node.weights['weight'].data.shape) == 2:  # This can happen if we assign weights of Dense layer to 1x1 Conv2D
+            pw_node.weights['weight'].data = np.expand_dims(node.weights['weight'].data, axis=tuple(range(dim)))
         pw_node.weights['bias'].data = node.weights['bias'].data
         model.replace_node(node, pw_node)
-
+
         return True
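
The match() check in this pass counts only the non-unit dimensions of the Dense layer's input, so a Dense over a (1, N) tensor is left alone while a genuinely multidimensional input is converted to a pointwise convolution. A short sketch of that check with hypothetical input shapes (plain Python, no hls4ml objects):

def has_multidim_input(shape):
    # Number of dimensions minus the number of size-1 dimensions,
    # i.e. the count of non-unit dimensions, as in ReplaceMultidimensionalDenseWithConv.match()
    return len(shape) - sum(d == 1 for d in shape) > 1

print(has_multidim_input((1, 64)))      # False: only one non-unit dim, the Dense layer is kept
print(has_multidim_input((100, 4)))     # True: replaced with PointwiseConv1D (dim = len(shape) - 1 = 1)
print(has_multidim_input((28, 28, 3)))  # True: replaced with PointwiseConv2D (dim = 2)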
