Skip to content

Commit 7109f0e

Browse files
authored
Fix for QActivations passed as an argument (fastmachinelearning#553)
* Fix handling of QKeras activations passed as an argument
* Add a test for QKeras activations passed as an argument
1 parent 2dafb98 commit 7109f0e

File tree

3 files changed

+119
-16
lines changed

3 files changed

+119
-16
lines changed

hls4ml/converters/keras_to_hls.py

Lines changed: 25 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -328,16 +328,32 @@ def keras_to_hls(config):
328328
layer_list.append( layer )
329329
if 'activation' in layer and layer['class_name'] not in activation_layers + recurrent_layers:# + qkeras_layers:
330330
act_layer = {}
331-
act_layer['name'] = layer['name'] + '_' + layer['activation']
332-
act_layer['activation'] = layer['activation']
333-
if 'activ_param' in layer:
334-
act_layer['activ_param'] = layer['activ_param']
335-
act_layer['class_name'] = layer['activation']
336-
elif layer['activation'] == 'softmax':
337-
act_layer['class_name'] = 'Softmax'
338-
act_layer['axis'] = -1
331+
# Workaround for QKeras activations passed as an argument
332+
if isinstance(layer['activation'], dict):
333+
act_details = layer['activation']
334+
act_layer['class_name'] = 'QActivation'
335+
act_layer['config'] = {
336+
'name': layer['name'] + '_' + act_details['class_name'],
337+
'activation': act_details['class_name']
338+
}
339+
act_layer, output_shape = layer_handlers['QActivation'](
340+
act_layer,
341+
None,
342+
[output_shape],
343+
reader,
344+
config
345+
)
339346
else:
340-
act_layer['class_name'] = 'Activation'
347+
act_layer['name'] = layer['name'] + '_' + layer['activation']
348+
act_layer['activation'] = layer['activation']
349+
if 'activ_param' in layer:
350+
act_layer['activ_param'] = layer['activ_param']
351+
act_layer['class_name'] = layer['activation']
352+
elif layer['activation'] == 'softmax':
353+
act_layer['class_name'] = 'Softmax'
354+
act_layer['axis'] = -1
355+
else:
356+
act_layer['class_name'] = 'Activation'
341357
inputs_map[layer['name']] = act_layer['name']
342358
if output_layers is not None and layer['name'] in output_layers:
343359
output_layers = [act_layer['name'] if name == layer['name'] else name for name in output_layers]

hls4ml/utils/config.py

Lines changed: 12 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -177,14 +177,21 @@ def config_from_keras_model(model, granularity='model', default_precision='ap_fi
177177

178178
print('Layer name: {}, layer type: {}'.format(layer['name'], layer['class_name']))
179179
layer_list.append( layer )
180-
if 'activation' in layer['config'] and layer['class_name'] not in activation_layers + qkeras_layers:
180+
if 'activation' in layer['config'] and layer['class_name'] not in activation_layers:
181181
act_layer = {}
182-
act_layer['name'] = layer['name'] + '_' + layer['config']['activation']
183-
act_layer['class_name'] = 'Activation'
184-
print(' -> Activation ({}), layer name: {}'.format(layer['config']['activation'], layer['name']))
182+
act_details = layer['config']['activation']
183+
if isinstance(act_details, dict):
184+
precision = _get_precision_from_quantizer(act_details)
185+
act_details = act_details['class_name']
186+
act_layer['precision'] = {}
187+
act_layer['precision']['result'] = precision
188+
act_layer['class_name'] = 'QActivation'
189+
else:
190+
act_layer['class_name'] = 'Activation'
191+
act_layer['name'] = layer['name'] + '_' + act_details
192+
print(' -> Activation ({}), layer name: {}'.format(act_details, layer['name']))
185193
layer_list.append(act_layer)
186194

187-
188195
def make_layer_config(layer):
189196
layer_config = {}
190197
if layer['class_name'] in dense_layers + conv_layers + rnn_layers:

test/pytest/test_qkeras.py

Lines changed: 82 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -6,10 +6,10 @@
66
from sklearn.datasets import fetch_openml
77
from sklearn.model_selection import train_test_split
88
from sklearn.preprocessing import LabelEncoder, StandardScaler
9-
from tensorflow.keras.models import Sequential, model_from_json
9+
from tensorflow.keras.models import Sequential, Model, model_from_json
1010
from tensorflow.keras.optimizers import Adam
1111
from tensorflow.keras.regularizers import l1
12-
from tensorflow.keras.layers import Activation, BatchNormalization
12+
from tensorflow.keras.layers import Activation, BatchNormalization, Input
1313
from qkeras.qlayers import QDense, QActivation
1414
from qkeras.quantizers import quantized_bits, quantized_relu, ternary, binary
1515
from qkeras.utils import _add_supported_quantized_objects; co = {}; _add_supported_quantized_objects(co)
@@ -228,3 +228,83 @@ def test_quantizer(randX_1000_1, quantizer, backend):
228228
y_hls4ml = hls_model.predict(X)
229229
# Goal is to get it passing with all equal
230230
np.testing.assert_array_equal(y_qkeras, y_hls4ml)
231+
232+
233+
@pytest.mark.parametrize(
    'weight_quantizer,activation_quantizer,', [
        ('binary', 'binary'),
        ('ternary', 'ternary'),
        ('quantized_bits(4, 0, alpha=1)', 'quantized_relu(2, 0)'),
        ('quantized_bits(4, 0, alpha=1)', 'quantized_relu(4, 0)'),
        ('quantized_bits(4, 0, alpha=1)', 'quantized_relu(8, 0)'),
    ]
)
def test_qactivation_kwarg(randX_100_10,
                           activation_quantizer,
                           weight_quantizer):
    """Verify QKeras activations passed via the ``activation=`` kwarg of QDense.

    The converter must surface the inline quantizer as its own named layer in
    the HLS model, and the HLS model output must track the QKeras output.
    """
    # Parametrize values are source strings (matching the style of the other
    # tests in this file); instantiate the quantizer object once up front.
    act_obj = eval(activation_quantizer)

    # Expected name of the activation layer produced by the converter.
    if activation_quantizer in ['binary', 'ternary']:
        expected_name = 'bnbt_qdense_alpha'
    else:
        expected_name = 'qdense_{}'.format(act_obj.__class__.__name__)

    inputs = Input(shape=(10,))
    outputs = QDense(
        10,
        activation=activation_quantizer,
        name='qdense',
        kernel_quantizer=weight_quantizer,
        bias_quantizer=weight_quantizer,
        kernel_initializer='lecun_uniform',
    )(inputs)
    model = Model(inputs, outputs)

    # Configure rounding/saturation on the activation so the HLS output can
    # match QKeras closely.
    rs_optimizer = hls4ml.model.optimizer.get_optimizer(
        'output_rounding_saturation_mode')
    rs_optimizer.configure(
        layers=[expected_name],
        rounding_mode='AP_RND_CONV',
        saturation_mode='AP_SAT',
    )
    config = hls4ml.utils.config_from_keras_model(model, granularity='name')

    out_dir = str(test_root_path / 'hls4mlprj_qactivation_kwarg_{}'.format(
        activation_quantizer))

    hls_model = hls4ml.converters.convert_from_keras_model(
        model,
        hls_config=config,
        output_dir=out_dir,
    )
    # Reset the optimizer configuration so later tests are unaffected.
    rs_optimizer.configure(layers=[])
    hls_model.compile()

    # The inline activation must appear as its own layer in the HLS model.
    assert expected_name in [layer.name for layer in hls_model.get_layers()]

    # Compare outputs on inputs snapped to a 2^-10 grid.
    X = randX_100_10
    X = np.round(X * 2**10) * 2**-10
    y_qkeras = model.predict(X)
    y_hls4ml = hls_model.predict(X)

    if hasattr(act_obj, 'bits'):
        # Fixed-point quantizers: allow up to one LSB of absolute error.
        np.testing.assert_allclose(
            y_qkeras.ravel(),
            y_hls4ml.ravel(),
            atol=2**-act_obj.bits,
            rtol=1.0,
        )
    else:
        if activation_quantizer == 'binary':
            # hls4ml encodes binary outputs as {0, 1}; map back to {-1, 1}.
            y_hls4ml = np.where(y_hls4ml == 0, -1, 1)
        mismatch = (y_hls4ml != y_qkeras).ravel()
        assert sum(mismatch) / len(mismatch) <= 0.005

0 commit comments

Comments (0)