diff --git a/hls4ml/backends/quartus/quartus_backend.py b/hls4ml/backends/quartus/quartus_backend.py index ab79bd820d..60969fa726 100644 --- a/hls4ml/backends/quartus/quartus_backend.py +++ b/hls4ml/backends/quartus/quartus_backend.py @@ -43,21 +43,24 @@ def _register_flows(self): ] quantization_flow = register_flow('quantization', quantization_passes, requires=[init_flow], backend=self.name) + optimization_passes = [] + optimization_flow = register_flow('optimize', optimization_passes, requires=[init_flow], backend=self.name) + templates = self._get_layer_templates() - template_flow = register_flow('apply_templates', templates, requires=[init_flow], backend=self.name) + template_flow = register_flow('apply_templates', self._get_layer_templates, requires=[init_flow], backend=self.name) writer_passes = [ 'make_stamp', 'quartus:write_hls' ] - writer_flow_requirements = ['optimize', quartus_types_flow, template_flow] - self._writer_flow = register_flow('write', writer_passes, requires=writer_flow_requirements, backend=self.name) + + self._writer_flow = register_flow('write', writer_passes, requires=['quartus:ip'], backend=self.name) all_passes = get_backend_passes(self.name) extras = [ # Ideally this should be empty - opt_pass for opt_pass in all_passes if opt_pass not in initializers + quartus_types + templates + writer_passes + opt_pass for opt_pass in all_passes if opt_pass not in initializers + streaming_passes + quartus_types + quantization_passes + templates + optimization_passes + writer_passes ] if len(extras) > 0: @@ -65,7 +68,7 @@ def _register_flows(self): else: extras_flow = None - ip_flow_requirements = ['optimize', init_flow, streaming_flow, quantization_flow, quartus_types_flow, extras_flow, template_flow] + ip_flow_requirements = ['optimize', init_flow, streaming_flow, quantization_flow, optimization_flow, quartus_types_flow, extras_flow, template_flow] ip_flow_requirements = list(filter(None, ip_flow_requirements)) self._default_flow = 
register_flow('ip', None, requires=ip_flow_requirements, backend=self.name) diff --git a/test/pytest/test_extensions.py b/test/pytest/test_extensions.py index af772b3d8f..201059c9c9 100644 --- a/test/pytest/test_extensions.py +++ b/test/pytest/test_extensions.py @@ -1,13 +1,12 @@ +import pytest import hls4ml -import tensorflow as tf import numpy as np -import pytest +import tensorflow as tf from pathlib import Path test_root_path = Path(__file__).parent # Keras implementation of a custom layer - class KReverse(tf.keras.layers.Layer): ''' Keras implementation of a hypothetical custom layer ''' def __init__(self): @@ -16,8 +15,7 @@ def __init__(self): def call(self, inputs): return tf.reverse(inputs, axis=[-1]) -# hls4ml implementations - +# hls4ml layer implementation class HReverse(hls4ml.model.layers.Layer): ''' hls4ml implementation of a hypothetical custom layer ''' @@ -27,8 +25,35 @@ def initialize(self): dims = inp.dim_names self.add_output_variable(shape, dims) +# hls4ml optimizer to remove duplicate reverse layers +class RemoveDuplicateReverse(hls4ml.model.optimizer.OptimizerPass): + '''OptimizerPass to remove consecutive HReverse layers.''' + + def match(self, node): + return isinstance(node, HReverse) and \ + isinstance(node.get_input_node(), HReverse) + + def transform(self, model, node): + first = node.get_input_node() + second = node -# Templates + model.remove_node(first, rewire=True) + model.remove_node(second, rewire=True) + return True + +# Parser for converter +def parse_reverse_layer(keras_layer, input_names, input_shapes, data_reader, config): + layer = {} + layer['class_name'] = 'HReverse' + layer['name'] = keras_layer['config']['name'] + layer['n_in'] = input_shapes[0][1] + + if input_names is not None: + layer['inputs'] = input_names + + return layer, [shape for shape in input_shapes[0]] + +# HLS Templates - No specific pragmas used; generic enough for both Intel and Vivado rev_config_template = """struct config{index} : nnet::reverse_config {{
static const unsigned n_in = {n_in}; @@ -55,8 +80,6 @@ def format(self, node): params = self._default_function_params(node) return self.template.format(**params) - -# HLS implementation rev_hls = \ """#ifndef NNET_REVERSE_H_ #define NNET_REVERSE_H_ @@ -74,8 +97,6 @@ def format(self, node): data_T input[CONFIG_T::n_in], data_T reversed[CONFIG_T::n_in] ) { - #pragma HLS PIPELINE - for (int i = 0; i < CONFIG_T::n_in; i++) { reversed[CONFIG_T::n_in - 1 - i] = input[i]; } @@ -86,43 +107,19 @@ def format(self, node): #endif """ -class RemoveDuplicateReverse(hls4ml.model.optimizer.OptimizerPass): - '''OptimizerPass to remove consecutive HReverse layers.''' - - def match(self, node): - return isinstance(node, HReverse) and \ - isinstance(node.get_input_node(), HReverse) - - def transform(self, model, node): - first = node.get_input_node() - second = node - - model.remove_node(first, rewire=True) - model.remove_node(second, rewire=True) - return True - -# Parser for converter -def parse_reverse_layer(keras_layer, input_names, input_shapes, data_reader, config): - layer = {} - layer['class_name'] = 'HReverse' - layer['name'] = keras_layer['config']['name'] - layer['n_in'] = input_shapes[0][1] - - if input_names is not None: - layer['inputs'] = input_names - - return layer, [shape for shape in input_shapes[0]] - -def test_extensions(tmp_path): +@pytest.fixture(scope='session', autouse=True) +def register_custom_layer(): # Register the converter for custom Keras layer hls4ml.converters.register_keras_layer_handler('KReverse', parse_reverse_layer) # Register the hls4ml's IR layer hls4ml.model.layers.register_layer('HReverse', HReverse) +@pytest.mark.parametrize('backend_id', ['Vivado', 'Quartus']) +def test_extensions(tmp_path, backend_id): # Register the optimization passes (if any) - backend = hls4ml.backends.get_backend('Vivado') - backend.register_pass('remove_duplicate_reverse', RemoveDuplicateReverse, flow='vivado:optimize') + backend =
hls4ml.backends.get_backend(backend_id) + backend.register_pass('remove_duplicate_reverse', RemoveDuplicateReverse, flow=f'{backend_id.lower()}:optimize') # Register template passes for the given backend backend.register_template(HReverseConfigTemplate) @@ -148,15 +145,15 @@ def test_extensions(tmp_path): hmodel = hls4ml.converters.convert_from_keras_model( kmodel, - output_dir=str(test_root_path / 'hls4mlprj_extensions'), - backend='Vivado', + output_dir=str(test_root_path / f'hls4mlprj_extensions_{backend_id}'), + backend=backend_id, io_type='io_parallel', - hls_config={ 'Model': { 'Precision': 'ap_int<4>', 'ReuseFactor': 1} }) + hls_config={ 'Model': { 'Precision': 'ap_int<6>', 'ReuseFactor': 1} }) hmodel.compile() hres = hmodel.predict(x.astype('float32')) # Check if the optimizer pass was applied - assert 'vivado:remove_duplicate_reverse' in hmodel._applied_flows[0]['vivado:optimize'] + assert f'{backend_id.lower()}:remove_duplicate_reverse' in hmodel._applied_flows[0][f'{backend_id.lower()}:optimize'] np.testing.assert_array_equal(kres, hres)