Skip to content

Add tracing support for the quartus backend #583

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 5 commits into from
Jul 22, 2022
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
41 changes: 41 additions & 0 deletions hls4ml/templates/quartus/firmware/nnet_utils/nnet_helpers.h
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,8 @@
#include <fstream>
#include <algorithm>
#include <map>
#include <sstream>
#include <iostream>

namespace nnet {

Expand Down Expand Up @@ -58,6 +60,45 @@ constexpr int pow2(int x){
return x == 0 ? 1 : 2 * pow2(x - 1);
}

// Copy a layer's output into a tracing buffer.
// Each element is converted to double via its to_double() member (data_T is
// expected to be an HLS fixed-point type) and then narrowed to save_T
// (float or double, chosen by the caller).
//
// @param data        pointer to layer_size elements of the layer's output
// @param ptr         destination buffer of at least layer_size save_T elements
// @param layer_size  number of elements to copy
template<class data_T, class save_T>
void save_output_array(data_T *data, save_T *ptr, size_t layer_size) {
    // size_t index avoids the signed/unsigned comparison against layer_size
    for (size_t i = 0; i < layer_size; i++) {
        ptr[i] = static_cast<save_T>(data[i].to_double());
    }
}

// We don't want to include save_T in this function because it will be inserted
// into myproject.cpp, so a workaround with the element size is used instead:
// trace_type_size selects between float (4) and double (8) storage.
//
// Saves one layer's output either into the Python-side trace buffers
// (when trace_outputs is set) or into a per-layer text log under ./tb_data.
// No-op unless tracing is enabled.
template<class data_T>
void save_layer_output(data_T *data, const char *layer_name, size_t layer_size) {
    // Tracing is globally gated; bail out early when disabled.
    if (!trace_enabled) return;

    if (trace_outputs) {
        // Bridge mode: copy into the preallocated buffer registered for this layer.
        if (trace_outputs->count(layer_name) > 0) {
            if (trace_type_size == 4) {
                save_output_array(data, static_cast<float *>((*trace_outputs)[layer_name]), layer_size);
            } else if (trace_type_size == 8) {
                save_output_array(data, static_cast<double *>((*trace_outputs)[layer_name]), layer_size);
            } else {
                // Diagnostics go to stderr so they don't mix with simulation stdout.
                std::cerr << "Unknown trace type!" << std::endl;
            }
        } else {
            std::cerr << "Layer name: " << layer_name << " not found in debug storage!" << std::endl;
        }
    } else {
        // Testbench mode: append this layer's output as one line of text.
        std::ostringstream filename;
        filename << "./tb_data/" << layer_name << "_output.log"; //TODO if run as a shared lib, path should be ../tb_data
        std::ofstream out(filename.str(), std::ios::app); // write-only append; no need for fstream
        assert(out.is_open());
        for (size_t i = 0; i < layer_size; i++) {
            out << data[i] << " "; // We don't care about precision in text files
        }
        out << std::endl;
        out.close();
    }
}

}

#endif
8 changes: 6 additions & 2 deletions hls4ml/writer/quartus_writer.py
Original file line number Diff line number Diff line change
Expand Up @@ -128,6 +128,11 @@ def write_project_cpp(self, model):
func = layer.get_attr('function_cpp', None)
if func:
newline += ' ' + func + '\n'
if model.config.trace_output and layer.get_attr('Trace', False):
newline += '#ifndef HLS_SYNTHESIS\n'
for var in vars:
newline += ' nnet::save_layer_output<{}>({}, "{}", {});\n'.format(var.type.name, var.name, layer.name, var.size_cpp())
newline += '#endif\n'
newline += '\n'

# Just copy line
Expand Down Expand Up @@ -400,8 +405,7 @@ def write_bridge(self, model):
newline = ''
for layer in model.get_layers():
func = layer.get_attr('function_cpp')
if func and model.config.trace_output and model.config.get_layer_config_value(layer, 'Trace',
False):
if func and model.config.trace_output and layer.get_attr('Trace', False):
vars = layer.get_variables()
for var in vars:
newline += indent + 'nnet::trace_outputs->insert(std::pair<std::string, void *>("{}", (void *) malloc({} * element_size)));\n'.format(
Expand Down
46 changes: 46 additions & 0 deletions test/pytest/test_trace.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,46 @@
import pytest
import hls4ml
import hls4ml.model.profiling
import tensorflow as tf
import numpy as np
from pathlib import Path
from tensorflow.keras.layers import Dense, Activation

test_root_path = Path(__file__).parent

@pytest.mark.parametrize('backend', ['Vivado', 'Quartus'])
def test_trace(backend):
    '''Test the tracing feature with a simple Keras model.'''
    # Build a minimal two-layer network: Dense(2) followed by an ELU activation.
    keras_model = tf.keras.models.Sequential([
        Dense(
            2,
            input_shape=(1,),
            name='Dense',
            use_bias=True,
            kernel_initializer=tf.keras.initializers.RandomUniform(minval=1, maxval=10),
            bias_initializer='zeros',
            kernel_regularizer=None,
            bias_regularizer=None,
            activity_regularizer=None,
            kernel_constraint=None,
            bias_constraint=None,
        ),
        Activation(activation='elu', name='Activation'),
    ])
    keras_model.compile(optimizer='adam', loss='mse')

    x = np.random.rand(100, 1)
    baseline_prediction = keras_model.predict(x)

    # Enable tracing on every layer of the converted model.
    config = hls4ml.utils.config_from_keras_model(keras_model, granularity='name')
    for layer_cfg in config['LayerName'].values():
        layer_cfg['Trace'] = True

    hls_model = hls4ml.converters.convert_from_keras_model(
        keras_model,
        hls_config=config,
        output_dir=str(test_root_path / f'hls4mlprj_trace_{backend}'),
        backend=backend,
    )
    hls_model.compile()

    # Collect per-layer outputs from both implementations and compare them.
    hls4ml_pred, hls4ml_trace = hls_model.trace(x)
    keras_trace = hls4ml.model.profiling.get_ymodel_keras(keras_model, x)

    np.testing.assert_allclose(hls4ml_trace['Dense'], keras_trace['Dense'], rtol=1e-2, atol=0.01)
    np.testing.assert_allclose(hls4ml_pred, baseline_prediction, rtol=1e-2, atol=0.01)