Commit c594db35 authored by Olivier BICHLER

Restored LeakyReLU export

parent c7ce1826
Related merge requests: !71 (0.4.0), !59 (Continuous improvement of export_cpp)
kernels/leakyrelu.hpp
@@ -3,25 +3,47 @@
 
 #include "network/typedefs.hpp"
 
-template<int NB_DATA,
+template<int NB_ELTS,
+         // Memory mapping: inputs
+         int INPUT_MEM_CONT_OFFSET,
+         int INPUT_MEM_CONT_SIZE,
+         int INPUT_MEM_WRAP_OFFSET,
+         int INPUT_MEM_WRAP_SIZE,
+         int INPUT_MEM_STRIDE,
+         // Memory mapping: outputs
+         int OUTPUT_MEM_CONT_OFFSET,
+         int OUTPUT_MEM_CONT_SIZE,
+         int OUTPUT_MEM_WRAP_OFFSET,
+         int OUTPUT_MEM_WRAP_SIZE,
+         int OUTPUT_MEM_STRIDE,
          typename Input_T, typename Output_T>
 __attribute__((always_inline)) inline
 void leakyrelu_forward (
     const Input_T* __restrict inputs,
     Output_T* __restrict outputs,
     const float negative_slope)
 {
-#ifdef _OPENMP
-#pragma omp parallel for
-#endif
-    for (int i = 0; i < NB_DATA; ++i) {
-        if (inputs[i] >= 0) {
-            outputs[i] = inputs[i];
-        } else {
-            outputs[i] = negative_slope * inputs[i];
+    size_t inOffset = 0;
+    size_t outOffset = 0;
+
+    for (int i = 0; i < NB_ELTS; ++i) {
+        if (INPUT_MEM_WRAP_SIZE > 0 && i == (INPUT_MEM_CONT_SIZE / sizeof(Input_T))) {
+            inOffset = (INPUT_MEM_WRAP_OFFSET - INPUT_MEM_CONT_OFFSET
+                        - INPUT_MEM_CONT_SIZE) / sizeof(Input_T);
+        }
+        if (OUTPUT_MEM_WRAP_SIZE > 0 && i == (OUTPUT_MEM_CONT_SIZE / sizeof(Output_T))) {
+            outOffset = (OUTPUT_MEM_WRAP_OFFSET - OUTPUT_MEM_CONT_OFFSET
+                         - OUTPUT_MEM_CONT_SIZE) / sizeof(Output_T);
+        }
+
+        if (inputs[inOffset + i] >= 0) {
+            outputs[outOffset + i] = inputs[inOffset + i];
+        }
+        else {
+            outputs[outOffset + i] = negative_slope * inputs[inOffset + i];
         }
     }
 }
 
-#endif // __AIDGE_EXPORT_CPP_KERNELS_LEAKYRELU__
\ No newline at end of file
+#endif // __AIDGE_EXPORT_CPP_KERNELS_LEAKYRELU__
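
The new template parameters carry Aidge's memory-mapping information for each tensor: a contiguous region (MEM_CONT_OFFSET / MEM_CONT_SIZE) optionally followed by a wrap-around region (MEM_WRAP_OFFSET / MEM_WRAP_SIZE) when the memory planner places the buffer in a circular arena. Once the linear index i passes the end of the contiguous part, the kernel adds a constant element delta so that inOffset + i (resp. outOffset + i) lands in the wrap region; this also explains why the OpenMP pragma is dropped here, since the offsets are loop-carried state. Below is a minimal standalone sketch of the same arithmetic; it is not part of the commit, and the function name and all layout values are invented for illustration:

// sketch.cpp -- illustrative only; names and layout values are invented.
#include <cstdio>
#include <cstddef>

template<int NB_ELTS,
         int IN_CONT_OFFSET, int IN_CONT_SIZE,   // bytes
         int IN_WRAP_OFFSET, int IN_WRAP_SIZE>   // bytes
void leaky_relu_wrapped(const float* __restrict in, float* __restrict out,
                        float negative_slope)
{
    size_t inOffset = 0;
    for (int i = 0; i < NB_ELTS; ++i) {
        // After the contiguous part (IN_CONT_SIZE bytes) is consumed, add a
        // constant element delta so that inOffset + i lands in the wrap region.
        if (IN_WRAP_SIZE > 0 && i == (int)(IN_CONT_SIZE / sizeof(float))) {
            inOffset = (IN_WRAP_OFFSET - IN_CONT_OFFSET - IN_CONT_SIZE) / sizeof(float);
        }
        const float v = in[inOffset + i];
        out[i] = (v >= 0.f) ? v : negative_slope * v;
    }
}

int main()
{
    // 6 logical elements: 4 contiguous floats at byte offset 0 of the arena,
    // the remaining 2 wrapping around to byte offset 32.
    float arena[10] = {1.f, -2.f, 3.f, -4.f, 0.f, 0.f, 0.f, 0.f, 5.f, -6.f};
    float out[6];
    leaky_relu_wrapped<6, 0, 16, 32, 8>(arena, out, 0.1f);
    for (float v : out)
        std::printf("%g ", v);   // expected: 1 -0.2 3 -0.4 5 -0.6
    std::printf("\n");
    return 0;
}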
@@ -34,7 +34,34 @@ class ReLU(ExportNodeCpp):
         if self.attributes["aidge_cmp"]:
             self.include_list.append("network/utils.hpp")  # aidge_cmp function
             self.include_list.append("data/aidge_outputs/" + node.name() + ".hpp")
+
+
+@ExportLibCpp.register("LeakyReLU",
+                       aidge_core.ImplSpec(aidge_core.IOSpec(aidge_core.dtype.any)),
+                       aidge_core.ProdConso.in_place_model)
+class LeakyReLU(ExportNodeCpp):
+    def __init__(self, node, mem_info):
+        super().__init__(node, mem_info)
+
+        # Initialize kernel attributes
+        self.attributes["alpha"] = node.get_operator().attr.negative_slope
+        self.attributes["aidge_cmp"] = node.attributes().has_attr("aidge_cmp")
+
+        # Template for layer configuration file generation
+        self.config_template = str(ROOT / "templates" / "configuration" / "leakyrelu_config.jinja")
+
+        # Template for layer call function generation within the forward file
+        self.forward_template = str(ROOT / "templates" / "kernel_forward" / "leakyrelu_forward.jinja")
+
+        # Files to include within the generated forward.cpp file
+        self.include_list = []
+
+        # Path to the kernel(s) files to copy
+        self.add_kernel_to_copy(ROOT / "kernels" / "leakyrelu.hpp")
+
+        # Include aidge outputs within the fwd file
+        if self.attributes["aidge_cmp"]:
+            self.include_list.append("network/utils.hpp")  # aidge_cmp function
+            self.include_list.append("data/aidge_outputs/" + node.name() + ".hpp")
+
+
 @ExportLibCpp.register_metaop("QReLU", aidge_core.ImplSpec(aidge_core.IOSpec(aidge_core.dtype.any)))
 class QReLU(ReLU):
templates/configuration/leakyrelu_config.jinja
@@ -5,7 +5,7 @@
 {# For layer configuration -#}
 {% include "./_def_io.jinja" %}
 {% include "./_meminfo.jinja" %}
-#define {{ name|upper }}_NB_DATA {{ nb_data }}
+#define {{ name|upper }}_NB_ELTS {{ in_dims[0]|join('*') }}
 #define {{ name|upper }}_ALPHA {{ alpha }}
 
 #endif /* {{ name|upper }}_LAYER_H */
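
For illustration only, here is a hypothetical expansion of this configuration template for a layer named leakyrelu0 with in_dims[0] = [1, 10] and alpha = 0.1 (the values used by the unit test below); the I/O and memory-offset macros produced by the _def_io.jinja and _meminfo.jinja partials are omitted:

// Hypothetical generated configuration header (excerpt):
#define LEAKYRELU0_NB_ELTS 1*10
#define LEAKYRELU0_ALPHA 0.1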
templates/kernel_forward/leakyrelu_forward.jinja
 {% filter indent(width=4, first=False) %}
 {% include "./_mem_offset.jinja" %}
-leakyrelu_forward<{{name|upper}}_NB_DATA>
-    ({{input_name}}, {{output_name}}, {{name|upper}}_ALPHA);
+leakyrelu_forward<{{name|upper}}_NB_ELTS,
+                  {{ in_name[0]|upper }}_MEM_CONT_OFFSET,
+                  {{ in_name[0]|upper }}_MEM_CONT_SIZE,
+                  {{ in_name[0]|upper }}_MEM_WRAP_OFFSET,
+                  {{ in_name[0]|upper }}_MEM_WRAP_SIZE,
+                  {{ in_name[0]|upper }}_MEM_STRIDE,
+                  {{ out_name[0]|upper }}_MEM_CONT_OFFSET,
+                  {{ out_name[0]|upper }}_MEM_CONT_SIZE,
+                  {{ out_name[0]|upper }}_MEM_WRAP_OFFSET,
+                  {{ out_name[0]|upper }}_MEM_WRAP_SIZE,
+                  {{ out_name[0]|upper }}_MEM_STRIDE>
+    ({{in_name[0]}}, {{out_name[0]}}, {{name|upper}}_ALPHA);
 {% include "./_save_outputs.jinja" %}
 {% include "./_aidge_cmp.jinja" %}
 {% endfilter %}
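
Again for illustration, this is the call the template would generate for the same leakyrelu0 layer, assuming the input and output tensors are named leakyrelu0_input_0 and leakyrelu0_output_0 (the actual identifiers come from _def_io.jinja, so these macro and variable names are hypothetical):

// Hypothetical generated call inside forward.cpp:
leakyrelu_forward<LEAKYRELU0_NB_ELTS,
                  LEAKYRELU0_INPUT_0_MEM_CONT_OFFSET,
                  LEAKYRELU0_INPUT_0_MEM_CONT_SIZE,
                  LEAKYRELU0_INPUT_0_MEM_WRAP_OFFSET,
                  LEAKYRELU0_INPUT_0_MEM_WRAP_SIZE,
                  LEAKYRELU0_INPUT_0_MEM_STRIDE,
                  LEAKYRELU0_OUTPUT_0_MEM_CONT_OFFSET,
                  LEAKYRELU0_OUTPUT_0_MEM_CONT_SIZE,
                  LEAKYRELU0_OUTPUT_0_MEM_WRAP_OFFSET,
                  LEAKYRELU0_OUTPUT_0_MEM_WRAP_SIZE,
                  LEAKYRELU0_OUTPUT_0_MEM_STRIDE>
    (leakyrelu0_input_0, leakyrelu0_output_0, LEAKYRELU0_ALPHA);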
@@ -295,6 +295,14 @@ class test_operator_export(unittest.TestCase):
         self.unit_test_export(model, "ReLU", [[1, 10]])
 
+    def test_leakyrelu(self):
+        print("LeakyReLU")
+
+        model = aidge_core.sequential([
+            aidge_core.LeakyReLU(name="leakyrelu0", negative_slope=0.1)
+        ])
+
+        self.unit_test_export(model, "LeakyReLU", [[1, 10]])
+
     def test_add(self):
         print("Add")
         model = aidge_core.sequential([