Skip to content
Snippets Groups Projects
Commit 7bd99c80 authored by Grégoire Kubler's avatar Grégoire Kubler
Browse files

Merge remote-tracking branch 'origin/dev' into feat/release_pip

parents 63cfd5da b3d5b86e
No related branches found
No related tags found
2 merge requests!212Version 0.3.0,!116feat/release_pip
Pipeline #49725 waiting for manual action
Showing
with 351 additions and 4 deletions
{#- C header template: exports the node's channel attributes as macros.
    Context: `name` (node name, uppercased for the guard/macro prefix),
    `InChannels`, `OutChannels`. Rendered once per exported node. -#}
#ifndef EXPORT_ATTRIBUTES_{{name|upper}}_H
#define EXPORT_ATTRIBUTES_{{name|upper}}_H
#define _{{name|upper}}_IN_CHANNELS {{InChannels}}
#define _{{name|upper}}_OUT_CHANNELS {{OutChannels}}
#endif /* EXPORT_ATTRIBUTES_{{name|upper}}_H */
{#- C header template: exports a pooling node's per-dimension kernel and
    stride sizes plus its ceil-mode flag as macros. Context: `name`,
    `KernelDims`, `StrideDims`, `CeilMode` (cast to int for C). -#}
#ifndef EXPORT_ATTRIBUTES_{{name|upper}}_H
#define EXPORT_ATTRIBUTES_{{name|upper}}_H
{% for i in range(KernelDims|length) %}
#define _{{name|upper}}_KERNEL_{{i}} {{KernelDims[i]}}
{%- endfor %}
{% for i in range(StrideDims|length) %}
#define _{{name|upper}}_STRIDE_{{i}} {{StrideDims[i]}}
{%- endfor %}
#define _{{name|upper}}_CEIL_MODE {{CeilMode|int}}
#endif /* EXPORT_ATTRIBUTES_{{name|upper}}_H */
{#- dnn.cpp template: emits generateModel(), which builds the exported graph.
    Context: `operators` (Aidge operator headers to include), `headers`
    (per-node attribute/parameter headers), `actions` (C++ statements that
    instantiate nodes and add them to `graph`). -#}
/********************************************************************************
 * This file has been generated by the Aidge export.
 ********************************************************************************/
/*** STD INCLUDES ***/
#include <memory>  // std::shared_ptr
/*** AIDGE INCLUDES ***/
#include <aidge/graph/GraphView.hpp>  // Aidge::GraphView
#include <aidge/graph/Node.hpp>       // Aidge::Node
#include <aidge/graph/OpArgs.hpp>     // Aidge::Sequential
/*** AIDGE OPERATORS ***/
{%- for operator in operators %}
#include <aidge/operator/{{operator}}.hpp>
{%- endfor %}
/*** OPERATOR ATTRIBUTES & PARAMETERS ***/
{%- for header in headers %}
#include "{{ header }}"
{%- endfor %}
/*** HEADER ***/
#include "dnn.hpp"
std::shared_ptr<Aidge::GraphView> generateModel() {
    /*** BUILDING GRAPH ***/
    std::shared_ptr<Aidge::GraphView> graph = std::make_shared<Aidge::GraphView>();
    {%- for action in actions %}
    {{ action }}
    {%- endfor %}
    return graph;
}
{#- _set_input.jinja: emits one addChild() call per connected input of the
    current node. `inputs` comes from parse_node_input(): entry i is either
    (parent_node_name, parent_output_idx) or has a falsy first element when
    input i is not connected. loop.index (1-based) gives the input slot. -#}
{# NOTE: Trying a shorter notation like {%- for input in inputs if input[0] %}
will mess up loop.index as the input set up at None will not increment ! #}
{%- for input in inputs %}
{%- if input[0] %}
{{input[0]}}->addChild({{name}}, {{input[1]}}, {{loop.index - 1}}); {# NOTE: loop.index begin at 1 #}
{%- endif %}
{%- endfor %}
{#- Conv node template: builds an Aidge::Conv node from the macros generated
    in the matching attributes header (channels, kernel, stride, dilation),
    wires its inputs via _set_input.jinja, and adds it to `graph`.
    NOTE(review): uses _..._DILATION_{i} macros — confirm the attributes
    header template for Conv defines them (not visible here). -#}
{% filter indent(width=4, first=False) %}
/*** {{name|upper}} ***/
std::shared_ptr<Aidge::Node> {{name}} =
    Aidge::Conv(
        _{{name|upper}}_IN_CHANNELS,
        _{{name|upper}}_OUT_CHANNELS,
        {
        {%- for i in range(KernelDims|length) -%}
        _{{name|upper}}_KERNEL_{{i}}{%- if not loop.last %}, {% endif -%}
        {%- endfor -%}
        },
        "{{name}}",
        {
        {%- for i in range(StrideDims|length) -%}
        _{{name|upper}}_STRIDE_{{i}} {%- if not loop.last %}, {% endif -%}
        {%- endfor -%}
        },
        {
        {%- for i in range(DilationDims|length) -%}
        _{{name|upper}}_DILATION_{{i}} {%- if not loop.last %}, {% endif -%}
        {%- endfor -%}
        }
    );
{% include "./_set_input.jinja" %}
graph->add({{name}});
{% endfilter %}
{#- FC node template: builds an Aidge::FC node from the channel macros of the
    matching attributes header, wires its inputs, and adds it to `graph`. -#}
{% filter indent(width=4, first=False) %}
/*** {{name|upper}} ***/
std::shared_ptr<Aidge::Node> {{name}} =
    Aidge::FC(
        _{{name|upper}}_IN_CHANNELS,
        _{{name|upper}}_OUT_CHANNELS,
        "{{name}}"
    );
{% include "./_set_input.jinja" %}
graph->add({{name}});
{% endfilter %}
{#- MaxPooling node template: builds an Aidge::MaxPooling node from the
    kernel/stride/ceil-mode macros of the matching attributes header,
    wires its inputs, and adds it to `graph`. -#}
{% filter indent(width=4, first=False) %}
/*** {{name|upper}} ***/
std::shared_ptr<Aidge::Node> {{name}} =
    Aidge::MaxPooling(
        {
        {%- for i in range(KernelDims|length) -%}
        _{{name|upper}}_KERNEL_{{i}}{%- if not loop.last %}, {% endif -%}
        {%- endfor -%}
        },
        "{{name}}",
        {
        {%- for i in range(StrideDims|length) -%}
        _{{name|upper}}_STRIDE_{{i}} {%- if not loop.last %}, {% endif -%}
        {%- endfor -%}
        },
        _{{name|upper}}_CEIL_MODE
    );
{% include "./_set_input.jinja" %}
graph->add({{name}});
{% endfilter %}
{#- Producer node template: wraps the pre-generated tensor `tensor_name`
    (defined in the exported parameters header) in an Aidge::Producer node.
    Producers have no inputs, so _set_input.jinja is not included. -#}
{% filter indent(width=4, first=False) %}
/*** {{name|upper}} ***/
std::shared_ptr<Aidge::Node> {{name}} =
    Aidge::Producer(
        {{tensor_name}},
        "{{name}}"
    );
graph->add({{name}});
{% endfilter %}
{#- ReLU node template: builds an Aidge::ReLU node (no attributes), wires
    its inputs, and adds it to `graph`. -#}
{% filter indent(width=4, first=False) %}
/*** {{name|upper}} ***/
std::shared_ptr<Aidge::Node> {{name}} =
    Aidge::ReLU(
        "{{name}}"
    );
{% include "./_set_input.jinja" %}
graph->add({{name}});
{% endfilter %}
{#- Sub node template: builds an Aidge::Sub node (no attributes), wires
    its inputs, and adds it to `graph`. -#}
{% filter indent(width=4, first=False) %}
/*** {{name|upper}} ***/
std::shared_ptr<Aidge::Node> {{name}} =
    Aidge::Sub(
        "{{name}}"
    );
{% include "./_set_input.jinja" %}
graph->add({{name}});
{% endfilter %}
{#- C++ header template: defines the parameter tensor `name` as a shared
    Aidge::Tensor built from an ArrayND initializer. Context: `dims`
    (shape, its length selects ArrayND), `data_t` (element type),
    `values` (literal initializer body). -#}
#ifndef EXPORT_PARAMETERS_{{name|upper}}_H
#define EXPORT_PARAMETERS_{{name|upper}}_H
#include <aidge/data/Tensor.hpp>
#include <memory>
std::shared_ptr<Aidge::Tensor> {{name}} = std::make_shared<Aidge::Tensor>(Aidge::Array{{dims|length}}D<{{data_t}}, {{ dims|join(", ") }}> {
{{ values }}
});
#endif /* EXPORT_PARAMETERS_{{name|upper}}_H */
{#- pybind11 binding template: exposes generateModel() (declared in dnn.hpp)
    as `<module>.generate_model`. Context: `name` is the Python module name
    and must match the compiled extension's filename. -#}
#include <pybind11/pybind11.h>
#include "dnn.hpp"
namespace py = pybind11;
void init_{{name}}(py::module& m){
    m.def("generate_model", generateModel);
}
PYBIND11_MODULE({{name}}, m) {
    init_{{name}}(m);
}
from .operator_registry import *
def parse_node_input(node_inputs: list) -> list:
    """Parse node inputs in order to adapt the list for Jinja.

    Entries whose parent node is ``None`` (unconnected inputs) are kept as
    ``None`` so the positional information (input slot index) is preserved
    for the template.

    :param node_inputs: return of node.inputs()
    :type node_inputs: list of tuple of aidge_core.Node, output idx.
    :return: list of tuple of node name, output idx.
    :rtype: list
    """
    return [None if parent_node is None else (parent_node.name(), out_id)
            for parent_node, out_id in node_inputs]
# Maps operator type name (str) -> registered exporter callable.
OPERATORS_REGISTRY = {}


def operator_register(*args):
    """Decorator registering an exporter under one or more operator type names.

    :param args: operator type names (e.g. "Conv", "FC") handled by the
        decorated exporter.
    :return: decorator that records the exporter in ``OPERATORS_REGISTRY``
        and returns a metadata-preserving wrapper around it.
    """
    from functools import wraps  # local import: module has no import section of its own
    keys = list(args)

    def decorator(operator):
        # wraps() keeps __name__/__doc__ of the exporter on the wrapper,
        # which the bare wrapper previously discarded.
        @wraps(operator)
        def wrapper(*w_args, **w_kwargs):
            return operator(*w_args, **w_kwargs)
        # Register the original exporter (not the wrapper) under every key.
        for key in keys:
            OPERATORS_REGISTRY[key] = operator
        return wrapper
    return decorator


def supported_operators():
    """Return the list of operator type names with a registered exporter."""
    return list(OPERATORS_REGISTRY.keys())
File moved
import os
from pathlib import Path
from typing import Union

from jinja2 import Environment, FileSystemLoader
def generate_file(file_path: Union[Path, str], template_path: Union[Path, str], **kwargs) -> None:
    """Generate a file at `file_path` using the jinja template located at
    `template_path`.

    kwargs are used to fill the template.

    :param file_path: path where to generate the file
    :type file_path: pathlib.Path or str
    :param template_path: Path to the template to use for code generation
    :type template_path: pathlib.Path or str
    """
    # Convert str -> Path for compatibility !
    if isinstance(file_path, str):
        file_path = Path(file_path)
    if isinstance(template_path, str):
        template_path = Path(template_path)

    # Make dir (parents too) so generation works for nested output paths.
    file_path.parent.mkdir(parents=True, exist_ok=True)

    # Select template: the loader root is the template's directory.
    template = Environment(loader=FileSystemLoader(
        template_path.parent)).get_template(template_path.name)

    # Generate file
    with open(file_path, mode="w", encoding="utf-8") as file:
        file.write(template.render(kwargs))
def generate_str(template_path: Union[Path, str], **kwargs) -> str:
    """Generate a string using the jinja template located at `template_path`.

    kwargs are used to fill the template.

    :param template_path: Path to the template to use for code generation
    :type template_path: pathlib.Path or str
    :return: A string of the interpreted template
    :rtype: str
    """
    # Convert str -> Path for compatibility !
    if isinstance(template_path, str):
        template_path = Path(template_path)
    return Environment(loader=FileSystemLoader(
        template_path.parent)).get_template(template_path.name).render(kwargs)
...@@ -20,10 +20,7 @@ class ExportNode(ABC): ...@@ -20,10 +20,7 @@ class ExportNode(ABC):
self.node = aidge_node self.node = aidge_node
self.operator = aidge_node.get_operator() self.operator = aidge_node.get_operator()
self.name = self.node.name() self.name = self.node.name()
self.attributes = {} # Attributes are auto fetched from aidge operators self.attributes = self.operator.attr.dict() if self.operator.attr is not None else {} # Attributes are auto fetched from aidge operators
if isinstance(self.operator, Attributes):
for attr_name in self.operator.get_attrs_name():
self.attributes[attr_name] = self.operator.get_attr(attr_name)
# rename is_leaf ? # rename is_leaf ?
self.is_last = len(self.node.get_children()) == 0 self.is_last = len(self.node.get_children()) == 0
......
/*
Example main.cpp used to test aidge export.
This file is copied in the test export.
*/
#include <iostream>
#include "include/dnn.hpp"
int main()
{
// Marker printed first so the test harness can confirm the binary started.
std::cout << "BEGIN" << std::endl;
// generateModel() is implemented in the generated dnn.cpp (declared in dnn.hpp);
// it builds and returns the exported graph.
std::shared_ptr<Aidge::GraphView> graph = generateModel();
// Reaching this line means graph construction did not crash.
std::cout << "END" << std::endl;
return 0;
}
"""
Copyright (c) 2023 CEA-List
This program and the accompanying materials are made available under the
terms of the Eclipse Public License 2.0 which is available at
http://www.eclipse.org/legal/epl-2.0.
SPDX-License-Identifier: EPL-2.0
"""
import unittest
import aidge_core
from functools import reduce
import pathlib
import os
import sys
import subprocess
import shutil
import numpy as np
def initFiller(model):
    """Initialize every Producer output (weights and biases) of `model`.

    Conv weights get Xavier-uniform init, FC weights normal init, and both
    bias kinds the constant 0.01; other producers are left untouched.
    """
    # Initialize parameters (weights and biases)
    for node in model.get_nodes():
        if node.type() == "Producer":
            prod_op = node.get_operator()
            value = prod_op.get_output(0)
            value.set_backend("cpu")
            # First (consumer node, input index) pair fed by this producer.
            tuple_out = node.output(0)[0]
            # No conv in current network
            if tuple_out[0].type() == "Conv" and tuple_out[1] == 1:
                # Conv weight (input slot 1 of the consumer)
                aidge_core.xavier_uniform_filler(value)
            elif tuple_out[0].type() == "Conv" and tuple_out[1] == 2:
                # Conv bias (input slot 2)
                aidge_core.constant_filler(value, 0.01)
            elif tuple_out[0].type() == "FC" and tuple_out[1] == 1:
                # FC weight
                aidge_core.normal_filler(value)
            elif tuple_out[0].type() == "FC" and tuple_out[1] == 2:
                # FC bias
                aidge_core.constant_filler(value, 0.01)
            else:
                # Producer feeding something else (e.g. a data input): skip.
                pass
class test_export(unittest.TestCase):
    """Test aidge export
    """

    def setUp(self):
        # Directory where the export will be generated (relative to CWD).
        self.EXPORT_PATH = pathlib.Path("myexport")

    def tearDown(self):
        pass

    def test_generate_export(self):
        # Create model: a small 4-layer MLP (no Conv layers).
        model = aidge_core.sequential([
            aidge_core.FC(in_channels=32*32*3, out_channels=512, name="InputNode"),
            aidge_core.ReLU(name="Relu0"),
            aidge_core.FC(in_channels=512, out_channels=256, name="FC1"),
            aidge_core.ReLU(name="Relu1"),
            aidge_core.FC(in_channels=256, out_channels=128, name="FC2"),
            aidge_core.ReLU(name="Relu2"),
            aidge_core.FC(in_channels=128, out_channels=10, name="OutputNode"),
        ])
        initFiller(model)
        # Export model
        aidge_core.export(self.EXPORT_PATH, model)
        self.assertTrue(self.EXPORT_PATH.is_dir(), "Export folder has not been generated")
        os.makedirs(self.EXPORT_PATH / "build", exist_ok=True)
        # Test compilation of export: requires cmake/make and an Aidge
        # install, located via $AIDGE_INSTALL or the default prefix path.
        install_path = os.path.join(sys.prefix, "lib", "libAidge") if "AIDGE_INSTALL" not in os.environ else os.environ["AIDGE_INSTALL"]
        # Copy the example entry point next to the generated sources.
        shutil.copyfile(pathlib.Path(__file__).parent / "static/main.cpp", self.EXPORT_PATH / "main.cpp")
        # check_call raises (failing the test) on a non-zero exit code.
        subprocess.check_call(['cmake', str(self.EXPORT_PATH.absolute()), f'-DCMAKE_INSTALL_PREFIX:PATH={install_path}'], cwd=str(self.EXPORT_PATH / "build"))
        subprocess.check_call(['make', 'all', 'install'], cwd=str(self.EXPORT_PATH / "build"))
# Allow running this test module directly (outside a test runner).
if __name__ == '__main__':
    unittest.main()
...@@ -41,6 +41,7 @@ class test_OperatorImpl(unittest.TestCase): ...@@ -41,6 +41,7 @@ class test_OperatorImpl(unittest.TestCase):
generic_matmul_op = matmul.get_operator() generic_matmul_op = matmul.get_operator()
generic_matmul_op.set_forward_dims(lambda x: x) generic_matmul_op.set_forward_dims(lambda x: x)
generic_matmul_op.set_impl(testImpl(generic_matmul_op)) generic_matmul_op.set_impl(testImpl(generic_matmul_op))
generic_matmul_op.set_input(0, aidge_core.Tensor(np.arange(18).reshape(1,2,3,3)))
generic_matmul_op.forward() generic_matmul_op.forward()
self.assertEqual(GLOBAL_CPT, 1) self.assertEqual(GLOBAL_CPT, 1)
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment