Skip to content
Snippets Groups Projects
Commit 987cbfc8 authored by Cyril Moineau's avatar Cyril Moineau
Browse files

Introduce Scheduler export + move generic C++ file generation to core.

parent cbf9c772
No related branches found
No related tags found
3 merge requests!279v0.4.0,!253v0.4.0,!163Export refactor
from .node_export import ExportNode, ExportNodeCpp from .node_export import ExportNode, ExportNodeCpp
from .code_generation import generate_file, generate_str from .code_generation import generate_file, generate_str, copy_file
from .export_registry import ExportLib, operator_register from .export_registry import ExportLib, operator_register
from .scheduler_export import ExportScheduler
...@@ -12,12 +12,23 @@ LANGUAGE = Enum('LANGUAGE', ['Cpp/C']) ...@@ -12,12 +12,23 @@ LANGUAGE = Enum('LANGUAGE', ['Cpp/C'])
class ExportLib(): # Should be abstract ? class ExportLib(): # Should be abstract ?
"""Aidge export lib, define a registry """Aidge export lib, define a registry
""" """
# PUBLIC
# Lib name usefull ? # Lib name usefull ?
_name:str = None # Help define namespace
name:str = None
# key: Path where static file is
# Value: Path where to copy the file relative to the export root
static_files:Dict[str, str] = {}
# PRIVATE
# Registry of exportNode # Registry of exportNode
_export_node_registry:Dict[str, List[ExportNode]] = {} _export_node_registry:Dict[str, List[ExportNode]] = {}
# The language type usefull ? # The language type usefull ?
_language: LANGUAGE = None _language: LANGUAGE = None
_compilo:str = None
def __init__(self) -> None: def __init__(self) -> None:
raise RuntimeError("ExportLib should not be instanciated") raise RuntimeError("ExportLib should not be instanciated")
@classmethod @classmethod
......
...@@ -113,7 +113,7 @@ class ExportNode(ABC): ...@@ -113,7 +113,7 @@ class ExportNode(ABC):
""" """
@abstractmethod @abstractmethod
def __init__(self, aidge_node: aidge_core.Node, mem_info: List[dict]) -> None: def __init__(self, aidge_node: aidge_core.Node, mem_info: List[dict], is_input: bool, is_output: bool) -> None:
"""Create ExportNode and retieve attriubtes from ``aidge_node``: """Create ExportNode and retieve attriubtes from ``aidge_node``:
""" """
...@@ -136,6 +136,7 @@ class ExportNode(ABC): ...@@ -136,6 +136,7 @@ class ExportNode(ABC):
self.attributes["in_name"] = [None] * self.attributes["nb_in"] self.attributes["in_name"] = [None] * self.attributes["nb_in"]
self.attributes["in_dims"] = [None] * self.attributes["nb_in"] self.attributes["in_dims"] = [None] * self.attributes["nb_in"]
self.attributes["in_size"] = [None] * self.attributes["nb_in"]
self.attributes["in_dformat"] = [None] * self.attributes["nb_in"] self.attributes["in_dformat"] = [None] * self.attributes["nb_in"]
self.attributes["in_format"] = [None] * self.attributes["nb_in"] self.attributes["in_format"] = [None] * self.attributes["nb_in"]
self.attributes["in_dtype"] = [None] * self.attributes["nb_in"] self.attributes["in_dtype"] = [None] * self.attributes["nb_in"]
...@@ -146,6 +147,7 @@ class ExportNode(ABC): ...@@ -146,6 +147,7 @@ class ExportNode(ABC):
self.attributes["out_name"] = [None] * self.attributes["nb_out"] self.attributes["out_name"] = [None] * self.attributes["nb_out"]
self.attributes["out_dims"] = [None] * self.attributes["nb_out"] self.attributes["out_dims"] = [None] * self.attributes["nb_out"]
self.attributes["out_size"] = [None] * self.attributes["nb_out"]
self.attributes["out_dformat"] = [None] * self.attributes["nb_out"] self.attributes["out_dformat"] = [None] * self.attributes["nb_out"]
self.attributes["out_format"] = [None] * self.attributes["nb_out"] self.attributes["out_format"] = [None] * self.attributes["nb_out"]
self.attributes["out_dtype"] = [None] * self.attributes["nb_out"] self.attributes["out_dtype"] = [None] * self.attributes["nb_out"]
...@@ -175,6 +177,7 @@ class ExportNode(ABC): ...@@ -175,6 +177,7 @@ class ExportNode(ABC):
tensor = self.operator.get_input(idx) tensor = self.operator.get_input(idx)
self.attributes["in_name"][idx] = f"{self.attributes['name']}_input_{idx}" if parent_node is None else f"{parent_node.name()}_output_{out_id}" self.attributes["in_name"][idx] = f"{self.attributes['name']}_input_{idx}" if parent_node is None else f"{parent_node.name()}_output_{out_id}"
self.attributes["in_dims"][idx] = tensor.dims() self.attributes["in_dims"][idx] = tensor.dims()
self.attributes["in_size"][idx] = tensor.size()
self.attributes["in_dformat"][idx] = tensor.dformat() self.attributes["in_dformat"][idx] = tensor.dformat()
self.attributes["in_format"][idx] = aidge_core.format_as(tensor.dformat()) self.attributes["in_format"][idx] = aidge_core.format_as(tensor.dformat())
self.attributes["in_dtype"][idx] = tensor.dtype() self.attributes["in_dtype"][idx] = tensor.dtype()
...@@ -192,6 +195,7 @@ class ExportNode(ABC): ...@@ -192,6 +195,7 @@ class ExportNode(ABC):
tensor = self.operator.get_output(idx) tensor = self.operator.get_output(idx)
self.attributes["out_name"][idx] = f"{self.attributes['name']}_output_{idx}" self.attributes["out_name"][idx] = f"{self.attributes['name']}_output_{idx}"
self.attributes["out_dims"][idx] = tensor.dims() self.attributes["out_dims"][idx] = tensor.dims()
self.attributes["out_size"][idx] = tensor.size()
self.attributes["out_dformat"][idx] = tensor.dformat() self.attributes["out_dformat"][idx] = tensor.dformat()
self.attributes["out_format"][idx] = aidge_core.format_as(tensor.dformat()) self.attributes["out_format"][idx] = aidge_core.format_as(tensor.dformat())
self.attributes["out_dtype"][idx] = tensor.dtype() self.attributes["out_dtype"][idx] = tensor.dtype()
...@@ -289,7 +293,7 @@ class ExportNodeCpp(ExportNode): ...@@ -289,7 +293,7 @@ class ExportNodeCpp(ExportNode):
) )
kernel_include_list.append( kernel_include_list.append(
self.kernels_path + "/" + kernel_path.stem + kernel_path.suffix) self.kernels_path + "/" + kernel_path.stem + kernel_path.suffix)
path_to_definition = f"layers/{self.attributes['name']}.h" path_to_definition = f"include/layers/{self.attributes['name']}.h"
code_generation.generate_file( code_generation.generate_file(
str(export_folder / path_to_definition), str(export_folder / path_to_definition),
self.config_template, self.config_template,
......
import aidge_core
import os
import shutil
from pathlib import Path
from aidge_core.export_utils import ExportLib, generate_file, copy_file
from typing import List, Tuple
class ExportScheduler():
    """Export a scheduled Aidge graph as a standalone C++ inference project.

    Walks the static scheduling of an :py:class:`aidge_core.Scheduler`,
    generates one configuration header and one forward call per node through
    the ``export_lib`` registry, then renders ``dnn/src/forward.cpp``,
    ``dnn/include/forward.hpp`` and ``main.cpp`` from the Jinja templates
    located next to this module.
    """

    def __init__(self, scheduler: aidge_core.Scheduler, *args, **kwargs):
        """
        :param scheduler: Scheduler whose static scheduling will be exported.
            Extra positional/keyword arguments are accepted and ignored.
        """
        self.scheduler = scheduler
        self.graphview = scheduler.graph_view()

    def export(self,
               export_folder_path: str,
               export_lib: ExportLib = None,
               platform=None,
               memory_manager=None,
               memory_manager_args=None
               ) -> None:
        """Generate the export under ``export_folder_path`` (created if needed).

        :param export_folder_path: Root folder of the export, relative to CWD.
        :param export_lib: Registry mapping node types to ``ExportNode``
            classes. Currently mandatory.
        :param platform: Unused placeholder; platform parsing is not
            implemented yet.
        :param memory_manager: Callable ``(scheduler, **memory_manager_args)``
            returning ``(peak_mem, mem_info)`` where ``mem_info`` maps each
            node to its memory layout information. Mandatory.
        :param memory_manager_args: Extra keyword arguments forwarded to
            ``memory_manager`` (defaults to ``{}``).
        :raises ValueError: If ``export_lib`` or ``memory_manager`` is missing.
        :raises RuntimeError: If a scheduled node has no registered export
            node, or the graph has no input, or internal lists get out of sync.
        """
        if memory_manager is None:
            # Fail early with a clear message instead of "NoneType is not
            # callable" a few lines below.
            raise ValueError("export() requires a memory_manager callable.")
        if export_lib is None:
            # Only the export-lib path is implemented; platform parsing is TODO.
            raise ValueError("Current export only support export lib.")

        export_folder = Path().absolute() / export_folder_path
        os.makedirs(str(export_folder), exist_ok=True)
        dnn_folder = export_folder / "dnn"
        os.makedirs(str(dnn_folder), exist_ok=True)

        if memory_manager_args is None:
            memory_manager_args = {}
        peak_mem, mem_info = memory_manager(
            self.scheduler, **memory_manager_args)

        # Ordered kernel-call statements of the forward pass.
        list_actions: List[str] = []
        # Headers to include to get the per-layer configuration files.
        list_configs: List[str] = []
        # Graph-level I/O gathered while walking the scheduling.
        inputs_name: List[str] = []
        inputs_dtype: List[str] = []
        outputs_name: List[str] = []
        outputs_dtype: List[str] = []
        outputs_size: List[int] = []

        for node in self.scheduler.get_static_scheduling():
            if not export_lib.exportable(node):
                raise RuntimeError(
                    f"Operator not supported: {node.type()} for export lib {export_lib._name} !")
            is_input = node in self.graphview.get_input_nodes()
            is_output = node in self.graphview.get_output_nodes()
            op = export_lib.get_export_node(node)(
                node, mem_info[node], is_input, is_output)
            # For configuration files
            list_configs = op.export(dnn_folder, list_configs)
            # For forward file
            list_actions = op.forward(list_actions)
            if is_input:
                for idx in range(len(node.inputs())):
                    inputs_name.append(op.attributes["in_name"][idx])
                    inputs_dtype.append(op.attributes["in_cdtype"][idx])
            if is_output:
                for idx in range(len(node.outputs())):
                    outputs_name.append(op.attributes["out_name"][idx])
                    outputs_dtype.append(op.attributes["out_cdtype"][idx])
                    outputs_size.append(op.attributes["out_size"][idx])

        func_name = "model_forward"
        # Build a valid C++ parameter list. The previous code dropped the
        # separator between the input and output declarations, used an
        # invalid declarator ("float const name*"), and never used the
        # result: the signature was emitted as "void model_forward()" while
        # the generated main.cpp calls it with arguments.
        arg_decls = [f"const {dtype}* {name}"
                     for name, dtype in zip(inputs_name, inputs_dtype)]
        arg_decls += [f"{dtype}* {name}"
                      for name, dtype in zip(outputs_name, outputs_dtype)]
        forward_func = f"void {func_name}({', '.join(arg_decls)})"

        if not inputs_dtype:
            # mem_ctype below is derived from the first input (legacy
            # behavior); an input-less graph cannot be exported as-is.
            raise RuntimeError(
                "Export graph has no input: cannot infer the memory buffer C type.")

        ROOT = Path(__file__).resolve().parents[0]
        generate_file(
            str(dnn_folder / "src" / "forward.cpp"),
            str(ROOT / "templates" / "forward.jinja"),
            forward_function=forward_func,
            headers=set(list_configs),
            actions=list_actions,
            mem_ctype=inputs_dtype[0],  # Legacy behavior ...
            peak_mem=peak_mem
        )

        # Generate dnn API
        generate_file(
            str(dnn_folder / "include" / "forward.hpp"),
            str(ROOT / "templates" / "forward_header.jinja"),
            libraries=[],
            functions=[forward_func],
        )

        if len(outputs_name) != len(outputs_dtype) or len(outputs_name) != len(outputs_size):
            raise RuntimeError(
                "FATAL: Output args lists do not have the same length; this is an internal bug.")
        generate_file(
            str(export_folder / "main.cpp"),
            str(ROOT / "templates" / "main.jinja"),
            func_name=func_name,
            inputs_name=inputs_name,
            outputs_name=outputs_name,
            outputs_dtype=outputs_dtype,
            outputs_size=outputs_size
        )

        # Copy all static files in the export
        for source, destination in export_lib.static_files.items():
            copy_file(source, str(export_folder / destination))
{# Jinja template for the generated dnn/src/forward.cpp.
   Context variables: headers (per-layer configuration includes),
   mem_ctype (C type of the scratch buffer), peak_mem (its element count),
   forward_function (full C++ signature, no trailing ';'),
   actions (ordered kernel-call statements). -#}
#include <stdint.h>
#ifdef SAVE_OUTPUTS
#include <sys/types.h>
#include <sys/stat.h>
#endif
#include "network/rescaling.hpp"
// Layer & memory configurations
{%- for header in headers %}
#include "{{ header }}"
{%- endfor %}
// Memory block
{#- Static scratch buffer: peak_mem elements of mem_ctype, presumably
    shared by the layer kernels below — TODO confirm against kernels. #}
static {{mem_ctype}} mem[{{peak_mem}}];
{# Forward function #}
{#- Support multiple inputs with different datatypes and multiple outputs with different datatypes -#}
{{ forward_function }}
{
#ifdef SAVE_OUTPUTS
// Creation of the outputs directory
struct stat st {};
if (stat("outputs", &st) == -1) {
mkdir("outputs", 0700);
}
#endif
{#- One statement per scheduled node, in execution order. #}
{%- for action in actions %}
{{ action }}
{%- endfor %}
}
{# Jinja template for the generated dnn/include/forward.hpp.
   Context variables: libraries (system headers to include),
   functions (C++ signatures WITHOUT trailing ';').
   Fixes: stray '$' in front of '#ifndef' broke the include guard, and the
   emitted declarations were missing the terminating ';'. -#}
#ifndef DNN_HPP
#define DNN_HPP
{#- For libraries #}
{% for lib in libraries %}
#include <{{ lib }}>
{%- endfor %}
{% for func in functions %}
{{ func }};
{% endfor %}
#endif /* DNN_HPP */
{# Jinja template for the generated main.cpp: allocates the output buffers,
   calls the generated forward function and prints every output.
   Context variables: func_name, inputs_name (arrays expected to be defined
   in inputs.h — TODO confirm), outputs_name / outputs_dtype / outputs_size
   (parallel lists).
   Fixes: included "dnn.hpp" although the scheduler export generates
   include/forward.hpp (include path must be on the compiler's -I — verify
   build flags); joined inputs and outputs separately, emitting a stray
   comma when the model has no input. -#}
#include <iostream>
#include "forward.hpp"
#include "inputs.h"
int main()
{
    // Initialize the output arrays
{%- for i in range(outputs_name | length) %}
    {{ outputs_dtype[i] }} {{ outputs_name[i] }}[{{ outputs_size[i] }}];
{% endfor -%}
    // Call the forward function
    {{ func_name }}({{ (inputs_name + outputs_name) | join(", ") }});
    // Print the results of each output
{%- for i in range(outputs_name | length) %}
    std::cout << "{{ outputs_name[i] }}:" << std::endl;
    for (int i = 0; i < {{ outputs_size[i] }}; ++i) {
        std::cout << {{ outputs_name[i] }}[i] << " ";
    }
    std::cout << std::endl;
{% endfor %}
    return 0;
}
...@@ -35,7 +35,6 @@ def compute_default_mem_info(scheduler: aidge_core.Scheduler) -> Tuple[int, List ...@@ -35,7 +35,6 @@ def compute_default_mem_info(scheduler: aidge_core.Scheduler) -> Tuple[int, List
# Increment offset for the next layer # Increment offset for the next layer
mem_size += mem mem_size += mem
print(f"Adding meminfo to {node.name()}")
mem_info[node] = node_mem_info mem_info[node] = node_mem_info
else: else:
mem_info[node] = [] # No meminfo for producer mem_info[node] = [] # No meminfo for producer
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment