From 987cbfc87364886e95b22ab97d85b961daf4fed0 Mon Sep 17 00:00:00 2001
From: cmoineau <cyril.moineau@cea.fr>
Date: Tue, 23 Jul 2024 06:21:16 +0000
Subject: [PATCH] Introduce Scheduler export + move generic C++ file generation
 to core.

---
 aidge_core/export_utils/__init__.py           |   3 +-
 aidge_core/export_utils/export_registry.py    |  13 +-
 aidge_core/export_utils/node_export.py        |   8 +-
 aidge_core/export_utils/scheduler_export.py   | 116 ++++++++++++++++++
 .../export_utils/templates/forward.jinja      |  33 +++++
 .../templates/forward_header.jinja            |  13 ++
 aidge_core/export_utils/templates/main.jinja  |  26 ++++
 aidge_core/mem_info.py                        |   1 -
 8 files changed, 208 insertions(+), 5 deletions(-)
 create mode 100644 aidge_core/export_utils/scheduler_export.py
 create mode 100644 aidge_core/export_utils/templates/forward.jinja
 create mode 100644 aidge_core/export_utils/templates/forward_header.jinja
 create mode 100644 aidge_core/export_utils/templates/main.jinja

diff --git a/aidge_core/export_utils/__init__.py b/aidge_core/export_utils/__init__.py
index 00e22a8ef..403036030 100644
--- a/aidge_core/export_utils/__init__.py
+++ b/aidge_core/export_utils/__init__.py
@@ -1,3 +1,4 @@
 from .node_export import ExportNode, ExportNodeCpp
-from .code_generation import generate_file, generate_str
+from .code_generation import generate_file, generate_str, copy_file
 from .export_registry import ExportLib, operator_register
+from .scheduler_export import ExportScheduler
diff --git a/aidge_core/export_utils/export_registry.py b/aidge_core/export_utils/export_registry.py
index b0f77a783..1d6dcde88 100644
--- a/aidge_core/export_utils/export_registry.py
+++ b/aidge_core/export_utils/export_registry.py
@@ -12,12 +12,23 @@ LANGUAGE = Enum('LANGUAGE', ['Cpp/C'])
 class ExportLib(): # Should be abstract ?
     """Aidge export lib, define a registry
     """
+    # PUBLIC
     # Lib name usefull ?
-    _name:str = None
+    # Help define namespace
+    name:str = None
+    # key: Path where static file is
+    # Value: Path where to copy the file relative to the export root
+    static_files:Dict[str, str] = {}
+    # PRIVATE
     # Registry of exportNode
     _export_node_registry:Dict[str, List[ExportNode]] = {}
     # The language type usefull ?
     _language: LANGUAGE = None
+    _compilo:str = None
+
+
+
+
     def __init__(self) -> None:
          raise RuntimeError("ExportLib should not be instanciated")
     @classmethod
diff --git a/aidge_core/export_utils/node_export.py b/aidge_core/export_utils/node_export.py
index df698ca8e..a6a370c6b 100644
--- a/aidge_core/export_utils/node_export.py
+++ b/aidge_core/export_utils/node_export.py
@@ -113,7 +113,7 @@ class ExportNode(ABC):
     """
 
     @abstractmethod
-    def __init__(self, aidge_node: aidge_core.Node, mem_info: List[dict]) -> None:
+    def __init__(self, aidge_node: aidge_core.Node, mem_info: List[dict], is_input: bool, is_output: bool) -> None:
         """Create ExportNode and retieve attriubtes from ``aidge_node``:
         """
 
@@ -136,6 +136,7 @@ class ExportNode(ABC):
 
         self.attributes["in_name"] = [None] * self.attributes["nb_in"]
         self.attributes["in_dims"] = [None] * self.attributes["nb_in"]
+        self.attributes["in_size"] = [None] * self.attributes["nb_in"]
         self.attributes["in_dformat"] = [None] * self.attributes["nb_in"]
         self.attributes["in_format"] = [None] * self.attributes["nb_in"]
         self.attributes["in_dtype"] = [None] * self.attributes["nb_in"]
@@ -146,6 +147,7 @@ class ExportNode(ABC):
 
         self.attributes["out_name"] = [None] * self.attributes["nb_out"]
         self.attributes["out_dims"] = [None] * self.attributes["nb_out"]
+        self.attributes["out_size"] = [None] * self.attributes["nb_out"]
         self.attributes["out_dformat"] = [None] * self.attributes["nb_out"]
         self.attributes["out_format"] = [None] * self.attributes["nb_out"]
         self.attributes["out_dtype"] = [None] * self.attributes["nb_out"]
@@ -175,6 +177,7 @@ class ExportNode(ABC):
                 tensor = self.operator.get_input(idx)
                 self.attributes["in_name"][idx] = f"{self.attributes['name']}_input_{idx}" if parent_node is None else f"{parent_node.name()}_output_{out_id}"
                 self.attributes["in_dims"][idx] = tensor.dims()
+                self.attributes["in_size"][idx] = tensor.size()
                 self.attributes["in_dformat"][idx] = tensor.dformat()
                 self.attributes["in_format"][idx] = aidge_core.format_as(tensor.dformat())
                 self.attributes["in_dtype"][idx] = tensor.dtype()
@@ -192,6 +195,7 @@ class ExportNode(ABC):
                 tensor = self.operator.get_output(idx)
                 self.attributes["out_name"][idx] = f"{self.attributes['name']}_output_{idx}"
                 self.attributes["out_dims"][idx] = tensor.dims()
+                self.attributes["out_size"][idx] = tensor.size()
                 self.attributes["out_dformat"][idx] = tensor.dformat()
                 self.attributes["out_format"][idx] = aidge_core.format_as(tensor.dformat())
                 self.attributes["out_dtype"][idx] = tensor.dtype()
@@ -289,7 +293,7 @@ class ExportNodeCpp(ExportNode):
             )
             kernel_include_list.append(
                 self.kernels_path + "/" + kernel_path.stem + kernel_path.suffix)
-        path_to_definition = f"layers/{self.attributes['name']}.h"
+        path_to_definition = f"include/layers/{self.attributes['name']}.h"
         code_generation.generate_file(
             str(export_folder / path_to_definition),
             self.config_template,
diff --git a/aidge_core/export_utils/scheduler_export.py b/aidge_core/export_utils/scheduler_export.py
new file mode 100644
index 000000000..be7815f3b
--- /dev/null
+++ b/aidge_core/export_utils/scheduler_export.py
@@ -0,0 +1,116 @@
+import aidge_core
+import os
+import shutil
+from pathlib import Path
+from aidge_core.export_utils import ExportLib, generate_file, copy_file
+from typing import List, Tuple
+
+
+class ExportScheduler():
+    def __init__(self, scheduler: aidge_core.Scheduler, *args, **kwargs):
+        self.scheduler = scheduler
+        self.graphview = scheduler.graph_view()
+
+    def export(self,
+               export_folder_path: str,
+               export_lib: ExportLib = None,
+               platform=None,
+               memory_manager=None,
+               memory_manager_args=None
+               ) -> None:
+        export_folder = Path().absolute() / export_folder_path
+
+        os.makedirs(str(export_folder), exist_ok=True)
+
+        dnn_folder = export_folder / "dnn"
+        os.makedirs(str(dnn_folder), exist_ok=True)
+        if memory_manager_args is None:
+            memory_manager_args = {}
+        peak_mem, mem_info = memory_manager(
+            self.scheduler, **memory_manager_args)
+
+        # List of function call
+        list_actions: List[str] = []
+        # List of headers to include to get the configuration files
+        list_configs: List[str] = []
+
+        inputs_name: List[str] = []
+        inputs_dtype: List[str] = []
+        outputs_name: List[str] = []
+        outputs_dtype: List[str] = []
+        outputs_size: List[int] = []
+
+        list_forward_nodes = self.scheduler.get_static_scheduling()
+        # If exportLib define use it
+        # else parse component in platform
+        if export_lib is not None:
+            for node in list_forward_nodes:
+                if export_lib.exportable(node):
+
+                    is_input = node in self.graphview.get_input_nodes()
+                    is_output = node in self.graphview.get_output_nodes()
+                    op = export_lib.get_export_node(node)(
+                        node, mem_info[node], is_input, is_output)
+                    # For configuration files
+                    list_configs = op.export(dnn_folder, list_configs)
+                    # For forward file
+                    list_actions = op.forward(list_actions)
+                    if is_input:
+                        for idx in range(len(node.inputs())):
+                            inputs_name.append(op.attributes["in_name"][idx])
+                            inputs_dtype.append(
+                                op.attributes["in_cdtype"][idx])
+                    if is_output:
+                        for idx in range(len(node.outputs())):
+                            outputs_name.append(op.attributes["out_name"][idx])
+                            outputs_dtype.append(
+                                op.attributes["out_cdtype"][idx])
+                            outputs_size.append(op.attributes["out_size"][idx])
+                else:
+                    raise RuntimeError(
+                        f"Operator not supported: {node.type()} for export lib {export_lib.name} !")
+        else:
+            raise ValueError("Current export only support export lib.")
+
+        func_name = "model_forward"
+        args = ", ".join([f"const {dtype}* {name}" for name,
+                         dtype in zip(inputs_name, inputs_dtype)] +
+                         [f"{dtype}* {name}" for name,
+                          dtype in zip(outputs_name, outputs_dtype)])
+        forward_func = f"void {func_name}({args})"
+
+        ROOT = Path(__file__).resolve().parents[0]
+
+        generate_file(
+            str(dnn_folder / "src" / "forward.cpp"),
+            str(ROOT / "templates" / "forward.jinja"),
+            forward_function=forward_func,
+            headers=set(list_configs),
+            actions=list_actions,
+            mem_ctype=inputs_dtype[0],  # Legacy behavior ...
+            peak_mem=peak_mem
+        )
+
+        # Generate dnn API
+        generate_file(
+            str(dnn_folder / "include" / "dnn.hpp"),
+            str(ROOT / "templates" / "forward_header.jinja"),
+            libraries=[],
+            functions=[forward_func],
+        )
+
+        if len(outputs_name) != len(outputs_dtype) or len(outputs_name) != len(outputs_size):
+            raise RuntimeError("FATAL: Output args list does not have the same length; this is an internal bug.")
+
+        generate_file(
+            str(export_folder / "main.cpp"),
+            str(ROOT / "templates" / "main.jinja"),
+            func_name=func_name,
+            inputs_name=inputs_name,
+            outputs_name=outputs_name,
+            outputs_dtype=outputs_dtype,
+            outputs_size=outputs_size
+        )
+        # Copy all static files in the export
+        for source, destination in export_lib.static_files.items():
+            copy_file(source, str(export_folder / destination))
diff --git a/aidge_core/export_utils/templates/forward.jinja b/aidge_core/export_utils/templates/forward.jinja
new file mode 100644
index 000000000..736fbd906
--- /dev/null
+++ b/aidge_core/export_utils/templates/forward.jinja
@@ -0,0 +1,33 @@
+
+#include <stdint.h>
+
+#ifdef SAVE_OUTPUTS
+#include <sys/types.h>
+#include <sys/stat.h>
+#endif
+#include "network/rescaling.hpp"
+
+// Layer & memory configurations
+{%- for header in headers %}
+#include "{{ header }}"
+{%- endfor %}
+
+// Memory block
+static {{mem_ctype}} mem[{{peak_mem}}];
+
+{# Forward function #}
+{#- Support multiple inputs with different datatypes and multiple outputs with different datatypes -#}
+{{ forward_function }}
+{
+    #ifdef SAVE_OUTPUTS
+    // Creation of the outputs directory
+    struct stat st {};
+    if (stat("outputs", &st) == -1) {
+        mkdir("outputs", 0700);
+    }
+    #endif
+
+    {%- for action in actions %}
+    {{ action }}
+    {%- endfor %}
+}
diff --git a/aidge_core/export_utils/templates/forward_header.jinja b/aidge_core/export_utils/templates/forward_header.jinja
new file mode 100644
index 000000000..ad9f77441
--- /dev/null
+++ b/aidge_core/export_utils/templates/forward_header.jinja
@@ -0,0 +1,13 @@
+#ifndef DNN_HPP
+#define DNN_HPP
+
+{#- For libraries #}
+{% for lib in libraries %}
+#include <{{ lib }}>
+{%- endfor %}
+
+{% for func in functions %}
+{{ func }}
+{% endfor %}
+
+#endif /* DNN_HPP */
diff --git a/aidge_core/export_utils/templates/main.jinja b/aidge_core/export_utils/templates/main.jinja
new file mode 100644
index 000000000..bbe0df8cb
--- /dev/null
+++ b/aidge_core/export_utils/templates/main.jinja
@@ -0,0 +1,26 @@
+
+#include <iostream>
+#include "dnn.hpp"
+#include "inputs.h"
+
+int main()
+{
+    // Initialize the output arrays
+    {%- for i in range(outputs_name | length) %}
+    {{ outputs_dtype[i] }} {{ outputs_name[i] }}[{{ outputs_size[i] }}];
+    {% endfor -%}
+
+    // Call the forward function
+    {{ func_name }}({{ inputs_name|join(", ") }}, {{ outputs_name|join(", ") }});
+
+
+    // Print the results of each output
+    {%- for i in range(outputs_name | length) %}
+    std::cout << "{{ outputs_name[i] }}:" << std::endl;
+    for (int i = 0; i < {{ outputs_size[i] }}; ++i) {
+        std::cout << {{ outputs_name[i] }}[i] << " ";
+    }
+    std::cout << std::endl;
+    {% endfor %}
+    return 0;
+}
diff --git a/aidge_core/mem_info.py b/aidge_core/mem_info.py
index 0541ccf4f..3319e4807 100644
--- a/aidge_core/mem_info.py
+++ b/aidge_core/mem_info.py
@@ -35,7 +35,6 @@ def compute_default_mem_info(scheduler: aidge_core.Scheduler) -> Tuple[int, List
 
                 # Increment offset for the next layer
                 mem_size += mem
-            print(f"Adding meminfo to {node.name()}")
             mem_info[node] = node_mem_info
         else:
             mem_info[node] = [] # No meminfo for producer
-- 
GitLab