diff --git a/.gitignore b/.gitignore
index f37378e300efeb5362882eb8d6eb59f028563a0e..306c9d2f5409bdf2a003e63b795885f268af391a 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,16 +1,26 @@
+# General
+.cache
+
 # C++ Build
 build*/
 install*/
+cppcheck-result.xml
 
 # VSCode
 .vscode
 
-# Python
+## Python
+# build/packaging artifacts
 *.so
 __pycache__
 *.pyc
-*.egg-info
 dist*/
+*.egg-info
+wheelhouse/*
+aidge_core/_version.py
+# test artifact
+aidge_core/dummy_export/*
+*xmlrunner-results.xml
 
 # Mermaid
 *.mmd
@@ -19,4 +29,4 @@ dist*/
 xml*/
 
 # ONNX
-*.onnx
\ No newline at end of file
+*.onnx
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 3efb308fa0f78dce35973ccb47d1303d7c8634af..62219b39769d32978f44b983632fd8a117d04205 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -1,21 +1,34 @@
 ###############################################################################
-#            Aidge Continious Integration and Continious Deployment            #
+#                 Aidge Continuous Integration and Deployment                 #
 #                                                                             #
 ###############################################################################
 
 stages:
-  # Analyse code
   - static_analysis
-  # Build Aidge
   - build
-  # Unit test stage
   - test
-  # Code coverage
   - coverage
+  - release
+  - deploy
 
 include:
-  - local: '/.gitlab/ci/_global.gitlab-ci.yml'
-  - local: '/.gitlab/ci/static_analysis.gitlab-ci.yml'
-  - local: '/.gitlab/ci/build.gitlab-ci.yml'
-  - local: '/.gitlab/ci/test.gitlab-ci.yml'
-  - local: '/.gitlab/ci/coverage.gitlab-ci.yml'
+  - project: 'eclipse/aidge/gitlab_shared_files'
+    ref: 'main'
+    file: 
+      # Choose which jobs to run by including the corresponding files.
+      - '.gitlab/ci/ubuntu_cpp.gitlab-ci.yml'
+
+      - '.gitlab/ci/ubuntu_python.gitlab-ci.yml'
+      - '.gitlab/ci/release/cibuildwheel_ubuntu.gitlab-ci.yml'   
+
+      - '.gitlab/ci/windows_cpp.gitlab-ci.yml'
+
+      - '.gitlab/ci/windows_python.gitlab-ci.yml'   
+      - '.gitlab/ci/release/cibuildwheel_windows.gitlab-ci.yml'
+
+# Required because test_export cannot run in parallel in the test and coverage stages.
+coverage:ubuntu_python:
+  needs:
+    - build:ubuntu_python
+    - test:ubuntu_python
diff --git a/.gitlab/ci/_global.gitlab-ci.yml b/.gitlab/ci/_global.gitlab-ci.yml
deleted file mode 100644
index 94e5658ff6adc8e07036d3d59ea39a68fbddc4bf..0000000000000000000000000000000000000000
--- a/.gitlab/ci/_global.gitlab-ci.yml
+++ /dev/null
@@ -1,24 +0,0 @@
-################################################################################
-# Centralized definitions of common job parameter values.                      #
-# Parameters with many optional configurations may be in separate files.       #
-#                                                                              #
-################################################################################
-variables:
-  GIT_SUBMODULE_STRATEGY: recursive
-  OMP_NUM_THREADS: 4
-  GIT_SSL_NO_VERIFY: 1
-  DEBIAN_FRONTEND: noninteractive
-
-# See https://docs.gitlab.com/ee/ci/yaml/workflow.html#switch-between-branch-pipelines-and-merge-request-pipelines
-workflow:
-  rules:
-    - if: $CI_PIPELINE_SOURCE == "merge_request_event"
-    - if: $CI_COMMIT_BRANCH && $CI_OPEN_MERGE_REQUESTS
-      when: never
-    - if: $CI_COMMIT_BRANCH
-
-default:
-  image: nvidia/cuda:12.2.0-devel-ubuntu22.04
-  before_script:
-    - apt update
-    - apt install -y cmake cppcheck python-is-python3 pip git gcovr
diff --git a/.gitlab/ci/build.gitlab-ci.yml b/.gitlab/ci/build.gitlab-ci.yml
deleted file mode 100644
index a4579e2951ccbafc4335ae428c62eba94c0757e5..0000000000000000000000000000000000000000
--- a/.gitlab/ci/build.gitlab-ci.yml
+++ /dev/null
@@ -1,154 +0,0 @@
-build:ubuntu_cpp:
-  stage: build
-  needs: []
-  tags:
-    - docker
-
-  script:
-    - mkdir -p build_cpp
-    - mkdir -p install_cpp
-    - cd build_cpp
-    - cmake -DCMAKE_INSTALL_PREFIX:PATH=../install_cpp -DCMAKE_BUILD_TYPE=Debug -DWERROR=ON -DCOVERAGE=ON ..
-    - make -j4 all install
-
-  artifacts:
-    expire_in: 1 week
-    paths:
-      - build_cpp/
-      - install_cpp/
-
-build:ubuntu_cpp_g++10:
-  stage: build
-  needs: []
-  tags:
-    - docker
-
-  script:
-    - apt install -y g++-10
-    - mkdir -p build_cpp
-    - mkdir -p install_cpp
-    - cd build_cpp
-    - export CXX=/usr/bin/g++-10
-    - cmake -DCMAKE_INSTALL_PREFIX:PATH=../install_cpp -DCMAKE_BUILD_TYPE=Debug -DWERROR=ON -DCOVERAGE=ON ..
-    - make -j4 all install
-
-build:ubuntu_cpp_g++12:
-  stage: build
-  needs: []
-  tags:
-    - docker
-
-  script:
-    - apt install -y g++-12
-    - mkdir -p build_cpp
-    - mkdir -p install_cpp
-    - cd build_cpp
-    - export CXX=/usr/bin/g++-12
-    - cmake -DCMAKE_INSTALL_PREFIX:PATH=../install_cpp -DCMAKE_BUILD_TYPE=Debug -DWERROR=ON -DCOVERAGE=ON ..
-    - make -j4 all install
-
-build:ubuntu_cpp_clang12:
-  stage: build
-  needs: []
-  tags:
-    - docker
-
-  script:
-    - apt install -y clang-12
-    - mkdir -p build_cpp
-    - mkdir -p install_cpp
-    - cd build_cpp
-    - export CXX=/usr/bin/clang++-12
-    - cmake -DCMAKE_INSTALL_PREFIX:PATH=../install_cpp -DCMAKE_BUILD_TYPE=Debug -DWERROR=ON -DCOVERAGE=ON ..
-    - make -j4 all install
-
-build:ubuntu_cpp_clang15:
-  stage: build
-  needs: []
-  tags:
-    - docker
-
-  script:
-    - apt install -y clang-15
-    - mkdir -p build_cpp
-    - mkdir -p install_cpp
-    - cd build_cpp
-    - export CXX=/usr/bin/clang++-15
-    - cmake -DCMAKE_INSTALL_PREFIX:PATH=../install_cpp -DCMAKE_BUILD_TYPE=Debug -DWERROR=ON -DCOVERAGE=ON ..
-    - make -j4 all install
-
-build:ubuntu_python:
-  stage: build
-  needs: []
-  tags:
-    - docker
-
-  script:
-    - python3 -m pip install virtualenv
-    - virtualenv venv
-    - source venv/bin/activate
-    # Numpy dependancy for unit test
-    - python3 -m pip install -r requirements.txt
-    - python3 -m pip install .
-  artifacts:
-    expire_in: 1 week
-    paths:
-      - venv/
-
-build:windows_cpp:
-  stage: build
-  needs: []
-  tags:
-    - windows
-
-  image: buildtools
-  before_script:
-    # Install Chocolatey
-    - Set-ExecutionPolicy Bypass -Scope Process -Force; [System.Net.ServicePointManager]::SecurityProtocol = [System.Net.ServicePointManager]::SecurityProtocol -bor 3072; iex ((New-Object System.Net.WebClient).DownloadString('https://community.chocolatey.org/install.ps1'))
-    # Install dependencies
-    - choco install cmake.install --installargs '"ADD_CMAKE_TO_PATH=System"' -Y
-    - choco install git -Y
-    - choco install python -Y
-    # Update PATH
-    - $env:Path = [System.Environment]::GetEnvironmentVariable("Path","Machine") + ";" + [System.Environment]::GetEnvironmentVariable("Path","User")
-  script:
-    - mkdir -p build_cpp
-    - mkdir -p install_cpp
-    - cd build_cpp
-    - cmake -DCMAKE_INSTALL_PREFIX:PATH=../install_cpp -DCMAKE_BUILD_TYPE=Debug ..
-    - cmake --build . -j2
-    - cmake --install . --config Debug
-
-  artifacts:
-    expire_in: 1 week
-    paths:
-      - build_cpp/
-      - install_cpp/
-
-build:windows_python:
-  stage: build
-  needs: []
-  tags:
-    - windows
-
-  image: buildtools
-  before_script:
-    # Install Chocolatey
-    - Set-ExecutionPolicy Bypass -Scope Process -Force; [System.Net.ServicePointManager]::SecurityProtocol = [System.Net.ServicePointManager]::SecurityProtocol -bor 3072; iex ((New-Object System.Net.WebClient).DownloadString('https://community.chocolatey.org/install.ps1'))
-    # Install dependencies
-    - choco install cmake.install --installargs '"ADD_CMAKE_TO_PATH=System"' -Y
-    - choco install git -Y
-    - choco install python -Y
-    # Update PATH
-    - $env:Path = [System.Environment]::GetEnvironmentVariable("Path","Machine") + ";" + [System.Environment]::GetEnvironmentVariable("Path","User")
-  script:
-    - python -m pip install virtualenv
-    - virtualenv venv
-    - venv\Scripts\Activate.ps1
-    # Numpy dependancy for unit test
-    - python -m pip install -r requirements.txt
-    - python -m pip install .
-  artifacts:
-    expire_in: 1 week
-    paths:
-      - venv/
diff --git a/.gitlab/ci/cibuildwheel_build_deps_before_build_wheel.ps1 b/.gitlab/ci/cibuildwheel_build_deps_before_build_wheel.ps1
new file mode 100644
index 0000000000000000000000000000000000000000..c2715ea5550432838d3cc8692e97204b278d2c85
--- /dev/null
+++ b/.gitlab/ci/cibuildwheel_build_deps_before_build_wheel.ps1
@@ -0,0 +1,23 @@
+$ErrorActionPreference = "Stop"
+
+# Retrieve and clean the dependencies string from the environment variable
+$AIDGE_DEPENDENCIES = $env:AIDGE_DEPENDENCIES -split ' '
+Write-Host "Aidge dependencies: $AIDGE_DEPENDENCIES"
+# Note: splitting an empty string still yields one element, so test the raw env var instead.
+if ([string]::IsNullOrWhiteSpace($env:AIDGE_DEPENDENCIES)) {
+    Write-Host "- No dependencies provided for the current repository"
+    New-Item -ItemType Directory -Force -Path ".\build" | Out-Null
+    Remove-Item -Path ".\build\*" -Recurse -Force
+} else {
+    Write-Host "Retrieving the given dependencies to build the current package: $AIDGE_DEPENDENCIES"
+    foreach ($dep in $AIDGE_DEPENDENCIES) {
+        Write-Host "Retrieving: $dep"
+        $curr_loc = $(Get-Location)
+        Set-Location ../$dep
+        Get-Location
+        Get-ChildItem .
+        New-Item -Path ".\build" -ItemType Directory -Force | Out-Null
+        Get-ChildItem -Path ".\build" -File | Remove-Item -Force
+        python -m pip install . -v
+        Set-Location $curr_loc
+    }
+}
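+
+# Illustrative invocation (assumed values; AIDGE_DEPENDENCIES is normally set by the CI job,
+# and each dependency is expected to live in a sibling directory, e.g. ..\aidge_backend_cpu):
+#   $env:AIDGE_DEPENDENCIES = "aidge_backend_cpu"
+#   .\.gitlab\ci\cibuildwheel_build_deps_before_build_wheel.ps1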
diff --git a/.gitlab/ci/cibuildwheel_build_deps_before_build_wheel.sh b/.gitlab/ci/cibuildwheel_build_deps_before_build_wheel.sh
new file mode 100755
index 0000000000000000000000000000000000000000..0303db5f056772d9f6227bf7a8b7910c2572ea1b
--- /dev/null
+++ b/.gitlab/ci/cibuildwheel_build_deps_before_build_wheel.sh
@@ -0,0 +1,40 @@
+#!/bin/bash
+set -e
+if [[ "$1" == "" ]]; then
+  echo "Builds the aidge dependencies inside the cibuildwheel container before building the wheel."
+  echo "The search path defines where the dependencies will be looked for."
+  echo "Hint: in cibuildwheel containers, host files are mounted on /host by default."
+  echo ""
+  echo "Usage: ./cibuildwheel_build_deps_before_build_wheel.sh <search_path>"
+fi
+set -x
+if [[ $AIDGE_DEPENDENCIES == "" ]]; then # case for aidge_core
+  mkdir -p build # create the build dir if it does not exist yet; it holds the C++ build
+  rm -rf build/* # build from scratch
+else
+  for repo in $AIDGE_DEPENDENCIES ; do # case for other projects
+    search_path=$1
+    REPO_PATH=$(find $search_path ! -writable -prune -o  -type d \
+                                    -name "$repo"                    \
+                                    -not -path "*/install/*"         \
+                                    -not -path "*/.git/*"            \
+                                    -not -path "*/miniconda/*"       \
+                                    -not -path "*/conda/*"           \
+                                    -not -path "*/.local/*"          \
+                                    -not -path "*/lib/*"             \
+                                    -not -path "*/$repo/$repo/*"     \
+                                    -not -path "*/proc/*"           \
+                                    -print -quit)
+    if [[ -z "$REPO_PATH" ]]; then
+      echo "ERROR: dependency $repo not found in search_path \"$search_path\". ABORTING."
+      exit 1
+    fi
+
+    cd $REPO_PATH
+    mkdir -p build # create the build dir if it does not exist yet; it holds the C++ build
+    rm -rf build/* # build from scratch
+    pip install . -v
+    cd -
+  done
+fi
+set +x
+set +e
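+
+# Illustrative invocation (assumed values; in cibuildwheel containers the host filesystem is
+# mounted on /host, and AIDGE_DEPENDENCIES is normally provided by the CI job):
+#   AIDGE_DEPENDENCIES="aidge_backend_cpu" ./cibuildwheel_build_deps_before_build_wheel.sh /host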
diff --git a/.gitlab/ci/coverage.gitlab-ci.yml b/.gitlab/ci/coverage.gitlab-ci.yml
deleted file mode 100644
index 3c7b7654190e0768adc6a904f1cb548f020b0c92..0000000000000000000000000000000000000000
--- a/.gitlab/ci/coverage.gitlab-ci.yml
+++ /dev/null
@@ -1,38 +0,0 @@
-coverage:ubuntu_cpp:
-  stage: coverage
-  needs: ["build:ubuntu_cpp"]
-  tags:
-    - docker
-  script:
-    - cd build_cpp
-    - ctest --output-on-failure
-    - gcovr --xml-pretty --exclude-unreachable-branches --print-summary -o coverage.xml --root ${CI_PROJECT_DIR} --filter '\.\./include/' --filter '\.\./src/'
-  coverage: /^\s*lines:\s*\d+.\d+\%/
-  artifacts:
-    name: ${CI_JOB_NAME}-${CI_COMMIT_REF_NAME}-${CI_COMMIT_SHA}
-    expire_in: 2 days
-    reports:
-      coverage_report:
-        coverage_format: cobertura
-        path: build_cpp/coverage.xml
-
-coverage:ubuntu_python:
-  stage: coverage
-  needs: ["build:ubuntu_python"]
-  tags:
-    - docker
-  script:
-    - source venv/bin/activate
-    - python3 -m pip install numpy coverage
-    - cd ${CI_PROJECT_NAME}
-    # Retrieve the installation path of the module, since it is installed with pip.
-    - export MODULE_LOCATION=`python -c "import ${CI_PROJECT_NAME} as _; print(_.__path__[0])"`
-    - python3 -m coverage run --source=$MODULE_LOCATION -m unittest discover -s unit_tests/ -v -b
-    - python3 -m coverage report
-    - python3 -m coverage xml
-  coverage: '/(?i)total.*? (100(?:\.0+)?\%|[1-9]?\d(?:\.\d+)?\%)$/'
-  artifacts:
-    reports:
-      coverage_report:
-        coverage_format: cobertura
-        path: ${CI_PROJECT_NAME}/coverage.xml
diff --git a/.gitlab/ci/static_analysis.gitlab-ci.yml b/.gitlab/ci/static_analysis.gitlab-ci.yml
deleted file mode 100644
index 3955b87d4efdd9b3610b661779ab9709320754f2..0000000000000000000000000000000000000000
--- a/.gitlab/ci/static_analysis.gitlab-ci.yml
+++ /dev/null
@@ -1,37 +0,0 @@
-static_analysis:cpp:
-  stage: static_analysis
-  tags:
-    - static_analysis
-  allow_failure: true
-  script:
-    - mkdir -p $CI_COMMIT_REF_NAME
-    - cppcheck -j 4 --enable=all --inconclusive --force --xml --xml-version=2 . 2> cppcheck-result.xml
-    - python -m pip install Pygments
-    - cppcheck-htmlreport --file=cppcheck-result.xml --report-dir=$CI_COMMIT_REF_NAME --source-dir=.
-    - python3 -m pip install -U cppcheck_codequality
-    - cppcheck-codequality --input-file=cppcheck-result.xml --output-file=cppcheck.json
-    - mkdir -p public/cpp
-    - mv $CI_COMMIT_REF_NAME public/cpp/
-  artifacts:
-    paths: 
-      - public
-    reports:
-      codequality: cppcheck.json
-
-static_analysis:python:
-  stage: static_analysis
-  tags:
-    - static_analysis
-  allow_failure: true
-  script:
-    - pip install pylint
-    - pip install pylint-gitlab
-    - pylint --rcfile=.pylintrc --exit-zero --output-format=pylint_gitlab.GitlabCodeClimateReporter ${CI_PROJECT_NAME}/ > codeclimate.json
-    - pylint --rcfile=.pylintrc --exit-zero --output-format=pylint_gitlab.GitlabPagesHtmlReporter ${CI_PROJECT_NAME}/ > pylint.html
-    - mkdir -p public/python/$CI_COMMIT_REF_NAME
-    - mv pylint.html public/python/$CI_COMMIT_REF_NAME/
-  artifacts:
-    paths:
-      - public
-    reports:
-      codequality: codeclimate.json
\ No newline at end of file
diff --git a/.gitlab/ci/test.gitlab-ci.yml b/.gitlab/ci/test.gitlab-ci.yml
deleted file mode 100644
index 81e6ca9ac5b868287aa0ef27040c0ead785d3639..0000000000000000000000000000000000000000
--- a/.gitlab/ci/test.gitlab-ci.yml
+++ /dev/null
@@ -1,48 +0,0 @@
-test:ubuntu_cpp:
-  stage: test
-  needs: ["build:ubuntu_cpp"]
-  tags:
-    - docker
-  script:
-    - cd build_cpp
-    - ctest --output-junit ctest-results.xml --output-on-failure
-  artifacts:
-    reports:
-      junit: build_cpp/ctest-results.xml
-
-test:ubuntu_python:
-  stage: test
-  needs: ["build:ubuntu_python"]
-  tags:
-    - docker
-  script:
-    - source venv/bin/activate
-    - cd ${CI_PROJECT_NAME}
-    - python3 -m pip install unittest-xml-reporting
-    - python3 -m pip list
-    # Run on discovery all tests located in core/unit_tests/python
-    - python3 -m xmlrunner discover -s unit_tests/ -v -b --output-file xmlrunner-results.xml
-  artifacts:
-    reports:
-      junit: ${CI_PROJECT_NAME}/xmlrunner-results.xml
-
-test:windows_cpp:
-  stage: test
-  needs: ["build:windows_cpp"]
-  tags:
-    - windows
-  image: buildtools
-  before_script:
-    # Install Chocolatey
-    - Set-ExecutionPolicy Bypass -Scope Process -Force; [System.Net.ServicePointManager]::SecurityProtocol = [System.Net.ServicePointManager]::SecurityProtocol -bor 3072; iex ((New-Object System.Net.WebClient).DownloadString('https://community.chocolatey.org/install.ps1'))
-    # Install dependencies
-    - choco install cmake.install --installargs '"ADD_CMAKE_TO_PATH=System"' -Y
-    - choco install python -Y
-    # Update PATH
-    - $env:Path = [System.Environment]::GetEnvironmentVariable("Path","Machine") + ";" + [System.Environment]::GetEnvironmentVariable("Path","User")
-  script:
-    - cd build_cpp
-    - ctest --output-junit ctest-results.xml --output-on-failure
-  artifacts:
-    reports:
-      junit: build_cpp/ctest-results.xml
diff --git a/.pylintrc b/.pylintrc
deleted file mode 100644
index 03c0cf31f3e63bcae09a45e9a8e6694a78d2f4b1..0000000000000000000000000000000000000000
--- a/.pylintrc
+++ /dev/null
@@ -1,644 +0,0 @@
-[MASTER]
-
-# A comma-separated list of package or module names from where C extensions may
-# be loaded. Extensions are loading into the active Python interpreter and may
-# run arbitrary code.
-extension-pkg-allow-list= aidge_core, torch, tensorflow
-
-# A comma-separated list of package or module names from where C extensions may
-# be loaded. Extensions are loading into the active Python interpreter and may
-# run arbitrary code. (This is an alternative name to extension-pkg-allow-list
-# for backward compatibility.)
-extension-pkg-whitelist=
-
-# Return non-zero exit code if any of these messages/categories are detected,
-# even if score is above --fail-under value. Syntax same as enable. Messages
-# specified are enabled, while categories only check already-enabled messages.
-fail-on=
-
-# Specify a score threshold to be exceeded before program exits with error.
-fail-under=0.0
-
-# Files or directories to be skipped. They should be base names, not paths.
-ignore=CVS
-
-# Add files or directories matching the regex patterns to the ignore-list. The
-# regex matches against paths.
-ignore-paths=
-
-# Files or directories matching the regex patterns are skipped. The regex
-# matches against base names, not paths.
-ignore-patterns=
-
-# Python code to execute, usually for sys.path manipulation such as
-# pygtk.require().
-#init-hook=
-
-# Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the
-# number of processors available to use.
-jobs=1
-
-# Control the amount of potential inferred values when inferring a single
-# object. This can help the performance when dealing with large functions or
-# complex, nested conditions.
-limit-inference-results=100
-
-# List of plugins (as comma separated values of python module names) to load,
-# usually to register additional checkers.
-load-plugins=
-
-# Pickle collected data for later comparisons.
-persistent=yes
-
-# When enabled, pylint would attempt to guess common misconfiguration and emit
-# user-friendly hints instead of false-positive error messages.
-suggestion-mode=yes
-
-# Allow loading of arbitrary C extensions. Extensions are imported into the
-# active Python interpreter and may run arbitrary code.
-unsafe-load-any-extension=no
-
-
-[MESSAGES CONTROL]
-
-# Only show warnings with the listed confidence levels. Leave empty to show
-# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED.
-confidence=
-
-# Disable the message, report, category or checker with the given id(s). You
-# can either give multiple identifiers separated by comma (,) or put this
-# option multiple times (only on the command line, not in the configuration
-# file where it should appear only once). You can also use "--disable=all" to
-# disable everything first and then reenable specific checks. For example, if
-# you want to run only the similarities checker, you can use "--disable=all
-# --enable=similarities". If you want to run only the classes checker, but have
-# no Warning level messages displayed, use "--disable=all --enable=classes
-# --disable=W".
-disable=print-statement,
-        parameter-unpacking,
-        unpacking-in-except,
-        old-raise-syntax,
-        backtick,
-        long-suffix,
-        old-ne-operator,
-        old-octal-literal,
-        import-star-module-level,
-        non-ascii-bytes-literal,
-        raw-checker-failed,
-        bad-inline-option,
-        locally-disabled,
-        file-ignored,
-        suppressed-message,
-        useless-suppression,
-        deprecated-pragma,
-        use-symbolic-message-instead,
-        apply-builtin,
-        basestring-builtin,
-        buffer-builtin,
-        cmp-builtin,
-        coerce-builtin,
-        execfile-builtin,
-        file-builtin,
-        long-builtin,
-        raw_input-builtin,
-        reduce-builtin,
-        standarderror-builtin,
-        unicode-builtin,
-        xrange-builtin,
-        coerce-method,
-        delslice-method,
-        getslice-method,
-        setslice-method,
-        no-absolute-import,
-        old-division,
-        dict-iter-method,
-        dict-view-method,
-        next-method-called,
-        metaclass-assignment,
-        indexing-exception,
-        raising-string,
-        reload-builtin,
-        oct-method,
-        hex-method,
-        nonzero-method,
-        cmp-method,
-        input-builtin,
-        round-builtin,
-        intern-builtin,
-        unichr-builtin,
-        map-builtin-not-iterating,
-        zip-builtin-not-iterating,
-        range-builtin-not-iterating,
-        filter-builtin-not-iterating,
-        using-cmp-argument,
-        eq-without-hash,
-        div-method,
-        idiv-method,
-        rdiv-method,
-        exception-message-attribute,
-        invalid-str-codec,
-        sys-max-int,
-        bad-python3-import,
-        deprecated-string-function,
-        deprecated-str-translate-call,
-        deprecated-itertools-function,
-        deprecated-types-field,
-        next-method-defined,
-        dict-items-not-iterating,
-        dict-keys-not-iterating,
-        dict-values-not-iterating,
-        deprecated-operator-function,
-        deprecated-urllib-function,
-        xreadlines-attribute,
-        deprecated-sys-function,
-        exception-escape,
-        comprehension-escape,
-        c-extension-no-member,
-        too-many-locals,
-        missing-class-docstring,
-        missing-function-docstring,
-        too-many-ancestor,
-        too-many-arguments,
-        protected-access,
-        too-many-branches,
-        too-many-ancestors,
-        wrong-import-order,
-        wrong-import-position,
-
-# Enable the message, report, category or checker with the given id(s). You can
-# either give multiple identifier separated by comma (,) or put this option
-# multiple time (only on the command line, not in the configuration file where
-# it should appear only once). See also the "--disable" option for examples.
-enable=c-extension-no-member
-
-
-[REPORTS]
-
-# Python expression which should return a score less than or equal to 10. You
-# have access to the variables 'error', 'warning', 'refactor', and 'convention'
-# which contain the number of messages in each category, as well as 'statement'
-# which is the total number of statements analyzed. This score is used by the
-# global evaluation report (RP0004).
-evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)
-
-# Template used to display messages. This is a python new-style format string
-# used to format the message information. See doc for all details.
-#msg-template=
-
-# Set the output format. Available formats are text, parseable, colorized, json
-# and msvs (visual studio). You can also give a reporter class, e.g.
-# mypackage.mymodule.MyReporterClass.
-output-format=text
-
-# Tells whether to display a full report or only the messages.
-reports=no
-
-# Activate the evaluation score.
-score=yes
-
-
-[REFACTORING]
-
-# Maximum number of nested blocks for function / method body
-max-nested-blocks=5
-
-# Complete name of functions that never returns. When checking for
-# inconsistent-return-statements if a never returning function is called then
-# it will be considered as an explicit return statement and no message will be
-# printed.
-never-returning-functions=sys.exit,argparse.parse_error
-
-
-[BASIC]
-
-# Naming style matching correct argument names.
-argument-naming-style=snake_case
-
-# Regular expression matching correct argument names. Overrides argument-
-# naming-style.
-#argument-rgx=
-
-# Naming style matching correct attribute names.
-attr-naming-style=snake_case
-
-# Regular expression matching correct attribute names. Overrides attr-naming-
-# style.
-#attr-rgx=
-
-# Bad variable names which should always be refused, separated by a comma.
-bad-names=foo,
-          bar,
-          baz,
-          toto,
-          tutu,
-          tata
-
-# Bad variable names regexes, separated by a comma. If names match any regex,
-# they will always be refused
-bad-names-rgxs=
-
-# Naming style matching correct class attribute names.
-class-attribute-naming-style=any
-
-# Regular expression matching correct class attribute names. Overrides class-
-# attribute-naming-style.
-#class-attribute-rgx=
-
-# Naming style matching correct class constant names.
-class-const-naming-style=UPPER_CASE
-
-# Regular expression matching correct class constant names. Overrides class-
-# const-naming-style.
-#class-const-rgx=
-
-# Naming style matching correct class names.
-class-naming-style=PascalCase
-
-# Regular expression matching correct class names. Overrides class-naming-
-# style.
-#class-rgx=
-
-# Naming style matching correct constant names.
-const-naming-style=UPPER_CASE
-
-# Regular expression matching correct constant names. Overrides const-naming-
-# style.
-#const-rgx=
-
-# Minimum line length for functions/classes that require docstrings, shorter
-# ones are exempt.
-docstring-min-length=-1
-
-# Naming style matching correct function names.
-function-naming-style=snake_case
-
-# Regular expression matching correct function names. Overrides function-
-# naming-style.
-#function-rgx=
-
-# Good variable names which should always be accepted, separated by a comma.
-good-names=i,
-           j,
-           k,
-           ex,
-           Run,
-           _,
-
-# Good variable names regexes, separated by a comma. If names match any regex,
-# they will always be accepted
-good-names-rgxs=
-
-# Include a hint for the correct naming format with invalid-name.
-include-naming-hint=no
-
-# Naming style matching correct inline iteration names.
-inlinevar-naming-style=any
-
-# Regular expression matching correct inline iteration names. Overrides
-# inlinevar-naming-style.
-#inlinevar-rgx=
-
-# Naming style matching correct method names.
-method-naming-style=snake_case
-
-# Regular expression matching correct method names. Overrides method-naming-
-# style.
-#method-rgx=
-
-# Naming style matching correct module names.
-module-naming-style=snake_case
-
-# Regular expression matching correct module names. Overrides module-naming-
-# style.
-#module-rgx=
-
-# Colon-delimited sets of names that determine each other's naming style when
-# the name regexes allow several styles.
-name-group=
-
-# Regular expression which should only match function or class names that do
-# not require a docstring.
-no-docstring-rgx=^_
-
-# List of decorators that produce properties, such as abc.abstractproperty. Add
-# to this list to register other decorators that produce valid properties.
-# These decorators are taken in consideration only for invalid-name.
-property-classes=abc.abstractproperty
-
-# Naming style matching correct variable names.
-variable-naming-style=snake_case
-
-# Regular expression matching correct variable names. Overrides variable-
-# naming-style.
-#variable-rgx=
-
-
-[FORMAT]
-
-# Expected format of line ending, e.g. empty (any line ending), LF or CRLF.
-expected-line-ending-format=
-
-# Regexp for a line that is allowed to be longer than the limit.
-ignore-long-lines=^\s*(# )?<?https?://\S+>?$
-
-# Number of spaces of indent required inside a hanging or continued line.
-indent-after-paren=4
-
-# String used as indentation unit. This is usually "    " (4 spaces) or "\t" (1
-# tab).
-indent-string='    '
-
-# Maximum number of characters on a single line.
-max-line-length=200
-
-# Maximum number of lines in a module.
-max-module-lines=1000
-
-# Allow the body of a class to be on the same line as the declaration if body
-# contains single statement.
-single-line-class-stmt=no
-
-# Allow the body of an if to be on the same line as the test if there is no
-# else.
-single-line-if-stmt=no
-
-
-[LOGGING]
-
-# The type of string formatting that logging methods do. `old` means using %
-# formatting, `new` is for `{}` formatting.
-logging-format-style=old
-
-# Logging modules to check that the string format arguments are in logging
-# function parameter format.
-logging-modules=logging
-
-
-[MISCELLANEOUS]
-
-# List of note tags to take in consideration, separated by a comma.
-notes=FIXME,
-      XXX,
-      TODO
-
-# Regular expression of note tags to take in consideration.
-#notes-rgx=
-
-
-[SIMILARITIES]
-
-# Comments are removed from the similarity computation
-ignore-comments=yes
-
-# Docstrings are removed from the similarity computation
-ignore-docstrings=yes
-
-# Imports are removed from the similarity computation
-ignore-imports=no
-
-# Signatures are removed from the similarity computation
-ignore-signatures=no
-
-# Minimum lines number of a similarity.
-min-similarity-lines=4
-
-
-[SPELLING]
-
-# Limits count of emitted suggestions for spelling mistakes.
-max-spelling-suggestions=4
-
-# Spelling dictionary name. Available dictionaries: none. To make it work,
-# install the 'python-enchant' package.
-spelling-dict=
-
-# List of comma separated words that should be considered directives if they
-# appear and the beginning of a comment and should not be checked.
-spelling-ignore-comment-directives=fmt: on,fmt: off,noqa:,noqa,nosec,isort:skip,mypy:
-
-# List of comma separated words that should not be checked.
-spelling-ignore-words=
-
-# A path to a file that contains the private dictionary; one word per line.
-spelling-private-dict-file=
-
-# Tells whether to store unknown words to the private dictionary (see the
-# --spelling-private-dict-file option) instead of raising a message.
-spelling-store-unknown-words=no
-
-
-[STRING]
-
-# This flag controls whether inconsistent-quotes generates a warning when the
-# character used as a quote delimiter is used inconsistently within a module.
-check-quote-consistency=no
-
-# This flag controls whether the implicit-str-concat should generate a warning
-# on implicit string concatenation in sequences defined over several lines.
-check-str-concat-over-line-jumps=no
-
-
-[TYPECHECK]
-
-# List of decorators that produce context managers, such as
-# contextlib.contextmanager. Add to this list to register other decorators that
-# produce valid context managers.
-contextmanager-decorators=contextlib.contextmanager
-
-# List of members which are set dynamically and missed by pylint inference
-# system, and so shouldn't trigger E1101 when accessed. Python regular
-# expressions are accepted.
-generated-members=
-
-# Tells whether missing members accessed in mixin class should be ignored. A
-# mixin class is detected if its name ends with "mixin" (case insensitive).
-ignore-mixin-members=yes
-
-# Tells whether to warn about missing members when the owner of the attribute
-# is inferred to be None.
-ignore-none=yes
-
-# This flag controls whether pylint should warn about no-member and similar
-# checks whenever an opaque object is returned when inferring. The inference
-# can return multiple potential results while evaluating a Python object, but
-# some branches might not be evaluated, which results in partial inference. In
-# that case, it might be useful to still emit no-member and other checks for
-# the rest of the inferred objects.
-ignore-on-opaque-inference=yes
-
-# List of class names for which member attributes should not be checked (useful
-# for classes with dynamically set attributes). This supports the use of
-# qualified names.
-ignored-classes=optparse.Values,
-                thread._local,
-                _thread._local,
-                aidge.global_variables,
-                aidge.cells.abstract_cell.Trainable,
-                torch,
-                tensorflow,
-
-# List of module names for which member attributes should not be checked
-# (useful for modules/projects where namespaces are manipulated during runtime
-# and thus existing member attributes cannot be deduced by static analysis). It
-# supports qualified module names, as well as Unix pattern matching.
-ignored-modules= aidge_core
-
-# Show a hint with possible names when a member name was not found. The aspect
-# of finding the hint is based on edit distance.
-missing-member-hint=yes
-
-# The minimum edit distance a name should have in order to be considered a
-# similar match for a missing member name.
-missing-member-hint-distance=1
-
-# The total number of similar names that should be taken in consideration when
-# showing a hint for a missing member.
-missing-member-max-choices=1
-
-# List of decorators that change the signature of a decorated function.
-signature-mutators=
-
-
-[VARIABLES]
-
-# List of additional names supposed to be defined in builtins. Remember that
-# you should avoid defining new builtins when possible.
-additional-builtins=
-
-# Tells whether unused global variables should be treated as a violation.
-allow-global-unused-variables=yes
-
-# List of names allowed to shadow builtins
-allowed-redefined-builtins=
-
-# List of strings which can identify a callback function by name. A callback
-# name must start or end with one of those strings.
-callbacks=cb_,
-          _cb
-
-# A regular expression matching the name of dummy variables (i.e. expected to
-# not be used).
-dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_
-
-# Argument names that match this expression will be ignored. Default to name
-# with leading underscore.
-ignored-argument-names=_.*|^ignored_|^unused_
-
-# Tells whether we should check for unused import in __init__ files.
-init-import=no
-
-# List of qualified module names which can have objects that can redefine
-# builtins.
-redefining-builtins-modules=six.moves,past.builtins,future.builtins,builtins,io
-
-
-[CLASSES]
-
-# Warn about protected attribute access inside special methods
-check-protected-access-in-special-methods=no
-
-# List of method names used to declare (i.e. assign) instance attributes.
-defining-attr-methods=__init__,
-                      __new__,
-                      setUp,
-                      __post_init__
-
-# List of member names, which should be excluded from the protected access
-# warning.
-exclude-protected=_asdict,
-                  _fields,
-                  _replace,
-                  _source,
-                  _make
-
-# List of valid names for the first argument in a class method.
-valid-classmethod-first-arg=cls
-
-# List of valid names for the first argument in a metaclass class method.
-valid-metaclass-classmethod-first-arg=cls
-
-
-[DESIGN]
-
-# List of qualified class names to ignore when countint class parents (see
-# R0901)
-ignored-parents=
-
-# Maximum number of arguments for function / method.
-max-args=5
-
-# Maximum number of attributes for a class (see R0902).
-max-attributes=7
-
-# Maximum number of boolean expressions in an if statement (see R0916).
-max-bool-expr=5
-
-# Maximum number of branch for function / method body.
-max-branches=12
-
-# Maximum number of locals for function / method body.
-max-locals=15
-
-# Maximum number of parents for a class (see R0901).
-max-parents=7
-
-# Maximum number of public methods for a class (see R0904).
-max-public-methods=20
-
-# Maximum number of return / yield for function / method body.
-max-returns=6
-
-# Maximum number of statements in function / method body.
-max-statements=50
-
-# Minimum number of public methods for a class (see R0903).
-min-public-methods=2
-
-
-[IMPORTS]
-
-# List of modules that can be imported at any level, not just the top level
-# one.
-allow-any-import-level=
-
-# Allow wildcard imports from modules that define __all__.
-allow-wildcard-with-all=no
-
-# Analyse import fallback blocks. This can be used to support both Python 2 and
-# 3 compatible code, which means that the block might have code that exists
-# only in one or another interpreter, leading to false positives when analysed.
-analyse-fallback-blocks=no
-
-# Deprecated modules which should not be used, separated by a comma.
-deprecated-modules=
-
-# Output a graph (.gv or any supported image format) of external dependencies
-# to the given file (report RP0402 must not be disabled).
-ext-import-graph=
-
-# Output a graph (.gv or any supported image format) of all (i.e. internal and
-# external) dependencies to the given file (report RP0402 must not be
-# disabled).
-import-graph=
-
-# Output a graph (.gv or any supported image format) of internal dependencies
-# to the given file (report RP0402 must not be disabled).
-int-import-graph=
-
-# Force import order to recognize a module as part of the standard
-# compatibility libraries.
-known-standard-library=
-
-# Force import order to recognize a module as part of a third party library.
-known-third-party=enchant
-
-# Couples of modules and preferred modules, separated by a comma.
-preferred-modules=
-
-
-[EXCEPTIONS]
-
-# Exceptions that will emit a warning when being caught. Defaults to
-# "BaseException, Exception".
-overgeneral-exceptions=BaseException,
-                       Exception
\ No newline at end of file
diff --git a/CMakeLists.txt b/CMakeLists.txt
index ec6aacd723a50eba2bfed0184941410340c6a7aa..0e8c2979f0750ffccb22326ce923f8b1638099bd 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -1,21 +1,25 @@
 cmake_minimum_required(VERSION 3.15)
+set(CXX_STANDARD 14)
 
-file(READ "${CMAKE_SOURCE_DIR}/version.txt" version)
-file(READ "${CMAKE_SOURCE_DIR}/project_name.txt" project)
+file(STRINGS "${CMAKE_SOURCE_DIR}/version.txt" version)
 
-message(STATUS "Project name: ${project}")
+project(aidge_core
+        VERSION ${version}
+        DESCRIPTION "Core algorithms for operators and graph of the AIDGE framework" 
+        LANGUAGES CXX)
+message(STATUS "Project name: ${CMAKE_PROJECT_NAME}")
 message(STATUS "Project version: ${version}")
+add_definitions(-DPROJECT_VERSION="${version}")
 
-# Note : project name is {project} and python module name is also {project}
-set(module_name _${project}) # target name
-
 
-project(${project})
-set(CXX_STANDARD 14)
+# Note: project name is ${CMAKE_PROJECT_NAME} and the python module name is also ${CMAKE_PROJECT_NAME}
+set(module_name _${CMAKE_PROJECT_NAME}) # target name
 
 ##############################################
 # Define options
-option(PYBIND "python binding" ON)
+option(PYBIND "python binding" OFF)
 option(WERROR "Warning as error" OFF)
 option(TEST "Enable tests" ON)
 option(COVERAGE "Enable coverage" OFF)
@@ -24,7 +28,6 @@ option(ENABLE_ASAN "Enable ASan (AddressSanitizer) for runtime analysis of memor
 ##############################################
 # Import utils CMakeLists
 set(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} "${CMAKE_SOURCE_DIR}/cmake")
-include(PybindModuleCreation)
 
 if(CMAKE_COMPILER_IS_GNUCXX AND COVERAGE)
     Include(CodeCoverage)
@@ -34,10 +37,12 @@ endif()
 # Find system dependencies
 Include(FetchContent)
 
+set(FMT_VERSION 10.2.1)
+message(STATUS "Retrieving fmt ${FMT_VERSION} from git")
 FetchContent_Declare(
     fmt
     GIT_REPOSITORY https://github.com/fmtlib/fmt.git
-    GIT_TAG        10.2.1 # or a later release
+    GIT_TAG        ${FMT_VERSION} # or a later release
 )
 
 set(FMT_SYSTEM_HEADERS ON)
@@ -79,14 +84,15 @@ endif()
 
 # PYTHON BINDING
 if (PYBIND)
-    generate_python_binding(${project} ${module_name})
-
     # Handles Python + pybind11 headers dependencies
+    include(PybindModuleCreation)
+    generate_python_binding(${CMAKE_PROJECT_NAME} ${module_name})
+
     target_link_libraries(${module_name}
         PUBLIC
             pybind11::pybind11
         PRIVATE
-            Python::Python
+            Python::Module
         )
 endif()
 
@@ -133,11 +139,15 @@ endif()
 
 ##############################################
 # Installation instructions
+if(NOT "$ENV{AIDGE_INSTALL}" STREQUAL "")
+    set(CMAKE_INSTALL_PREFIX $ENV{AIDGE_INSTALL})
+    message(WARNING "CMAKE_INSTALL_PREFIX set from the AIDGE_INSTALL environment variable: ${CMAKE_INSTALL_PREFIX}")
+endif()
 
 include(GNUInstallDirs)
-set(INSTALL_CONFIGDIR ${CMAKE_INSTALL_LIBDIR}/cmake/${project})
+set(INSTALL_CONFIGDIR ${CMAKE_INSTALL_LIBDIR}/cmake/${CMAKE_PROJECT_NAME})
 
-install(TARGETS ${module_name} EXPORT ${project}-targets
+install(TARGETS ${module_name} EXPORT ${CMAKE_PROJECT_NAME}-targets
   LIBRARY DESTINATION ${CMAKE_INSTALL_LIBDIR}
   ARCHIVE DESTINATION ${CMAKE_INSTALL_LIBDIR}
   RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR}
@@ -148,8 +158,8 @@ install(DIRECTORY include/ DESTINATION ${CMAKE_INSTALL_INCLUDEDIR})
 
 #Export the targets to a script
 
-install(EXPORT ${project}-targets
- FILE "${project}-targets.cmake"
+install(EXPORT ${CMAKE_PROJECT_NAME}-targets
+ FILE "${CMAKE_PROJECT_NAME}-targets.cmake"
  DESTINATION ${INSTALL_CONFIGDIR}
 #  COMPONENT ${module_name}
 )
@@ -158,32 +168,37 @@ install(EXPORT ${project}-targets
 include(CMakePackageConfigHelpers)
 
 write_basic_package_version_file(
-    "${CMAKE_CURRENT_BINARY_DIR}/${project}-config-version.cmake"
+    "${CMAKE_CURRENT_BINARY_DIR}/${CMAKE_PROJECT_NAME}-config-version.cmake"
     VERSION ${version}
     COMPATIBILITY AnyNewerVersion
 )
 
-configure_package_config_file("${project}-config.cmake.in"
-    "${CMAKE_CURRENT_BINARY_DIR}/${project}-config.cmake"
+configure_package_config_file("${CMAKE_PROJECT_NAME}-config.cmake.in"
+    "${CMAKE_CURRENT_BINARY_DIR}/${CMAKE_PROJECT_NAME}-config.cmake"
     INSTALL_DESTINATION ${INSTALL_CONFIGDIR}
 )
 
 #Install the config, configversion and custom find modules
 install(FILES
-    "${CMAKE_CURRENT_BINARY_DIR}/${project}-config.cmake"
-    "${CMAKE_CURRENT_BINARY_DIR}/${project}-config-version.cmake"
+    "${CMAKE_CURRENT_BINARY_DIR}/${CMAKE_PROJECT_NAME}-config.cmake"
+    "${CMAKE_CURRENT_BINARY_DIR}/${CMAKE_PROJECT_NAME}-config-version.cmake"
     DESTINATION ${INSTALL_CONFIGDIR}
 )
 
 ##############################################
 ## Exporting from the build tree
-export(EXPORT ${project}-targets
-    FILE "${CMAKE_CURRENT_BINARY_DIR}/${project}-targets.cmake")
+message(STATUS "Exporting created targets to use them in another build")
+export(EXPORT ${CMAKE_PROJECT_NAME}-targets
+    FILE "${CMAKE_CURRENT_BINARY_DIR}/${CMAKE_PROJECT_NAME}-targets.cmake")
 
 
 ##############################################
 ## Add test
 if(TEST)
+    if(PYBIND)
+        message(FATAL_ERROR "PYBIND and TEST are both enabled, but the Python bindings cannot be compiled together with Catch2.\nEnable either PYBIND or TEST, not both.")
+    endif()
     enable_testing()
     add_subdirectory(unit_tests)
 endif()
+
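+# Typical configure invocations (illustrative; options and paths depend on your setup):
+#   cmake -DPYBIND=ON -DTEST=OFF ..   # build the Python bindings, skip the Catch2 unit tests
+#   cmake -DTEST=ON ..                # build the C++ library with its Catch2 unit tests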
diff --git a/MANIFEST.in b/MANIFEST.in
index 7a01a2d6c0caf880738df8393567fb169a07be7e..ae5b7c7c2e07eef97ef72bdb79cca94f8124981b 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -1,6 +1,9 @@
-include MANIFEST.in
-include LICENSE
-include README.md
-recursive-include aidge_core *
-include setup.py
-include version.txt
+include README.md LICENCE
+recursive-include aidge_core *.py 
+recursive-exclude aidge_core/unit_tests *.py
+
+recursive-include aidge_core/aidge_export_aidge *
+recursive-include include *.hpp
+recursive-include src *.cpp
+recursive-include python_binding *.cpp
+include CMakeLists.txt
diff --git a/README.md b/README.md
index 5b07e147cb05c2fa1a6d275d567dda218b131996..ef2191699d0f650ee714ff73a31c9f5c36f170fd 100644
--- a/README.md
+++ b/README.md
@@ -4,20 +4,18 @@
 
 You can find here the C++ code of the Core library of Aidge.
 
-## Pip installation
-
+[TOC]
 
+## Pip installation
 
 To install aidge_core using pip, run the following command in your python environnement :
 ``` bash
 pip install . -v
 ```
-
-**Note:** you can specify a custom install folder by setting an environment variable:
-
-``` bash
-export AIDGE_INSTALL='<path_to_aidge>/install'
-```
+> **Tip:** use environment variables to change the compilation options (see the example below):
+> - `AIDGE_INSTALL`: sets the installation folder. Defaults to `/usr/local/lib`.
+> - `AIDGE_PYTHON_BUILD_TYPE`: sets the compilation mode to **Debug** or **Release**.
+> - `AIDGE_BUILD_GEN`: sets the build backend.
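+
+For example, a minimal sketch (illustrative values, adapt the paths to your environment):
+``` bash
+export AIDGE_INSTALL='<path_to_aidge>/install'
+export AIDGE_PYTHON_BUILD_TYPE=Debug
+pip install . -v
+```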
 
 ## Standard C++ Compilation
 
@@ -47,15 +45,12 @@ make all install
 If you have compiled with PyBind you can find at the root of the ``build`` file the python lib ``aidge_core.cpython*.so``
 
 ## Run tests
-
 ### CPP
 
 Inside of the build file run:
 
 ```bash
-
 ctest --output-on-failure
-
 ```
 
 ### Python
diff --git a/aidge_core/__init__.py b/aidge_core/__init__.py
index 4234747b94b25b35a6836a36ad6331c1c8a4bc66..652f485a9d3de6869b55613549172d49913e8509 100644
--- a/aidge_core/__init__.py
+++ b/aidge_core/__init__.py
@@ -7,7 +7,8 @@ http://www.eclipse.org/legal/epl-2.0.
 
 SPDX-License-Identifier: EPL-2.0
 """
-from aidge_core.aidge_core import * # import so generated by PyBind
-from aidge_core.export_utils import ExportNode, generate_file, generate_str
-import aidge_core.utils
-from aidge_core.aidge_export_aidge import *
+from .aidge_core import * # import so generated by PyBind
+from .export_utils import ExportNode, generate_file, generate_str
+from .aidge_export_aidge import *
+from . import utils
+from ._version import *
diff --git a/aidge_core/aidge_export_aidge/static/CMakeLists.txt b/aidge_core/aidge_export_aidge/static/CMakeLists.txt
index c52e09e82699b1f2c0f8e10ed9f4ab83fbb0f10f..4220bb9d502474301cf748252930ff8bdd5c97e3 100644
--- a/aidge_core/aidge_export_aidge/static/CMakeLists.txt
+++ b/aidge_core/aidge_export_aidge/static/CMakeLists.txt
@@ -1,40 +1,47 @@
 cmake_minimum_required(VERSION 3.15)
+set(CXX_STANDARD 14)
 
-
+file(STRINGS "${CMAKE_SOURCE_DIR}/project_name.txt" project_name)
 file(STRINGS "${CMAKE_SOURCE_DIR}/version.txt" version)
-file(STRINGS "${CMAKE_SOURCE_DIR}/project_name.txt" project)
 
-message(STATUS "Project name: ${project}")
-message(STATUS "Project version: ${version}")
+project(${project_name}
+        VERSION ${version}
+        DESCRIPTION "Export of aidge"
+        LANGUAGES CXX)
 
-# Note : project name is {project} and python module name is also {project}
-set(module_name _${project}) # target name
+message(STATUS "Project name: ${CMAKE_PROJECT_NAME}")
+message(STATUS "Project version: ${version}")
 
-project(${project})
-set(CXX_STANDARD 14)
+# Note : project name is ${CMAKE_PROJECT_NAME} and python module name is also ${CMAKE_PROJECT_NAME}
+set(module_name _${CMAKE_PROJECT_NAME}) # target name
 
 ##############################################
 # Define options
 option(PYBIND "python binding" ON)
 option(WERROR "Warning as error" OFF)
-option(TEST "Enable tests" ON)
+option(TEST "Enable tests" OFF)
 option(COVERAGE "Enable coverage" OFF)
 option(ENABLE_ASAN "Enable ASan (AddressSanitizer) for runtime analysis of memory use (over/underflow, memory leak, ...)" OFF)
 
 ##############################################
 # Import utils CMakeLists
 set(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} "${CMAKE_SOURCE_DIR}/cmake")
-include(PybindModuleCreation)
 
 if(CMAKE_COMPILER_IS_GNUCXX AND COVERAGE)
     Include(CodeCoverage)
 endif()
 
 ##############################################
-# Find system dependencies
+# Find dependencies
+if(NOT "$ENV{AIDGE_INSTALL}" STREQUAL "")
+    set(CMAKE_INSTALL_PREFIX $ENV{AIDGE_INSTALL})
+    list(APPEND CMAKE_PREFIX_PATH $ENV{AIDGE_INSTALL})
+    message(WARNING "Env var AIDGE_INSTALL detected: $ENV{AIDGE_INSTALL}. CMAKE_INSTALL_PREFIX set to AIDGE_INSTALL and AIDGE_INSTALL appended to CMAKE_PREFIX_PATH"
+                    "\n\tCMAKE_INSTALL_PREFIX = ${CMAKE_INSTALL_PREFIX}"
+                    "\n\tCMAKE_PREFIX_PATH = ${CMAKE_PREFIX_PATH}")
+endif()
 find_package(aidge_core REQUIRED)
-# Example of adding dependency to backend CPU
-# find_package(aidge_backend_cpu REQUIRED)
+# find_package(aidge_backend_cpu REQUIRED) # example: add aidge_backend_cpu as a dependency of your export
 
 ##############################################
 # Create target and set properties
@@ -42,15 +49,30 @@ file(GLOB_RECURSE src_files "src/*.cpp")
 file(GLOB_RECURSE inc_files "include/*.hpp")
 
 add_library(${module_name} ${src_files} ${inc_files})
+
 target_link_libraries(${module_name}
     PUBLIC
-        _aidge_core # _ is added because we link the target not the project
-        # _aidge_backend_cpu  # Example of adding dependency to backend CPUs
+        _aidge_core # _ is added because we link the exported target and not the project
+        # _aidge_backend_cpu # example: link aidge_backend_cpu as a dependency of your export
 )
 
 #Set target properties
 set_property(TARGET ${module_name} PROPERTY POSITION_INDEPENDENT_CODE ON)
 
+# PYTHON BINDING
+if (PYBIND)
+    # Handles Python + pybind11 headers dependencies
+    include(PybindModuleCreation)
+    generate_python_binding(${CMAKE_PROJECT_NAME} ${module_name})
+
+    target_link_libraries(${module_name}
+        PUBLIC
+            pybind11::pybind11
+        PRIVATE
+            Python::Python
+        )
+endif()
+
 if( ${ENABLE_ASAN} )
     message("Building ${module_name} with ASAN.")
     set(SANITIZE_FLAGS -fsanitize=address -fno-omit-frame-pointer)
@@ -72,17 +94,7 @@ target_include_directories(${module_name}
         ${CMAKE_CURRENT_SOURCE_DIR}/src
 )
 
-# PYTHON BINDING
-if (PYBIND)
-    generate_python_binding(${project} ${module_name})
-
-    # Handles Python + pybind11 headers dependencies
-    target_link_libraries(${module_name}
-        PUBLIC
-            pybind11::pybind11
-        )
-endif()
-
+target_link_libraries(${module_name} PUBLIC fmt::fmt)
 target_compile_features(${module_name} PRIVATE cxx_std_14)
 
 target_compile_options(${module_name} PRIVATE
@@ -98,22 +110,19 @@ endif()
 
 ##############################################
 # Installation instructions
-
 include(GNUInstallDirs)
-set(INSTALL_CONFIGDIR ${CMAKE_INSTALL_LIBDIR}/cmake/${project})
+set(INSTALL_CONFIGDIR ${CMAKE_INSTALL_LIBDIR}/cmake/${CMAKE_PROJECT_NAME})
 
-install(TARGETS ${module_name} EXPORT ${project}-targets
+install(TARGETS ${module_name} EXPORT ${CMAKE_PROJECT_NAME}-targets
   LIBRARY DESTINATION ${CMAKE_INSTALL_LIBDIR}
   ARCHIVE DESTINATION ${CMAKE_INSTALL_LIBDIR}
   RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR}
 )
-
 install(DIRECTORY include/ DESTINATION ${CMAKE_INSTALL_INCLUDEDIR})
 
 #Export the targets to a script
-
-install(EXPORT ${project}-targets
- FILE "${project}-targets.cmake"
+install(EXPORT ${CMAKE_PROJECT_NAME}-targets
+ FILE "${CMAKE_PROJECT_NAME}-targets.cmake"
  DESTINATION ${INSTALL_CONFIGDIR}
  COMPONENT ${module_name}
 )
@@ -121,26 +130,27 @@ install(EXPORT ${project}-targets
 #Create a ConfigVersion.cmake file
 include(CMakePackageConfigHelpers)
 write_basic_package_version_file(
-    "${CMAKE_CURRENT_BINARY_DIR}/${project}-config-version.cmake"
+    "${CMAKE_CURRENT_BINARY_DIR}/${CMAKE_PROJECT_NAME}-config-version.cmake"
     VERSION ${version}
     COMPATIBILITY AnyNewerVersion
 )
 
-configure_package_config_file("${project}-config.cmake.in"
-    "${CMAKE_CURRENT_BINARY_DIR}/${project}-config.cmake"
+configure_package_config_file("${CMAKE_PROJECT_NAME}-config.cmake.in"
+    "${CMAKE_CURRENT_BINARY_DIR}/${CMAKE_PROJECT_NAME}-config.cmake"
     INSTALL_DESTINATION ${INSTALL_CONFIGDIR}
 )
 
 #Install the config, configversion and custom find modules
 install(FILES
-    "${CMAKE_CURRENT_BINARY_DIR}/${project}-config.cmake"
-    "${CMAKE_CURRENT_BINARY_DIR}/${project}-config-version.cmake"
+    "${CMAKE_CURRENT_BINARY_DIR}/${CMAKE_PROJECT_NAME}-config.cmake"
+    "${CMAKE_CURRENT_BINARY_DIR}/${CMAKE_PROJECT_NAME}-config-version.cmake"
     DESTINATION ${INSTALL_CONFIGDIR}
 )
 
 ##############################################
 ## Exporting from the build tree
-export(EXPORT ${project}-targets
+message(STATUS "Exporting created targets to use them in another build")
+export(EXPORT ${CMAKE_PROJECT_NAME}-targets
-    FILE "${CMAKE_CURRENT_BINARY_DIR}/${project}-targets.cmake")
+    FILE "${CMAKE_CURRENT_BINARY_DIR}/${CMAKE_PROJECT_NAME}-targets.cmake")
 
 # Compile executable
diff --git a/aidge_core/aidge_export_aidge/static/cmake/PybindModuleCreation.cmake b/aidge_core/aidge_export_aidge/static/cmake/PybindModuleCreation.cmake
index 87e70fc38c9e4ec4ddb44cbe5d7fb2a31c2e94d6..193f3332231ac384daab2e5bf75c1a5de0d2bf1d 100644
--- a/aidge_core/aidge_export_aidge/static/cmake/PybindModuleCreation.cmake
+++ b/aidge_core/aidge_export_aidge/static/cmake/PybindModuleCreation.cmake
@@ -1,15 +1,14 @@
-function(generate_python_binding name target_to_bind) 
+function(generate_python_binding name target_to_bind)
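+    # Example call, as used by the export CMakeLists in this repository (illustrative):
+    #   generate_python_binding(${CMAKE_PROJECT_NAME} ${module_name})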
+
+    find_package(Python COMPONENTS Interpreter Development)
+
     add_definitions(-DPYBIND)
     Include(FetchContent)
-
     FetchContent_Declare(
     PyBind11
     GIT_REPOSITORY https://github.com/pybind/pybind11.git
     GIT_TAG        v2.10.4 # or a later release
     )
-
-    # Use the New FindPython mode, recommanded. Requires CMake 3.15+
-    find_package(Python COMPONENTS Interpreter Development)
     FetchContent_MakeAvailable(PyBind11)
 
     message(STATUS "Creating binding for module ${name}")
@@ -17,5 +16,10 @@ function(generate_python_binding name target_to_bind)
 
     pybind11_add_module(${name} MODULE ${pybind_src_files} "NO_EXTRAS") # NO EXTRA recquired for pip install
     target_include_directories(${name} PUBLIC "python_binding")
-    target_link_libraries(${name} PUBLIC ${target_to_bind})
+
+    # Handles Python + pybind11 headers dependencies
+    target_link_libraries(${name}
+        PUBLIC
+            ${target_to_bind}
+    )
 endfunction()
diff --git a/aidge_core/export_utils/node_export.py b/aidge_core/export_utils/node_export.py
index 80c37dd0a54d57561ce1a872ea540461aeec30a0..7aceaa0ccc1f07674241d6f35bbeff90330f2596 100644
--- a/aidge_core/export_utils/node_export.py
+++ b/aidge_core/export_utils/node_export.py
@@ -1,4 +1,4 @@
-import aidge_core
+from aidge_core import Node, Attributes
 
 from abc import ABC, abstractmethod
 
@@ -8,7 +8,7 @@ class ExportNode(ABC):
     """
 
     @abstractmethod
-    def __init__(self, aidge_node: aidge_core.Node) -> None:
+    def __init__(self, aidge_node: Node) -> None:
         """Create ExportNode and retieve attirubtes from ``aidge_node``:
 
         - name: aidge Node name
diff --git a/aidge_core/unit_tests/test_export.py b/aidge_core/unit_tests/test_export.py
index 3061d7940603b8eeca2c07368bc1bbd6756fa1f3..9fb16128eebed9102cdf0e46e359a832bf6ac140 100644
--- a/aidge_core/unit_tests/test_export.py
+++ b/aidge_core/unit_tests/test_export.py
@@ -8,15 +8,15 @@ http://www.eclipse.org/legal/epl-2.0.
 SPDX-License-Identifier: EPL-2.0
 """
 
-import unittest
 import aidge_core
-from functools import reduce
-import pathlib
+from aidge_core.utils import run_command
+import unittest
 import os
-import sys
-import subprocess
+import pathlib
 import shutil
-import numpy as np
+import subprocess
+import sys
+
 
 def initFiller(model):
     # Initialize parameters (weights and biases)
@@ -27,57 +27,126 @@ def initFiller(model):
             value.set_backend("cpu")
             tuple_out = node.output(0)[0]
             # No conv in current network
-            if tuple_out[0].type() == "Conv" and tuple_out[1]==1:
+            if tuple_out[0].type() == "Conv" and tuple_out[1] == 1:
                 # Conv weight
                 aidge_core.xavier_uniform_filler(value)
-            elif tuple_out[0].type() == "Conv" and tuple_out[1]==2:
+            elif tuple_out[0].type() == "Conv" and tuple_out[1] == 2:
                 # Conv bias
                 aidge_core.constant_filler(value, 0.01)
-            elif tuple_out[0].type() == "FC" and tuple_out[1]==1:
+            elif tuple_out[0].type() == "FC" and tuple_out[1] == 1:
                 # FC weight
                 aidge_core.normal_filler(value)
-            elif tuple_out[0].type() == "FC" and tuple_out[1]==2:
+            elif tuple_out[0].type() == "FC" and tuple_out[1] == 2:
                 # FC bias
                 aidge_core.constant_filler(value, 0.01)
             else:
                 pass
 
 
+def clean_dir(dir: pathlib.Path) -> None:
+    if not dir.is_dir():
+        print(f"Error: directory {dir} doesn't exist. Exiting clean_dir().")
+        return
+    for filename in os.listdir(dir):
+        file_path = os.path.join(dir, filename)
+        try:
+            if os.path.isfile(file_path) or os.path.islink(file_path):
+                os.unlink(file_path)
+            elif os.path.isdir(file_path):
+                shutil.rmtree(file_path)
+        except Exception as e:
+            print(f"Failed to delete {file_path}. Reason: {e}")
+    return
+
+
 class test_export(unittest.TestCase):
-    """Test aidge export
-    """
+    """Test aidge export"""
+
     def setUp(self):
-        self.EXPORT_PATH = pathlib.Path("myexport")
+        self.EXPORT_PATH: pathlib.Path = pathlib.Path("dummy_export")
+        self.BUILD_DIR: pathlib.Path = self.EXPORT_PATH / "build"
+
     def tearDown(self):
         pass
 
     def test_generate_export(self):
         # Create model
 
-        model = aidge_core.sequential([
-            aidge_core.FC(in_channels=32*32*3, out_channels=512, name="InputNode"),
-            aidge_core.ReLU(name="Relu0"),
-            aidge_core.FC(in_channels=512, out_channels=256, name="FC1"),
-            aidge_core.ReLU(name="Relu1"),
-            aidge_core.FC(in_channels=256, out_channels=128, name="FC2"),
-            aidge_core.ReLU(name="Relu2"),
-            aidge_core.FC(in_channels=128, out_channels=10, name="OutputNode"),
-        ])
+        model = aidge_core.sequential(
+            [
+                aidge_core.FC(
+                    in_channels=32 * 32 * 3, out_channels=512, name="InputNode"
+                ),
+                aidge_core.ReLU(name="Relu0"),
+                aidge_core.FC(in_channels=512, out_channels=256, name="FC1"),
+                aidge_core.ReLU(name="Relu1"),
+                aidge_core.FC(in_channels=256, out_channels=128, name="FC2"),
+                aidge_core.ReLU(name="Relu2"),
+                aidge_core.FC(in_channels=128, out_channels=10, name="OutputNode"),
+            ]
+        )
 
         initFiller(model)
 
         # Export model
         aidge_core.export(self.EXPORT_PATH, model)
-        self.assertTrue(self.EXPORT_PATH.is_dir(), "Export folder has not been generated")
-        os.makedirs(self.EXPORT_PATH / "build", exist_ok=True)
+
+        self.assertTrue(
+            self.EXPORT_PATH.is_dir(), "Export folder has not been generated"
+        )
+        os.makedirs(self.BUILD_DIR, exist_ok=True)
+        clean_dir(self.BUILD_DIR)  # if the build dir already existed, ensure it is empty
 
         # Test compilation of export
-        install_path = os.path.join(sys.prefix, "lib", "libAidge")  if "AIDGE_INSTALL" not in os.environ else os.environ["AIDGE_INSTALL"]
+        install_path = (
+            os.path.join(sys.prefix, "lib", "libAidge")
+            if "AIDGE_INSTALL" not in os.environ
+            else os.environ["AIDGE_INSTALL"]
+        )
+
+        shutil.copyfile(
+            pathlib.Path(__file__).parent / "static/main.cpp",
+            self.EXPORT_PATH / "main.cpp",
+        )
+
+        ##########################
+        # CMAKE EXPORT
+        try:
+            for std_line in run_command(
+                [
+                    "cmake",
+                    str(self.EXPORT_PATH.absolute()),
+                    "-DPYBIND=1",
+                    f"-DCMAKE_INSTALL_PREFIX:PATH={install_path}",
+                ],
+                cwd=str(self.BUILD_DIR),
+            ):
+                print(std_line, end="")
+        except subprocess.CalledProcessError as e:
+            print(f"An error occurred: {e}\nFailed to configure export.")
+
+        ##########################
+        # BUILD EXPORT
+        try:
+            for std_line in run_command(
+                ["cmake", "--build", "."],
+                cwd=str(self.BUILD_DIR),
+            ):
+                print(std_line, end="")
+        except subprocess.CalledProcessError as e:
+            print(f"An error occurred: {e}\nFailed to build export.")
 
-        shutil.copyfile(pathlib.Path(__file__).parent / "static/main.cpp", self.EXPORT_PATH / "main.cpp")
+        ##########################
+        # INSTALL EXPORT
+        try:
+            for std_line in run_command(
+                ["cmake", "--install", "."],
+                cwd=str(self.BUILD_DIR),
+            ):
+                print(std_line, end="")
+        except subprocess.CalledProcessError as e:
+            print(f"An error occurred: {e}\nFailed to install export.")
 
-        subprocess.check_call(['cmake', str(self.EXPORT_PATH.absolute()), f'-DCMAKE_INSTALL_PREFIX:PATH={install_path}'], cwd=str(self.EXPORT_PATH / "build"))
-        subprocess.check_call(['make', 'all', 'install'], cwd=str(self.EXPORT_PATH / "build"))
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     unittest.main()
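Note: the configure, build, and install steps above repeat the same try/run_command/except pattern. A minimal sketch of how they could be factored into one helper (run_cmake_step is a hypothetical name, not part of this patch; EXPORT_PATH, BUILD_DIR and install_path are assumed to be defined as in the test):

```python
# Hypothetical helper: stream one cmake invocation and report which step failed.
import pathlib
import subprocess
from typing import List

from aidge_core.utils import run_command


def run_cmake_step(args: List[str], cwd: pathlib.Path, step_name: str) -> None:
    try:
        for std_line in run_command(["cmake", *args], cwd=str(cwd)):
            print(std_line, end="")
    except subprocess.CalledProcessError as e:
        print(f"An error occurred: {e}\nFailed to {step_name} export.")


# Usage mirroring the test:
# run_cmake_step([str(EXPORT_PATH.absolute()), "-DPYBIND=1",
#                 f"-DCMAKE_INSTALL_PREFIX:PATH={install_path}"], BUILD_DIR, "configure")
# run_cmake_step(["--build", "."], BUILD_DIR, "build")
# run_cmake_step(["--install", "."], BUILD_DIR, "install")
```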
diff --git a/aidge_core/utils.py b/aidge_core/utils.py
index d82d524b7e886ed396507376a5934a748a89e44c..b6890bc2432b29499d1b06e7229c8c524a36cb06 100644
--- a/aidge_core/utils.py
+++ b/aidge_core/utils.py
@@ -1,3 +1,20 @@
+"""
+Copyright (c) 2023 CEA-List
+
+This program and the accompanying materials are made available under the
+terms of the Eclipse Public License 2.0 which is available at
+http://www.eclipse.org/legal/epl-2.0.
+
+SPDX-License-Identifier: EPL-2.0
+"""
+
+import queue
+import threading
+import subprocess
+import pathlib
+from typing import List
+
+
 def template_docstring(template_keyword, text_to_replace):
     """Method to template docstring
 
@@ -6,11 +23,87 @@ def template_docstring(template_keyword, text_to_replace):
     :param text_to_replace: Text to replace your template with.
     :type text_to_replace: str
     """
+
     def dec(func):
-        if "{"+template_keyword+"}" not in func.__doc__:
+        if "{" + template_keyword + "}" not in func.__doc__:
             raise RuntimeError(
-                f"The function {function.__name__} docstring does not contain the template keyword: {template_keyword}.")
+                f"The function {func.__name__} docstring does not contain the template keyword: {template_keyword}."
+            )
         func.__doc__ = func.__doc__.replace(
-            "{"+template_keyword+"}", text_to_replace)
+            "{" + template_keyword + "}", text_to_replace
+        )
         return func
+
     return dec
+
+
+def run_command(command: List[str], cwd: pathlib.Path = None):
+    """
+    Run a command and yield its stdout and stderr lines, which are not shown
+    by subprocess.check_call / subprocess.call.
+    If the subprocess exits with a non-zero return code, a subprocess.CalledProcessError is raised.
+    Args:
+        command : command to run, written with the same syntax as subprocess.call
+        cwd : path from which the command must be called
+
+    Call example:
+    ```python
+        try:
+            for std_line in run_command(
+                [
+                    "cmake",
+                    str(self.EXPORT_PATH.absolute()),
+                    "-DPYBIND=1",
+                    f"-DCMAKE_INSTALL_PREFIX:PATH={install_path}",
+                ],
+                cwd=str(self.BUILD_DIR),
+            ):
+                print(std_line, end="")
+        except subprocess.CalledProcessError as e:
+            print(f"An error occurred: {e}\nFailed to configure export.")
+    ```
+    """
+    process = subprocess.Popen(
+        command, cwd=cwd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True
+    )
+
+    stdout_queue = queue.Queue()
+    stderr_queue = queue.Queue()
+
+    def enqueue_output(stream, queue_to_append):
+        for line in iter(stream.readline, ""):
+            queue_to_append.put(line)
+        stream.close()
+
+    stdout_thread = threading.Thread(
+        target=enqueue_output, args=(process.stdout, stdout_queue)
+    )
+    stderr_thread = threading.Thread(
+        target=enqueue_output, args=(process.stderr, stderr_queue)
+    )
+    stdout_thread.start()
+    stderr_thread.start()
+
+    while (
+        stdout_thread.is_alive()
+        or stderr_thread.is_alive()
+        or not stdout_queue.empty()
+        or not stderr_queue.empty()
+    ):
+        try:
+            stdout_line = stdout_queue.get_nowait()
+            yield stdout_line
+        except queue.Empty:
+            pass
+
+        try:
+            stderr_line = stderr_queue.get_nowait()
+            yield stderr_line
+        except queue.Empty:
+            pass
+
+    return_code = process.wait()
+    if return_code != 0:
+        raise subprocess.CalledProcessError(return_code, command)
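run_command reads stdout and stderr on two dedicated threads that push lines into queues, so neither pipe can fill up and block the child process while the other stream is being drained; the generator then interleaves whatever lines are available (at the cost of a busy polling loop). A minimal usage sketch outside the export test (the cmake --version command is only illustrative):

```python
# Stream the output of an arbitrary command and surface a non-zero exit code.
import subprocess

from aidge_core.utils import run_command

try:
    for line in run_command(["cmake", "--version"]):
        print(line, end="")
except subprocess.CalledProcessError as e:
    print(f"Command failed: {e}")
```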
diff --git a/cmake/PybindModuleCreation.cmake b/cmake/PybindModuleCreation.cmake
index 8030c1a8639e4b7ae0c5fb865e928a4260c6ae7d..e2bbb2c3fb57867e8add781805033fa5979393a9 100644
--- a/cmake/PybindModuleCreation.cmake
+++ b/cmake/PybindModuleCreation.cmake
@@ -1,15 +1,14 @@
 function(generate_python_binding name target_to_bind)
+
+    find_package(Python COMPONENTS Interpreter Development.Module)
+
     add_definitions(-DPYBIND)
     Include(FetchContent)
-
     FetchContent_Declare(
     PyBind11
     GIT_REPOSITORY https://github.com/pybind/pybind11.git
     GIT_TAG        v2.10.4 # or a later release
     )
-
-    # Use the New FindPython mode, recommanded. Requires CMake 3.15+
-    find_package(Python COMPONENTS Interpreter Development)
     FetchContent_MakeAvailable(PyBind11)
 
     message(STATUS "Creating binding for module ${name}")
@@ -17,5 +16,10 @@ function(generate_python_binding name target_to_bind)
 
     pybind11_add_module(${name} MODULE ${pybind_src_files} "NO_EXTRAS") # NO_EXTRAS required for pip install
     target_include_directories(${name} PUBLIC "python_binding")
-    target_link_libraries(${name} PUBLIC ${target_to_bind})
+
+    # Handles Python and pybind11 header dependencies
+    target_link_libraries(${name}
+        PUBLIC
+            ${target_to_bind}
+    )
 endfunction()
diff --git a/include/aidge/operator/Sub.hpp b/include/aidge/operator/Sub.hpp
index fc30e51c9a6daed56a2e0e665be645739961aa6b..ba5a021c30f13bbc2ae73c90078548c5b677a3a5 100644
--- a/include/aidge/operator/Sub.hpp
+++ b/include/aidge/operator/Sub.hpp
@@ -12,6 +12,7 @@
 #ifndef AIDGE_CORE_OPERATOR_SUB_H_
 #define AIDGE_CORE_OPERATOR_SUB_H_
 
+#include <array>
 #include <memory>
 #include <vector>
 
diff --git a/include/aidge/utils/Directories.hpp b/include/aidge/utils/Directories.hpp
index 3bc07b9dd58e472096102c1b0c66971164d632a3..ca49e1b57cc5d01f9f0ff7fe8dc85520697c6821 100644
--- a/include/aidge/utils/Directories.hpp
+++ b/include/aidge/utils/Directories.hpp
@@ -14,11 +14,22 @@
 #define AIDGE_DIRECTORIES_H_
 
 
-#include <string>  // std::string
-#include <sstream> // std::stringstream
+#include <algorithm>
+#include <errno.h>
 #include <iostream>
+#include <sstream> // std::stringstream
+#include <string>  // std::string
 #include <sys/stat.h>
-#include <errno.h>
+#ifndef _S_ISTYPE
+#define _S_ISTYPE(mode, mask)  (((mode) & _S_IFMT) == (mask))
+#endif
+#ifndef S_ISREG
+#define S_ISREG(mode) _S_ISTYPE((mode), _S_IFREG)
+#endif
+#ifndef S_ISDIR
+#define S_ISDIR(mode) _S_ISTYPE((mode), _S_IFDIR)
+#endif
+
 
 #ifdef WIN32
 #include <direct.h>
diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644
index 0000000000000000000000000000000000000000..2dcb0701988d944c93a9f2e0ddfad3e0ebeb3313
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,397 @@
+[project]
+name = "aidge_core"
+description="Core algorithms for operators and graph of the AIDGE framework"
+dependencies = [
+    "numpy>=1.21.6",
+    "Jinja2>=3.1.2"
+]
+requires-python = ">= 3.7"
+readme = "README.md"
+license = { file = "LICENSE" }
+classifiers = [ 
+    "Development Status :: 2 - Pre-Alpha",
+    "Programming Language :: Python :: 3"
+    ]
+dynamic = ["version"] # defined in tool.setuptools_scm
+
+[project.optional-dependencies]
+test = [
+    "pytest"
+]
+
+[build-system]
+requires = [
+    "setuptools>=64",
+    "setuptools_scm[toml]==7.1.0",
+    "cmake>=3.27.9",
+    "toml"
+]
+build-backend = "setuptools.build_meta"
+
+#####################################################
+# SETUPTOOLS
+[tool.setuptools]
+[tool.setuptools.packages.find]
+where = ["."]  # list of folders that contain the packages (["."] by default)
+include = [ # package names should match these glob patterns (["*"] by default)
+    "aidge_core*"
+]
+exclude = [ # exclude packages matching these glob patterns (empty by default)
+    "aidge_core.unit_tests",
+    "aidge_core.unit_tests.static"
+] 
+
+# SETUPTOOLS_SCM
+[tool.setuptools_scm]
+write_to = "aidge_core/_version.py"
+
+#####################################################
+# CIBUILDWHEEL
+[tool.cibuildwheel]
+build-frontend = "build"
+test-requires = "pytest"
+# FIXME: The ignored export test requires building the generated export via cmake.
+# However, due to a strange bug, I haven't been able to properly link Python::Module to the export target,
+# resulting in the need to link Python::Python, which is the Python interpreter.
+# That suppresses the issue, but sadly this target is not available on the cibuildwheel image.
+# Hence the test is ignored. If you want to try to solve this bug, go ahead.
+# Just take care to increment the counter just below.
+#
+# Work time spent on this bug: 24h
+test-command = "pytest --ignore={package}/aidge_core/unit_tests/test_export.py {package}/aidge_core/unit_tests"
+# uncomment to run cibuildwheel locally on selected distros
+# build=[
+# "cp38-manylinux_x86_64",
+# "cp39-manylinux_x86_64",
+# "cp38-win_amd64",
+# "cp39-win_amd64",
+# "cp310-win_amd64",
+# ]
+
+### AIDGE DEPENDENCIES DECLARATION
+[tool.cibuildwheel.environment]
+# aidge_core does not rely on any aidge dependency, hence this string is empty
+AIDGE_DEPENDENCIES = "" # format => "dep_1 dep_2 ... dep_n"
+AIDGE_INSTALL="/AIDGE_INSTALL_CIBUILDWHEEL"
+[tool.cibuildwheel.linux]
+before-build = [
+    "bash .gitlab/ci/cibuildwheel_build_deps_before_build_wheel.sh /host"
+]
+[tool.cibuildwheel.windows]
+before-build = [
+    "python -m pip debug -v",
+    "powershell -File .\\.gitlab\\ci\\cibuildwheel_build_deps_before_build_wheel.ps1"
+]
+
+
+#####################################################
+# PYLINT
+[tool.pylint.main]
+# A comma-separated list of package or module names from where C extensions may
+# be loaded. Extensions are loading into the active Python interpreter and may
+# run arbitrary code.
+extension-pkg-allow-list = ["aidge_core", "torch", "tensorflow"]
+
+# Files or directories to be skipped. They should be base names, not paths.
+ignore = ["CVS"]
+
+# List of module names for which member attributes should not be checked (useful
+# for modules/projects where namespaces are manipulated during runtime and thus
+# existing member attributes cannot be deduced by static analysis). It supports
+# qualified module names, as well as Unix pattern matching.
+ignored-modules = ["aidge_core"]
+
+
+# Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the
+# number of processors available to use, and will cap the count on Windows to
+# avoid hangs.
+jobs = 1
+
+# Control the amount of potential inferred values when inferring a single object.
+# This can help the performance when dealing with large functions or complex,
+# nested conditions.
+limit-inference-results = 100
+
+# Pickle collected data for later comparisons.
+persistent = true
+
+# Minimum Python version to use for version dependent checks. Will default to the
+# version used to run pylint.
+py-version = "3.7"
+
+# When enabled, pylint would attempt to guess common misconfiguration and emit
+# user-friendly hints instead of false-positive error messages.
+suggestion-mode = true
+
+[tool.pylint.basic]
+# Naming style matching correct argument names.
+argument-naming-style = "snake_case"
+
+# Naming style matching correct attribute names.
+attr-naming-style = "snake_case"
+
+# Bad variable names which should always be refused, separated by a comma.
+bad-names = ["foo", "bar", "baz", "toto", "tutu", "tata"]
+
+# Naming style matching correct class attribute names.
+class-attribute-naming-style = "any"
+
+# Naming style matching correct class constant names.
+class-const-naming-style = "UPPER_CASE"
+
+# Naming style matching correct class names.
+class-naming-style = "PascalCase"
+
+# Naming style matching correct constant names.
+const-naming-style = "UPPER_CASE"
+
+# Minimum line length for functions/classes that require docstrings, shorter ones
+# are exempt.
+docstring-min-length = -1
+
+# Naming style matching correct function names.
+function-naming-style = "snake_case"
+
+# Good variable names which should always be accepted, separated by a comma.
+good-names = ["i", "j", "k", "ex", "Run", "_"]
+
+# Naming style matching correct inline iteration names.
+inlinevar-naming-style = "any"
+
+# Naming style matching correct method names.
+method-naming-style = "snake_case"
+
+# Naming style matching correct module names.
+module-naming-style = "snake_case"
+
+# Regular expression which should only match function or class names that do not
+# require a docstring.
+no-docstring-rgx = "^_"
+
+# List of decorators that produce properties, such as abc.abstractproperty. Add
+# to this list to register other decorators that produce valid properties. These
+# decorators are taken in consideration only for invalid-name.
+property-classes = ["abc.abstractproperty"]
+
+# Naming style matching correct variable names.
+variable-naming-style = "snake_case"
+
+[tool.pylint.classes]
+# List of method names used to declare (i.e. assign) instance attributes.
+defining-attr-methods = ["__init__", "__new__", "setUp", "__post_init__"]
+
+# List of member names, which should be excluded from the protected access
+# warning.
+exclude-protected = ["_asdict", "_fields", "_replace", "_source", "_make"]
+
+# List of valid names for the first argument in a class method.
+valid-classmethod-first-arg = ["cls"]
+
+# List of valid names for the first argument in a metaclass class method.
+valid-metaclass-classmethod-first-arg = ["cls"]
+
+[tool.pylint.design]
+# Maximum number of arguments for function / method.
+max-args = 5
+
+# Maximum number of attributes for a class (see R0902).
+max-attributes = 7
+
+# Maximum number of boolean expressions in an if statement (see R0916).
+max-bool-expr = 5
+
+# Maximum number of branch for function / method body.
+max-branches = 12
+
+# Maximum number of locals for function / method body.
+max-locals = 15
+
+# Maximum number of parents for a class (see R0901).
+max-parents = 7
+
+# Maximum number of public methods for a class (see R0904).
+max-public-methods = 20
+
+# Maximum number of return / yield for function / method body.
+max-returns = 6
+
+# Maximum number of statements in function / method body.
+max-statements = 50
+
+# Minimum number of public methods for a class (see R0903).
+min-public-methods = 2
+
+[tool.pylint.exceptions]
+# Exceptions that will emit a warning when caught.
+overgeneral-exceptions = ["BaseException", "Exception"]
+
+[tool.pylint.format]
+# Expected format of line ending, e.g. empty (any line ending), LF or CRLF.
+# expected-line-ending-format =
+
+# Regexp for a line that is allowed to be longer than the limit.
+ignore-long-lines = "^\\s*(# )?<?https?://\\S+>?$"
+
+# Number of spaces of indent required inside a hanging or continued line.
+indent-after-paren = 4
+
+# String used as indentation unit. This is usually "    " (4 spaces) or "\t" (1
+# tab).
+indent-string = "    "
+
+# Maximum number of characters on a single line.
+max-line-length = 200
+
+# Maximum number of lines in a module.
+max-module-lines = 1000
+
+[tool.pylint.imports]
+# Force import order to recognize a module as part of a third party library.
+known-third-party = ["enchant"]
+
+[tool.pylint.logging]
+# The type of string formatting that logging methods do. `old` means using %
+# formatting, `new` is for `{}` formatting.
+logging-format-style = "old"
+
+# Logging modules to check that the string format arguments are in logging
+# function parameter format.
+logging-modules = ["logging"]
+
+[tool.pylint."messages control"]
+# Only show warnings with the listed confidence levels. Leave empty to show all.
+# Valid levels: HIGH, CONTROL_FLOW, INFERENCE, INFERENCE_FAILURE, UNDEFINED.
+confidence = ["HIGH", "CONTROL_FLOW", "INFERENCE", "INFERENCE_FAILURE", "UNDEFINED"]
+
+# Disable the message, report, category or checker with the given id(s). You can
+# either give multiple identifiers separated by comma (,) or put this option
+# multiple times (only on the command line, not in the configuration file where
+# it should appear only once). You can also use "--disable=all" to disable
+# everything first and then re-enable specific checks. For example, if you want
+# to run only the similarities checker, you can use "--disable=all
+# --enable=similarities". If you want to run only the classes checker, but have
+# no Warning level messages displayed, use "--disable=all --enable=classes
+# --disable=W".
+disable = ["raw-checker-failed", "bad-inline-option", "locally-disabled", "file-ignored", "suppressed-message", "useless-suppression", "deprecated-pragma", "use-symbolic-message-instead", "use-implicit-booleaness-not-comparison-to-string", "use-implicit-booleaness-not-comparison-to-zero", "too-many-locals", "missing-class-docstring", "missing-function-docstring", "too-many-arguments", "protected-access", "too-many-branches", "too-many-ancestors", "wrong-import-order", "wrong-import-position"]
+
+# Enable the message, report, category or checker with the given id(s). You can
+# either give multiple identifier separated by comma (,) or put this option
+# multiple time (only on the command line, not in the configuration file where it
+# should appear only once). See also the "--disable" option for examples.
+enable = ["c-extension-no-member"]
+
+[tool.pylint.method_args]
+# List of qualified names (i.e., library.method) which require a timeout
+# parameter e.g. 'requests.api.get,requests.api.post'
+timeout-methods = ["requests.api.delete", "requests.api.get", "requests.api.head", "requests.api.options", "requests.api.patch", "requests.api.post", "requests.api.put", "requests.api.request"]
+
+[tool.pylint.miscellaneous]
+# List of note tags to take in consideration, separated by a comma.
+notes = ["FIXME", "XXX", "TODO"]
+
+# Regular expression of note tags to take in consideration.
+# notes-rgx =
+
+[tool.pylint.refactoring]
+# Maximum number of nested blocks for function / method body
+max-nested-blocks = 5
+
+# Complete name of functions that never returns. When checking for inconsistent-
+# return-statements if a never returning function is called then it will be
+# considered as an explicit return statement and no message will be printed.
+never-returning-functions = ["sys.exit", "argparse.parse_error"]
+
+# Let 'consider-using-join' be raised when the separator to join on would be non-
+# empty (resulting in expected fixes of the type: ``"- " + " - ".join(items)``)
+suggest-join-with-non-empty-separator = true
+
+[tool.pylint.reports]
+# Python expression which should return a score less than or equal to 10. You
+# have access to the variables 'fatal', 'error', 'warning', 'refactor',
+# 'convention', and 'info' which contain the number of messages in each category,
+# as well as 'statement' which is the total number of statements analyzed. This
+# score is used by the global evaluation report (RP0004).
+evaluation = "10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)"
+
+# Activate the evaluation score.
+score = true
+
+[tool.pylint.similarities]
+# Comments are removed from the similarity computation
+ignore-comments = true
+
+# Docstrings are removed from the similarity computation
+ignore-docstrings = true
+
+# Minimum lines number of a similarity.
+min-similarity-lines = 4
+
+[tool.pylint.spelling]
+# Limits count of emitted suggestions for spelling mistakes.
+max-spelling-suggestions = 4
+
+# List of comma separated words that should be considered directives if they
+# appear at the beginning of a comment and should not be checked.
+spelling-ignore-comment-directives = "fmt: on,fmt: off,noqa:,noqa,nosec,isort:skip,mypy:"
+
+[tool.pylint.typecheck]
+# List of decorators that produce context managers, such as
+# contextlib.contextmanager. Add to this list to register other decorators that
+# produce valid context managers.
+contextmanager-decorators = ["contextlib.contextmanager"]
+
+# Tells whether missing members accessed in mixin class should be ignored. A
+# class is considered mixin if its name matches the mixin-class-rgx option.
+# Tells whether to warn about missing members when the owner of the attribute is
+# inferred to be None.
+ignore-none = true
+
+# This flag controls whether pylint should warn about no-member and similar
+# checks whenever an opaque object is returned when inferring. The inference can
+# return multiple potential results while evaluating a Python object, but some
+# branches might not be evaluated, which results in partial inference. In that
+# case, it might be useful to still emit no-member and other checks for the rest
+# of the inferred objects.
+ignore-on-opaque-inference = true
+
+# List of symbolic message names to ignore for Mixin members.
+ignored-checks-for-mixins = ["no-member", "not-async-context-manager", "not-context-manager", "attribute-defined-outside-init"]
+
+# List of class names for which member attributes should not be checked (useful
+# for classes with dynamically set attributes). This supports the use of
+# qualified names.
+ignored-classes = ["optparse.Values", "thread._local", "_thread._local", "aidge.global_variables", "aidge.cells.abstract_cell.Trainable", "torch", "tensorflow"]
+
+# Show a hint with possible names when a member name was not found. The aspect of
+# finding the hint is based on edit distance.
+missing-member-hint = true
+
+# The minimum edit distance a name should have in order to be considered a
+# similar match for a missing member name.
+missing-member-hint-distance = 1
+
+# The total number of similar names that should be taken in consideration when
+# showing a hint for a missing member.
+missing-member-max-choices = 1
+
+# Regex pattern to define which classes are considered mixins.
+mixin-class-rgx = ".*[Mm]ixin"
+
+[tool.pylint.variables]
+# Tells whether unused global variables should be treated as a violation.
+allow-global-unused-variables = true
+
+# List of strings which can identify a callback function by name. A callback name
+# must start or end with one of those strings.
+callbacks = ["cb_", "_cb"]
+
+# A regular expression matching the name of dummy variables (i.e. expected to not
+# be used).
+dummy-variables-rgx = "_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_"
+
+# Argument names that match this expression will be ignored.
+ignored-argument-names = "_.*|^ignored_|^unused_"
+
+# List of qualified module names which can have objects that can redefine
+# builtins.
+redefining-builtins-modules = ["six.moves", "past.builtins", "future.builtins", "builtins", "io"]
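The project version is now dynamic: setuptools_scm derives it from the git tag at build time and writes it to aidge_core/_version.py (the file is also listed in .gitignore). A minimal sketch of how the package could expose that generated version at runtime, assuming setuptools_scm's default file template (this snippet is illustrative, not part of the patch):

```python
# Hypothetical aidge_core/__init__.py addition: re-export the generated version,
# falling back gracefully when running from an unbuilt source tree.
try:
    from aidge_core._version import version as __version__
except ImportError:
    __version__ = "unknown"
```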
diff --git a/requirements.txt b/requirements.txt
deleted file mode 100644
index 32ec29bb9b826038eb21ce2927f2fef08973b2b8..0000000000000000000000000000000000000000
--- a/requirements.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-numpy
-Jinja2
diff --git a/setup.py b/setup.py
index 60807df560510ad4cfacfdd2b178aca957306439..852da8432c29cf6b7370946289c429479d506228 100644
--- a/setup.py
+++ b/setup.py
@@ -1,50 +1,29 @@
-#!/usr/bin/env python3
-""" Aidge
-
-#TODO To change
-POC of the next framework named Aidge
-"""
-
-DOCLINES = (__doc__ or '').split("\n")
-
 import sys
 import os
-
-# Python supported version checks
-if sys.version_info[:2] < (3, 7):
-    raise RuntimeError("Python version >= 3.7 required.")
-
-
-CLASSIFIERS = """\
-Development Status :: 2 - Pre-Alpha
-"""
-
 import shutil
 import pathlib
-import subprocess
 import multiprocessing
 
 from math import ceil
 
+import toml
+
 from setuptools import setup, Extension
-from setuptools import find_packages
 from setuptools.command.build_ext import build_ext
 
-def get_project_name() -> str:
-    return open(pathlib.Path().absolute() / "project_name.txt", "r").read()
 
-def get_project_version() -> str:
-    aidge_root = pathlib.Path().absolute()
-    version = open(aidge_root / "version.txt", "r").read().strip()
-    return version
+def get_project_name() -> str:
+    with open(pathlib.Path().absolute() / "pyproject.toml", "r") as file:
+        project_toml = toml.load(file)
+        return project_toml["project"]["name"]
 
 
 class CMakeExtension(Extension):
     def __init__(self, name):
         super().__init__(name, sources=[])
 
-class CMakeBuild(build_ext):
 
+class CMakeBuild(build_ext):
     def run(self):
         # This lists the number of processors available on the machine
         # The compilation will use half of them
@@ -62,17 +41,42 @@ class CMakeBuild(build_ext):
 
         os.chdir(str(build_temp))
 
-        # Impose to use the executable of the python
-        # used to launch setup.py to setup PythonInterp
-        param_py = "-DPYTHON_EXECUTABLE=" + sys.executable
+        compile_type = (
+            "Release"
+            if "AIDGE_PYTHON_BUILD_TYPE" not in os.environ
+            else os.environ["AIDGE_PYTHON_BUILD_TYPE"]
+        )
+        install_path = (
+            os.path.join(sys.prefix, "lib", "libAidge")
+            if "AIDGE_INSTALL" not in os.environ
+            else os.environ["AIDGE_INSTALL"]
+        )
+
+        build_gen = (
+            ["-G", os.environ["AIDGE_BUILD_GEN"]]
+            if "AIDGE_BUILD_GEN" in os.environ
+            else []
+        )
+
+        self.spawn(
+            [
+                "cmake",
+                *build_gen,
+                str(cwd),
+                "-DTEST=OFF",
+                f"-DCMAKE_INSTALL_PREFIX:PATH={install_path}",
+                f"-DCMAKE_BUILD_TYPE={compile_type}",
+                "-DPYBIND=ON",
+                "-DCMAKE_EXPORT_COMPILE_COMMANDS=ON",
+                "-DCOVERAGE=OFF",
+            ]
+        )
 
-        compile_type = 'Debug'
-        install_path = os.path.join(sys.prefix, "lib", "libAidge")  if "AIDGE_INSTALL" not in os.environ else os.environ["AIDGE_INSTALL"]
-
-        self.spawn(['cmake', str(cwd), param_py, '-DTEST=OFF', f'-DCMAKE_INSTALL_PREFIX:PATH={install_path}', f'-DCMAKE_BUILD_TYPE={compile_type}'])
         if not self.dry_run:
-            self.spawn(['cmake', '--build', '.', '--config', compile_type, '-j', max_jobs])
-            self.spawn(['cmake', '--install', '.', '--config', compile_type])
+            self.spawn(
+                ["cmake", "--build", ".", "--config", compile_type, "-j", max_jobs]
+            )
+            self.spawn(["cmake", "--install", ".", "--config", compile_type])
         os.chdir(str(cwd))
 
         aidge_package = build_lib / (get_project_name())
@@ -83,8 +87,10 @@ class CMakeBuild(build_ext):
         # Copy all shared object files from build_temp/lib to aidge_package
         for root, _, files in os.walk(build_temp.absolute()):
             for file in files:
-                if (file.endswith('.so') or file.endswith('.pyd')) and (root != str(aidge_package.absolute())):
-                    currentFile=os.path.join(root, file)
+                if (file.endswith(".so") or file.endswith(".pyd")) and (
+                    root != str(aidge_package.absolute())
+                ):
+                    currentFile = os.path.join(root, file)
                     shutil.copy(currentFile, str(aidge_package.absolute()))
 
         # Copy version.txt in aidge_package
@@ -92,22 +98,11 @@ class CMakeBuild(build_ext):
         shutil.copy("version.txt", str(aidge_package.absolute()))
 
 
-if __name__ == '__main__':
-
+if __name__ == "__main__":
     setup(
-        name=get_project_name(),
-        version=get_project_version(),
-        python_requires='>=3.7',
-        description=DOCLINES[0],
-        long_description_content_type="text/markdown",
-        long_description="\n".join(DOCLINES[2:]),
-        classifiers=[c for c in CLASSIFIERS.split('\n') if c],
-        packages=find_packages(where="."),
-        include_package_data=True,
         ext_modules=[CMakeExtension(get_project_name())],
         cmdclass={
-            'build_ext': CMakeBuild,
+            "build_ext": CMakeBuild,
         },
         zip_safe=False,
-
     )
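The rewritten CMakeBuild is now driven by three environment variables: AIDGE_PYTHON_BUILD_TYPE selects the CMake build type (Release by default), AIDGE_INSTALL overrides the install prefix, and AIDGE_BUILD_GEN selects an alternative CMake generator. The same logic could be expressed a bit more compactly with os.environ.get; a minimal sketch (the helper name is illustrative only):

```python
# Sketch of the environment-variable handling used by CMakeBuild.run().
import os
import sys
from typing import List, Tuple


def cmake_settings() -> Tuple[str, str, List[str]]:
    compile_type = os.environ.get("AIDGE_PYTHON_BUILD_TYPE", "Release")
    install_path = os.environ.get(
        "AIDGE_INSTALL", os.path.join(sys.prefix, "lib", "libAidge")
    )
    generator = os.environ.get("AIDGE_BUILD_GEN")
    build_gen = ["-G", generator] if generator else []
    return compile_type, install_path, build_gen
```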
diff --git a/src/operator/Concat.cpp b/src/operator/Concat.cpp
index 58ee7355e3aec0b86991d8df22753953304dd7c9..4649a954a095d239dbe7de7bcbebf1025a3b22c6 100644
--- a/src/operator/Concat.cpp
+++ b/src/operator/Concat.cpp
@@ -23,10 +23,9 @@ void Aidge::Concat_OpImpl::forward() {
     const DimSize_t axis = op.axis();
 
     assert(op.getInput(0) && "missing input in Concat operator");
-    DataType datatypeFirstInput = op.getInput(0)->dataType();
     for (IOIndex_t i = 1; i < mOp.nbInputs(); ++i) {
         assert(op.getInput(i) && "missing input in Concat operator");
-        assert(op.getInput(i)->dataType() == datatypeFirstInput);
+        assert(op.getInput(i)->dataType() == op.getInput(0)->dataType());
     }
 
     DimSize_t outputAxisValue = 0;