Commit b12d55f1 authored by Cyril Moineau

Remove operator_old.py

import os
import shutil

import numpy as np
from jinja2 import Environment, FileSystemLoader

from aidge_core import ExportNode
from aidge_export_cpp.register import export_cpp_register

dirpath = os.path.dirname(__file__)
class KERNELS:
    ACTIVATION = dirpath + "/kernels/activation.hpp"
    BATCHNORM = dirpath + "/kernels/batchnorm.hpp"
    CONV = dirpath + "/kernels/convolution.hpp"
    ADD = dirpath + "/kernels/elemwise.hpp"
    FC = dirpath + "/kernels/fullyconnected.hpp"
    POOLING = dirpath + "/kernels/pooling.hpp"
    LEAKYRELU = dirpath + "/kernels/leakyrelu.hpp"

class KERNELS_FORWARD:
    ACTIVATION = dirpath + "/templates/kernel_forward/activation_forward.jinja"
    BATCHNORM = dirpath + "/templates/kernel_forward/batchnorm_forward.jinja"
    CONV = dirpath + "/templates/kernel_forward/convolution_forward.jinja"
    ADD = dirpath + "/templates/kernel_forward/elemwise_forward.jinja"
    FC = dirpath + "/templates/kernel_forward/fullyconnected_forward.jinja"
    POOLING = dirpath + "/templates/kernel_forward/pooling_forward.jinja"
    LEAKYRELU = dirpath + "/templates/kernel_forward/leakyrelu_forward.jinja"

class CONFIGURATIONS:
    ACTIVATION = dirpath + "/templates/configuration/activation_config.jinja"
    BATCHNORM = dirpath + "/templates/configuration/batchnorm_config.jinja"
    CONV = dirpath + "/templates/configuration/convolution_config.jinja"
    ADD = dirpath + "/templates/configuration/elemwise_config.jinja"
    FC = dirpath + "/templates/configuration/fullyconnected_config.jinja"
    POOLING = dirpath + "/templates/configuration/pooling_config.jinja"
    LEAKYRELU = dirpath + "/templates/configuration/leakyrelu_config.jinja"

##############################################
############## Export functions ##############
##############################################
def generate_file(filename, templatename, **kwargs):
    # Get the directory of the output file and create it if needed
    # (guard against an empty dirname when filename has no directory part)
    dirname = os.path.dirname(filename)
    if dirname:
        os.makedirs(dirname, exist_ok=True)
    # Split the template path into directory and file name
    template_dir = os.path.dirname(templatename)
    template_name = os.path.basename(templatename)
    # Select the template
    template = Environment(loader=FileSystemLoader(template_dir)).get_template(template_name)
    # Render the template and write the result
    content = template.render(kwargs)
    with open(filename, mode="w", encoding="utf-8") as file:
        file.write(content)

def generate_action(template_path, **kwargs):
    dirname = os.path.dirname(template_path)
    filename = os.path.basename(template_path)
    template = Environment(loader=FileSystemLoader(dirname)).get_template(filename)
    return template.render(kwargs)
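
# Example usage of the two helpers above (illustrative only; the output path is
# a hypothetical choice, and the keyword arguments mirror those passed by
# ConvCPP.export() further down):
#
#   generate_file("export/layers/conv1.h", CONFIGURATIONS.CONV,
#                 name="conv1", input_dims=[3, 32, 32], output_dims=[8, 32, 32],
#                 kernel=[3, 3], stride=[1, 1], padding=[1, 1], dilation=[1, 1],
#                 activation="Linear", rescaling="NoScaling")
#
#   call = generate_action(KERNELS_FORWARD.ACTIVATION,
#                          name="relu1", input_name="conv1", output_name="relu1")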
def copyfile(filename, dst_folder):
    # If the destination directory doesn't exist, create it
    if not os.path.exists(dst_folder):
        os.makedirs(dst_folder)
    shutil.copy(filename, dst_folder)

def export_to_static(name, array, filepath):
    # Get the directory of the output file and create it if needed
    # (guard against an empty dirname when filepath has no directory part)
    dirname = os.path.dirname(filepath)
    if dirname:
        os.makedirs(dirname, exist_ok=True)
    generate_file(
        filepath,
        dirpath + "/templates/data/data_static.jinja",
        dims=array.shape,
        data_t="float",
        name=name,
        values=array.tolist()
    )
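
# Example: dump a constant tensor as a static C array header, as ProducerCPP
# does below (the array and output path are hypothetical, for illustration):
#
#   weights = np.random.rand(8, 3, 3, 3).astype(np.float32)
#   export_to_static("conv1_weights", weights.reshape(-1),
#                    "export/parameters/conv1_weights.h")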
##############################################
################### Utils ####################
##############################################
def get_node_parents(node):
    parents = []
    for parent in node.get_parents():
        if parent.type() != "Producer":
            parents.append(parent)
    return parents

def get_producer_parents(node):
    parents = []
    for parent in node.get_parents():
        if parent.type() == "Producer":
            parents.append(parent)
    return parents
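
# Example: for a Conv node fed by one data tensor and two Producer nodes
# (weights and biases), the two helpers split the parents (hypothetical graph,
# for illustration):
#
#   data_parents = get_node_parents(conv_node)        # e.g. [relu1]
#   param_parents = get_producer_parents(conv_node)   # e.g. [weights, biases]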
##############################################
################### Actions ##################
##############################################
def set_up_output(name, datatype):
    return f"{datatype}* {name} = ({datatype}*) mem + {name.upper()}_OFFSET;"
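
# For name="conv1" and datatype="float", set_up_output emits the C line:
#
#   float* conv1 = (float*) mem + CONV1_OFFSET;
#
# i.e. each layer output is a view into a shared memory pool at a per-layer offset.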
##############################################
############## Operators helper ##############
##############################################
@export_cpp_register("Conv")
class ConvCPP(ExportNode):
def __init__(self, node):
super().__init__(node)
self.kernel = node.get_operator().get_attr("KernelDims")
self.stride = node.get_operator().get_attr("StrideDims")
# Not working anymore because Padding is a standalone operator
# self.padding = node.get_operator().get_attr("PaddingDims")
self.padding = [1, 1]
self.dilation = node.get_operator().get_attr("DilationDims")
self.nb_channels = node.get_operator().get_attr("InChannels")
self.nb_outputs = node.get_operator().get_attr("OutChannels")
def export(self, export_folder:str, list_configs:list):
copyfile(KERNELS.CONV, f"{export_folder}/include/kernels/")
copyfile(dirpath + "/kernels/macs.hpp", f"{export_folder}/include/kernels/")
copyfile(dirpath + "/kernels/activation.hpp", f"{export_folder}/include/kernels/")
list_configs.append("kernels/convolution.hpp")
list_configs.append(f"layers/{self.name}.h")
generate_file(
f"{export_folder}/layers/{self.name}.h",
CONFIGURATIONS.CONV,
name=self.name,
input_dims=self.inputs_dims[0][1:],
output_dims=self.outputs_dims[0][1:],
kernel=self.kernel,
stride=self.stride,
padding=self.padding,
dilation=self.dilation,
activation="Linear",
rescaling="NoScaling")
return list_configs
def forward(self, list_actions:list):
if not self.is_last:
list_actions.append(set_up_output(self.name, "float"))
list_actions.append(generate_action(
KERNELS_FORWARD.CONV,
name=self.name,
input_name=self.inputs[0].name(),
output_name=self.name,
weights_name=self.inputs[1].name(),
biases_name=self.inputs[2].name()
))
return list_actions
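
# Sketch of how an ExportNode subclass such as ConvCPP is typically driven
# (conv_node and the export folder are assumptions, for illustration):
#
#   op = ConvCPP(conv_node)
#   list_configs = op.export("export", [])   # copies kernels, writes layers/conv1.h
#   list_actions = op.forward([])            # appends the rendered forward call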
class BatchNormCPP:
    def __init__(self, node):
        self.name = node.name()
        self.epsilon = node.get_operator().get_attr("Epsilon")
        # Producer order: scales, biases, means, variances
        self.producers = get_producer_parents(node)
        self.scales = np.array(self.producers[0].get_operator().get_output(0)).reshape(-1).tolist()
        self.biases = np.array(self.producers[1].get_operator().get_output(0)).reshape(-1).tolist()
        self.means = np.array(self.producers[2].get_operator().get_output(0)).reshape(-1).tolist()
        self.vars = np.array(self.producers[3].get_operator().get_output(0)).reshape(-1).tolist()
        parents = get_node_parents(node)
        if len(parents) == 0:
            self.input_name = "in"
        else:
            self.input_name = parents[0].name()

    def export(self, export_folder: str, list_configs: list):
        list_configs.append(f"layers/{self.name}.h")
        generate_file(
            f"{export_folder}/layers/{self.name}.h",
            CONFIGURATIONS.BATCHNORM,
            name=self.name,
            input_dims=[0, 0, 0],
            output_dims=[0, 0, 0],
            activation="Linear",
            epsilon=self.epsilon)
        # TODO: export the batchnorm parameters
        return list_configs

    def forward(self, list_actions: list):
        list_actions.append(set_up_output(self.name, "float"))
        # Producer indices must match the order used in __init__
        list_actions.append(generate_action(
            KERNELS_FORWARD.BATCHNORM,
            name=self.name,
            input_name=self.input_name,
            output_name=self.name,
            scales_name=self.producers[0].name(),
            biases_name=self.producers[1].name(),
            means_name=self.producers[2].name(),
            variances_name=self.producers[3].name()
        ))
        return list_actions
@export_cpp_register("ReLU")
class ReLUCPP(ExportNode):
def __init__(self, node):
super().__init__(node)
self.nb_data = 1
for i in self.inputs_dims[0]:
self.nb_data *= i
def export(self, export_folder:str, list_configs:list):
copyfile(KERNELS.ACTIVATION, f"{export_folder}/include/kernels/")
list_configs.append("kernels/activation.hpp")
list_configs.append(f"layers/{self.name}.h")
generate_file(
f"{export_folder}/layers/{self.name}.h",
CONFIGURATIONS.ACTIVATION,
name=self.name,
nb_data=self.nb_data,
activation="Rectifier",
rescaling="NoScaling")
return list_configs
def forward(self, list_actions:list):
if not self.is_last:
list_actions.append(set_up_output(self.name, "float"))
list_actions.append(generate_action(
KERNELS_FORWARD.ACTIVATION,
name=self.name,
input_name=self.inputs[0].name(),
output_name=self.name
))
return list_actions
@export_cpp_register("LeakyReLU")
class LeakyReLUCPP(ExportNode):
def __init__(self, node):
super().__init__(node)
self.alpha = node.get_operator().get_attr("NegativeSlope")
self.nb_data = 1
for i in self.inputs_dims[0]:
self.nb_data *= i
def export(self, export_folder:str, list_configs:list):
copyfile(KERNELS.LEAKYRELU, f"{export_folder}/include/kernels/")
list_configs.append("kernels/activation.hpp")
list_configs.append(f"layers/{self.name}.h")
generate_file(
f"{export_folder}/layers/{self.name}.h",
CONFIGURATIONS.LEAKYRELU,
name=self.name,
nb_data=self.nb_data,
alpha = self.alpha)
return list_configs
def forward(self, list_actions:list):
if not self.is_last:
list_actions.append(set_up_output(self.name, "float"))
list_actions.append(generate_action(
KERNELS_FORWARD.LEAKYRELU,
name=self.name,
input_name=self.inputs[0].name(),
output_name=self.name
))
return list_actions
class AddCPP:
    def __init__(self, node):
        self.name = node.name()
        self.parents = get_node_parents(node)

    def export(self, export_folder: str, list_configs: list):
        list_configs.append(f"layers/{self.name}.h")
        generate_file(
            f"{export_folder}/layers/{self.name}.h",
            CONFIGURATIONS.ADD,
            name=self.name,
            input_dims=[0, 0, 0],
            output_dims=[0, 0, 0],
            activation="Linear",
            elemwise_op="Sum")
        return list_configs

    def forward(self, list_actions: list):
        list_actions.append(set_up_output(self.name, "float"))
        list_actions.append(generate_action(
            KERNELS_FORWARD.ADD,
            name=self.name,
            input1_name=self.parents[0].name(),
            input2_name=self.parents[1].name(),
            output_name=self.name
        ))
        return list_actions
@export_cpp_register("MaxPooling")
class MaxPoolCPP(ExportNode):
def __init__(self, node):
super().__init__(node)
self.kernel = node.get_operator().get_attr("KernelDims")
self.stride = node.get_operator().get_attr("StrideDims")
# Not supported by the core...
# self.padding = node.get_operator().get_attr("PaddingDims")
self.padding = [0, 0]
def export(self, export_folder:str, list_configs:list):
copyfile(KERNELS.POOLING, f"{export_folder}/include/kernels/")
list_configs.append("kernels/pooling.hpp")
list_configs.append(f"layers/{self.name}.h")
generate_file(
f"{export_folder}/layers/{self.name}.h",
CONFIGURATIONS.POOLING,
name=self.name,
input_dims=self.inputs_dims[0],
output_dims=self.outputs_dims[0],
kernel=self.kernel,
stride=self.stride,
padding=self.padding,
pool_type="Max",
activation="Linear")
return list_configs
def forward(self, list_actions:list):
if not self.is_last:
list_actions.append(set_up_output(self.name, "float"))
list_actions.append(generate_action(
KERNELS_FORWARD.POOLING,
name=self.name,
input_name=self.inputs[0].name(),
output_name=self.name
))
return list_actions
class GlobalAvgPoolCPP:
    def __init__(self, node):
        # node.get_operator().set_compute_output_dims(lambda x: [[x[0][0], x[0][1], 1, 1]])
        # Set the fields forward() relies on, following the same pattern as BatchNormCPP
        self.name = node.name()
        parents = get_node_parents(node)
        self.input_name = "in" if len(parents) == 0 else parents[0].name()

    def export(self, export_folder: str, list_configs: list):
        return list_configs

    def forward(self, list_actions: list):
        list_actions.append(set_up_output(self.name, "float"))
        list_actions.append(generate_action(
            KERNELS_FORWARD.POOLING,
            name=self.name,
            input_name=self.input_name,
            output_name=self.name
        ))
        return list_actions
@export_cpp_register("FC")
class FcCPP(ExportNode):
def __init__(self, node):
super().__init__(node)
if len(self.inputs_dims[0]) == 2:
self.inputs_dims[0] = [self.inputs_dims[0][1], 1, 1]
elif len(self.inputs_dims[0]) == 4:
self.inputs_dims[0] = self.inputs_dims[0][1:]
if len(self.outputs_dims[0]) == 2:
self.outputs_dims[0] = [self.outputs_dims[0][1], 1, 1]
def export(self, export_folder:str, list_configs:list):
copyfile(KERNELS.FC, f"{export_folder}/include/kernels/")
copyfile(dirpath + "/kernels/macs.hpp", f"{export_folder}/include/kernels/")
copyfile(dirpath + "/kernels/activation.hpp", f"{export_folder}/include/kernels/")
list_configs.append("kernels/fullyconnected.hpp")
list_configs.append(f"layers/{self.name}.h")
generate_file(
f"{export_folder}/layers/{self.name}.h",
CONFIGURATIONS.FC,
name=self.name,
input_dims=self.inputs_dims[0],
output_dims=self.outputs_dims[0],
activation="Linear",
rescaling="NoScaling")
return list_configs
def forward(self, list_actions:list):
if not self.is_last:
list_actions.append(set_up_output(self.name, "float"))
list_actions.append(generate_action(
KERNELS_FORWARD.FC,
name=self.name,
input_name=self.inputs[0].name() if self.inputs[0] else self.name + "_input",
output_name=self.name,
weights_name=self.inputs[1].name(),
biases_name=self.inputs[2].name()
))
return list_actions
@export_cpp_register("Producer")
class ProducerCPP(ExportNode):
def __init__(self, node):
super().__init__(node)
self.values = np.array(self.operator.get_output(0))
def export(self, export_folder:str, list_configs:list):
list_configs.append(f"parameters/{self.name}.h")
export_to_static(self.name,
self.values.reshape(-1),
f"{export_folder}/parameters/{self.name}.h")
return list_configs
def forward(self, list_actions:list):
return list_actions
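
# Hypothetical end-to-end sketch (not part of the original file): given a list
# of scheduled aidge_core nodes and a registry mapping operator types to the
# ExportNode classes above, an export driver could look like this. `nodes` and
# `registry` are assumptions for illustration.
def export_graph(nodes, registry, export_folder):
    list_configs = []
    list_actions = []
    for node in nodes:
        op = registry[node.type()](node)  # e.g. ConvCPP, ReLUCPP, FcCPP, ...
        list_configs = op.export(export_folder, list_configs)
        list_actions = op.forward(list_actions)
    return list_configs, list_actions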