Skip to content
Draft

Magia #166

Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
40 changes: 40 additions & 0 deletions Deeploy/Targets/Magia/Deployer.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,40 @@
# SPDX-FileCopyrightText: 2026 ETH Zurich and University of Bologna
#
# SPDX-License-Identifier: Apache-2.0

from typing import Callable, Dict, List, Optional, Type

import numpy as np
import onnx_graphsurgeon as gs

from Deeploy.AbstractDataTypes import Pointer
from Deeploy.CommonExtensions.NetworkDeployers.SignPropDeployer import SignPropDeployer
from Deeploy.DeeployTypes import ConstantBuffer, DeploymentPlatform, NodeTemplate, TopologyOptimizer, VariableBuffer


class MagiaDeployer(SignPropDeployer):
    """Network deployer for the Magia target.

    Thin specialization of ``SignPropDeployer``: forwards all construction
    arguments to the base class and then appends Magia-specific passes to the
    lowering optimizer.
    """

    def __init__(self,
                 graph: gs.Graph,
                 deploymentPlatform: DeploymentPlatform,
                 inputTypes: Dict[str, Type[Pointer]],
                 loweringOptimizer: TopologyOptimizer,
                 scheduler: Callable = lambda x: x,
                 name: str = 'DeeployNetwork',
                 default_channels_first: bool = False,
                 deeployStateDir: str = "DeeployStateDir",
                 inputOffsets: Optional[Dict[str, int]] = None):
        # Avoid a mutable default argument ({} in the signature would be a
        # single dict shared across every MagiaDeployer instance); callers
        # passing an explicit dict are unaffected.
        if inputOffsets is None:
            inputOffsets = {}

        super().__init__(graph,
                         deploymentPlatform,
                         inputTypes,
                         loweringOptimizer,
                         scheduler,
                         name,
                         default_channels_first = default_channels_first,
                         deeployStateDir = deeployStateDir,
                         inputOffsets = inputOffsets)

        self.loweringOptimizer.passes += [
            # Extra optimizer passes appended to the lowering optimizer.
            # NOTE(review): these appear distinct from the "normal"
            # optimization passes defined on the Platform — confirm.
        ]
113 changes: 113 additions & 0 deletions Deeploy/Targets/Magia/Platform.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,113 @@
# SPDX-FileCopyrightText: 2026 ETH Zurich and University of Bologna
#
# SPDX-License-Identifier: Apache-2.0

import numpy as np
import onnx_graphsurgeon as gs

from Deeploy.DeeployTypes import ConstantBuffer, DeploymentEngine, DeploymentPlatform, NetworkContext, NodeMapper, \
NodeTemplate, StructBuffer, TopologyOptimizer, TransientBuffer, VariableBuffer
from Deeploy.Targets.Generic.Bindings import BasicAddBindings
from Deeploy.Targets.Generic.Layers import AddLayer
from Deeploy.Targets.Generic.Parsers import AddParser
from Deeploy.Targets.Generic.Templates import AllocateTemplate as BasicAllocateTemplate
from Deeploy.Targets.Magia.Templates import AllocateTemplate, FreeTemplate

AddMapper = NodeMapper(AddParser(), BasicAddBindings)

MagiaMapping = {'Add': AddLayer([AddMapper])}


class MagiaVariableBuffer(VariableBuffer):
    """Variable buffer for Magia; alloc/free templates dispatch on memory level."""

    initTemplate = AllocateTemplate.magiaInitTemplate
    allocTemplate = AllocateTemplate.magiaAllocateTemplate
    deallocTemplate = FreeTemplate.magiaFreeTemplate

    def _bufferRepresentation(self):
        # _memoryLevel is only present once a memory-level annotation has been
        # applied; default to None so the templates take the fallback branch.
        return {
            "type": self._instance,
            "name": self.name,
            "size": int(np.prod(self.shape)),
            "_memoryLevel": getattr(self, "_memoryLevel", None)
        }


class MagiaTransientBuffer(TransientBuffer):
    """Transient (scratch) buffer for Magia; shares the variable-buffer templates."""

    initTemplate = AllocateTemplate.magiaInitTemplate
    allocTemplate = AllocateTemplate.magiaAllocateTemplate
    deallocTemplate = FreeTemplate.magiaFreeTemplate

    def _bufferRepresentation(self):
        # getattr with a default replaces the hasattr/else dance; None means
        # no memory level has been annotated yet.
        return {
            "type": self._type,
            "name": self.name,
            "size": self.size,
            "_memoryLevel": getattr(self, "_memoryLevel", None)
        }


class MagiaConstantBuffer(ConstantBuffer):
    """Constant (global) buffer for Magia; extends the base representation
    with the buffer's memory level."""

    initTemplate = AllocateTemplate.magiaGlobalInitTemplate
    allocTemplate = AllocateTemplate.magiaGlobalAllocateTemplate
    deallocTemplate = FreeTemplate.magiaGlobalTemplate

    def _bufferRepresentation(self):
        # Reuse the base representation and attach the memory level; getattr
        # with a default replaces the hasattr/else boilerplate.
        operatorRepresentation = super()._bufferRepresentation()
        operatorRepresentation["_memoryLevel"] = getattr(self, "_memoryLevel", None)
        return operatorRepresentation


class MagiaStructBuffer(StructBuffer):
    """Struct buffer for Magia; reuses the generic reference templates.

    The dealloc template is empty — struct buffers emit no free code.
    """

    initTemplate = BasicAllocateTemplate.referenceStructInitTemplate
    allocTemplate = BasicAllocateTemplate.referenceStructAllocateTemplate
    deallocTemplate = NodeTemplate("")


# Topology optimizer applied to the ONNX graph for the Magia target.
# Currently empty; passes are to be added as the backend matures.
MagiaOptimizer = TopologyOptimizer(
    [
        # Insert here the ONNX optimization passes.
    ],
    name = "MagiaOptimizer")

# Headers included in the generated network sources for Magia.
_includeList = ["tile.h", "idma.h", "redmule.h", "eventunit.h"]


class MagiaMeshEngine(DeploymentEngine):
    """Deployment engine for the Magia mesh.

    Wraps the generic DeploymentEngine with a configurable number of mesh
    tiles (``n_tiles``).
    """

    def __init__(self,
                 name: str,
                 Mapping: dict = MagiaMapping,
                 initCode: str = "",
                 includeList: list = _includeList,
                 n_tiles: int = 4) -> None:
        # NOTE: Mapping and includeList default to shared module-level
        # objects; do not mutate them in place.
        super().__init__(name, Mapping, initCode, includeList)
        # Number of mesh tiles; overwritten externally (e.g. generateNetwork
        # sets it from the --tiles CLI argument).
        self.n_tiles = n_tiles


class MagiaPlatform(DeploymentPlatform):
    """Deployment platform for Magia with a single mesh engine by default."""

    def __init__(self,
                 engines = None,
                 variableBuffer = MagiaVariableBuffer,
                 constantBuffer = MagiaConstantBuffer,
                 structBuffer = MagiaStructBuffer,
                 transientBuffer = MagiaTransientBuffer) -> None:
        # Build the default engine list per instance. A default of
        # [MagiaMeshEngine("MagiaMesh")] in the signature would construct ONE
        # engine at class-definition time and alias it (and its mutable
        # n_tiles) across every MagiaPlatform instance.
        if engines is None:
            engines = [MagiaMeshEngine("MagiaMesh")]
        super().__init__(engines, variableBuffer, constantBuffer, structBuffer, transientBuffer)
25 changes: 25 additions & 0 deletions Deeploy/Targets/Magia/Templates/AllocateTemplate.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,25 @@
# SPDX-FileCopyrightText: 2021 ETH Zurich and University of Bologna
#
# SPDX-License-Identifier: Apache-2.0

from Deeploy.DeeployTypes import NodeTemplate

# Declaration of a local buffer pointer; allocation happens separately below.
magiaInitTemplate = NodeTemplate("${type.typeName} ${name};\n")

# Heap allocation dispatched on the annotated memory level: L1 buffers use
# magia_l1_malloc, L2 or un-annotated (_memoryLevel is None) buffers use
# magia_l2_malloc.
magiaAllocateTemplate = NodeTemplate("""
% if _memoryLevel == "L1":
${name} = (${type.typeName}) magia_l1_malloc(sizeof(${type.referencedType.typeName}) * ${size});\n
% elif _memoryLevel == "L2" or _memoryLevel is None:
${name} = (${type.typeName}) magia_l2_malloc(sizeof(${type.referencedType.typeName}) * ${size});\n
% endif
""")

# Global (constant) buffer definition with inline initializer values.
# NOTE(review): the L2/None branch emits `extern <type> <name>[...] = {...};`
# — `extern` combined with an initializer is a definition and typically draws
# a compiler warning; confirm whether `extern`, `static`, or no storage-class
# specifier is intended here.
magiaGlobalInitTemplate = NodeTemplate("""
% if _memoryLevel == "L1":
static ${type.referencedType.typeName} ${name}[${size}] = {${values}};\n
% elif _memoryLevel == "L2" or _memoryLevel is None:
extern ${type.referencedType.typeName} ${name}[${size}] = {${values}};\n
% endif
""")

# Globals are defined statically above, so no runtime allocation is emitted.
magiaGlobalAllocateTemplate = NodeTemplate("")
15 changes: 15 additions & 0 deletions Deeploy/Targets/Magia/Templates/FreeTemplate.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
# SPDX-FileCopyrightText: 2023 ETH Zurich and University of Bologna
#
# SPDX-License-Identifier: Apache-2.0

from Deeploy.DeeployTypes import NodeTemplate

# Free a local buffer, dispatched on the annotated memory level: L1 buffers
# via magia_l1_free, L2 or un-annotated (_memoryLevel is None) via
# magia_l2_free. Both calls pass the allocation's byte size.
magiaFreeTemplate = NodeTemplate("""
% if _memoryLevel == "L1":
magia_l1_free(${name}, sizeof(${type.referencedType.typeName}) * ${size});
% elif _memoryLevel == "L2" or _memoryLevel is None:
magia_l2_free(${name}, sizeof(${type.referencedType.typeName}) * ${size});
% endif
""")

# Dealloc template for global (constant) buffers.
# NOTE(review): globals are emitted as static/extern array definitions and
# their allocate template is empty (no malloc) — yet this frees them via
# magia_l2_free. Freeing memory that was never heap-allocated looks wrong;
# confirm whether this template should be empty instead.
magiaGlobalTemplate = NodeTemplate("magia_l2_free(${name}, sizeof(${type.referencedType.typeName}) * ${size});")
Empty file.
Empty file.
22 changes: 22 additions & 0 deletions DeeployTest/deeployRunner_magia.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,22 @@
#!/usr/bin/env python
# SPDX-FileCopyrightText: 2025 ETH Zurich and University of Bologna
#
# SPDX-License-Identifier: Apache-2.0

import sys

from testUtils.deeployRunner import main

if __name__ == "__main__":

    # Parser setup callback to add Magia-specific arguments.
    # NOTE(review): the --tiles default here is 4, while generateNetwork.py
    # defaults --tiles to 1 — confirm the intended default.
    def setup_parser(parser):
        parser.add_argument('--tiles', type = int, default = 4, help = 'Number of mesh tiles (default: 4)')

    # Run the shared test runner with Magia defaults; tiling is disabled and
    # no simulator is invoked by default.
    sys.exit(
        main(
            default_platform = "Magia",
            default_simulator = "none",
            tiling_enabled = False,
            parser_setup_callback = setup_parser,
        ))
31 changes: 19 additions & 12 deletions DeeployTest/generateNetwork.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@
from Deeploy.DeeployTypes import _NoVerbosity
from Deeploy.Logging import DEFAULT_LOGGER as log
from Deeploy.Targets.CortexM.Platform import CMSISPlatform
from Deeploy.Targets.Magia.Platform import MagiaMeshEngine, MagiaPlatform
from Deeploy.Targets.PULPOpen.Platform import PULPClusterEngine, PULPPlatform


Expand Down Expand Up @@ -88,6 +89,10 @@ def generateNetwork(args):
for cluster in clusters:
cluster.n_cores = args.cores

meshes = [engine for engine in platform.engines if isinstance(engine, MagiaMeshEngine)]
for mesh in meshes:
mesh.n_tiles = args.tiles

inputTypes = {}
inputOffsets = {}

Expand Down Expand Up @@ -166,34 +171,36 @@ def generateNetwork(args):
dest = 'debug',
action = 'store_true',
default = False,
help = 'Enable debugging mode\n')
help = 'Enable debugging mode.\n')
parser.add_argument('--profileUntiled',
action = 'store_true',
dest = 'profileUntiled',
default = False,
help = 'Profile Untiled for L2\n')
help = 'Profile Untiled for L2.\n')
parser.add_argument('--input-type-map',
nargs = '*',
default = [],
type = str,
help = '(Optional) mapping of input names to data types. '
'If not specified, types are inferred from the input data. '
'Example: --input-type-map input_0=int8_t input_1=float32_t ...')
'If not specified, types are inferred from the input data.'
'Example: --input-type-map input_0=int8_t input_1=float32_t ...\n')
parser.add_argument('--input-offset-map',
nargs = '*',
default = [],
type = str,
help = '(Optional) mapping of input names to offsets. '
'If not specified, offsets are set to 0. '
'Example: --input-offset-map input_0=0 input_1=128 ...')
'Example: --input-offset-map input_0=0 input_1=128 ...\n')
parser.add_argument('--shouldFail', action = 'store_true')
parser.add_argument(
"--cores",
type = int,
default = 1,
help =
"Number of cores on which the network is run. Currently, required for im2col buffer sizing on Siracusa. Default: 1.",
)
parser.add_argument("--cores",
type = int,
default = 1,
help = 'Number of cores on which the network is run.'
'Currently, required for im2col buffer sizing on Siracusa. Default: 1.\n')
parser.add_argument("--tiles",
type = int,
default = 1,
help = 'Number of tiles on which the network is run (mesh based architectures only).\n')
parser.set_defaults(shouldFail = False)

args = parser.parse_args()
Expand Down
30 changes: 18 additions & 12 deletions DeeployTest/testUtils/codeGenerate.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@
import numpy as np

from Deeploy.DeeployTypes import CodeGenVerbosity, ConstantBuffer, NetworkDeployer, VariableBuffer
from Deeploy.Targets.Magia.Platform import MagiaPlatform
from Deeploy.Targets.MemPool.Platform import MemPoolPlatform
from Deeploy.Targets.PULPOpen.Platform import MemoryPULPPlatform, MemoryPULPPlatformWrapper, PULPPlatform

Expand Down Expand Up @@ -119,12 +120,17 @@ def generateTestNetworkHeader(deployer: NetworkDeployer) -> str:
retStr += """
#ifndef __DEEPLOY_HEADER__
#define __DEEPLOY_HEADER__
#include <stdio.h>
#include <stdint.h>
#include <stdlib.h>
"""

if not isinstance(deployer.Platform, MagiaPlatform):
retStr += """
#include <stdio.h>
#include <stdlib.h>
"""

retStr += deployer.generateIncludeString()
if isinstance(deployer.Platform, (PULPPlatform, MemoryPULPPlatform, MemoryPULPPlatformWrapper)):
if isinstance(deployer.Platform, (PULPPlatform, MemoryPULPPlatform, MemoryPULPPlatformWrapper, MagiaPlatform)):
retStr += """
void RunNetwork();
void InitNetwork();
Expand All @@ -148,15 +154,15 @@ def generateTestNetworkHeader(deployer: NetworkDeployer) -> str:
def generateTestNetworkImplementation(deployer: NetworkDeployer, verbosityCfg: CodeGenVerbosity) -> str:
retStr = ""

retStr += """#include <stdio.h>
#include <stdlib.h>
#include <math.h>
"""
retStr += deployer.generateIncludeString()
retStr += """
if not isinstance(deployer.Platform, MagiaPlatform):
retStr += """#include <stdio.h>
#include <stdlib.h>
#include <math.h>
"""
retStr += deployer.generateIncludeString()

retStr += """
#include "Network.h"

"""

retStr += deployer.generateBufferInitializationCode()
Expand All @@ -168,7 +174,7 @@ def generateTestNetworkImplementation(deployer: NetworkDeployer, verbosityCfg: C
retStr += """
void RunNetwork(__attribute__((unused)) uint32_t core_id, __attribute__((unused)) uint32_t numThreads){
"""
elif isinstance(deployer.Platform, (PULPPlatform, MemoryPULPPlatform, MemoryPULPPlatformWrapper)):
elif isinstance(deployer.Platform, (PULPPlatform, MemoryPULPPlatform, MemoryPULPPlatformWrapper, MagiaPlatform)):
retStr += """
void RunNetwork(){
"""
Expand All @@ -180,7 +186,7 @@ def generateTestNetworkImplementation(deployer: NetworkDeployer, verbosityCfg: C
retStr += deployer.generateInferenceInitializationCode()

retStr += deployer.generateFunction(verbosityCfg)
if isinstance(deployer.Platform, (PULPPlatform, MemoryPULPPlatform, MemoryPULPPlatformWrapper)):
if isinstance(deployer.Platform, (PULPPlatform, MemoryPULPPlatform, MemoryPULPPlatformWrapper, MagiaPlatform)):
retStr += """
}

Expand Down
16 changes: 11 additions & 5 deletions DeeployTest/testUtils/core/execution.py
Original file line number Diff line number Diff line change
Expand Up @@ -207,7 +207,10 @@ def run_simulation(config: DeeployTestConfig, skip: bool = False) -> TestResult:
return test_result


def run_complete_test(config: DeeployTestConfig, skipgen: bool = False, skipsim: bool = False) -> TestResult:
def run_complete_test(config: DeeployTestConfig,
skipgen: bool = False,
skipbuild: bool = False,
skipsim: bool = False) -> TestResult:
"""
Run a complete test: generate, configure, build, and simulate.
"""
Expand All @@ -216,11 +219,14 @@ def run_complete_test(config: DeeployTestConfig, skipgen: bool = False, skipsim:
# Step 1: Generate network
generate_network(config, skip = skipgen)

# Step 2: Configure CMake
configure_cmake(config)
if skipbuild:
log.info(f"Skipping cmake configuration and binary building for {config.test_name}")
else:
# Step 2: Configure CMake
configure_cmake(config)

# Step 3: Build binary
build_binary(config)
# Step 3: Build binary
build_binary(config)

# Step 4: Run simulation
result = run_simulation(config, skip = skipsim)
Expand Down
Loading
Loading