multilayered
MultiLayered (Module)
Source code in evotorch/neuroevolution/net/multilayered.py
class MultiLayered(nn.Module):
    def __init__(self, *layers: nn.Module):
        super().__init__()
        self._submodules = nn.ModuleList(layers)

    def forward(self, x: torch.Tensor, h: Optional[dict] = None):
        if h is None:
            h = {}

        new_h = {}

        for i, layer in enumerate(self._submodules):
            layer_h = h.get(i, None)

            if layer_h is None:
                layer_result = layer(x)
            else:
                layer_result = layer(x, h[i])

            if isinstance(layer_result, tuple):
                if len(layer_result) == 2:
                    x, layer_new_h = layer_result
                else:
                    raise ValueError(
                        f"The layer number {i} returned a tuple of length {len(layer_result)}."
                        f" A tensor or a tuple of two elements was expected."
                    )
            elif isinstance(layer_result, torch.Tensor):
                x = layer_result
                layer_new_h = None
            else:
                raise TypeError(
                    f"The layer number {i} returned an object of type {type(layer_result)}."
                    f" A tensor or a tuple of two elements was expected."
                )

            if layer_new_h is not None:
                new_h[i] = layer_new_h

        if len(new_h) == 0:
            return x
        else:
            return x, new_h

    def __iter__(self):
        return self._submodules.__iter__()

    def __getitem__(self, i):
        return self._submodules[i]

    def append(self, module: nn.Module):
        self._submodules.append(module)
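
A minimal usage sketch (not taken from the EvoTorch sources; the import path and layer sizes are assumptions made only for illustration). When every submodule is stateless, the chained forward pass returns just the output tensor:

import torch
from torch import nn
from evotorch.neuroevolution.net.multilayered import MultiLayered  # assumed import path

# Hypothetical layer sizes, chosen only for illustration.
net = MultiLayered(
    nn.Linear(4, 8),
    nn.Tanh(),
    nn.Linear(8, 2),
)

x = torch.randn(4)
y = net(x)      # all layers are stateless, so only a tensor comes back
print(y.shape)  # torch.Size([2])

net.append(nn.Softmax(dim=-1))   # further layers can be appended after construction
for i, layer in enumerate(net):  # the container is iterable and indexable
    print(i, net[i])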
forward(self, x, h=None)
Defines the computation performed at every call.

Should be overridden by all subclasses.

.. note::
    Although the recipe for forward pass needs to be defined within
    this function, one should call the :class:`Module` instance afterwards
    instead of this since the former takes care of running the
    registered hooks while the latter silently ignores them.
Source code in evotorch/neuroevolution/net/multilayered.py
def forward(self, x: torch.Tensor, h: Optional[dict] = None):
    if h is None:
        h = {}

    new_h = {}

    for i, layer in enumerate(self._submodules):
        layer_h = h.get(i, None)

        if layer_h is None:
            layer_result = layer(x)
        else:
            layer_result = layer(x, h[i])

        if isinstance(layer_result, tuple):
            if len(layer_result) == 2:
                x, layer_new_h = layer_result
            else:
                raise ValueError(
                    f"The layer number {i} returned a tuple of length {len(layer_result)}."
                    f" A tensor or a tuple of two elements was expected."
                )
        elif isinstance(layer_result, torch.Tensor):
            x = layer_result
            layer_new_h = None
        else:
            raise TypeError(
                f"The layer number {i} returned an object of type {type(layer_result)}."
                f" A tensor or a tuple of two elements was expected."
            )

        if layer_new_h is not None:
            new_h[i] = layer_new_h

    if len(new_h) == 0:
        return x
    else:
        return x, new_h
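
When a submodule is recurrent, this forward pass expects it to return an (output, hidden) pair, collects the hidden states into a dict keyed by layer index, and returns that dict alongside the output so it can be fed back on the next call. Below is a hedged sketch of that pattern; StatefulCell, the import path, and all sizes are hypothetical and exist only to demonstrate the protocol:

import torch
from torch import nn
from evotorch.neuroevolution.net.multilayered import MultiLayered  # assumed import path

class StatefulCell(nn.Module):
    """Toy recurrent layer returning the (output, hidden) pair MultiLayered expects."""

    def __init__(self, in_features: int, hidden_size: int):
        super().__init__()
        self.cell = nn.RNNCell(in_features, hidden_size)

    def forward(self, x: torch.Tensor, h: torch.Tensor = None):
        new_h = self.cell(x, h)
        return new_h, new_h  # output and hidden state, as a two-element tuple

net = MultiLayered(
    nn.Linear(4, 8),
    StatefulCell(8, 16),
    nn.Linear(16, 2),
)

h = None
for t in range(5):
    x = torch.randn(1, 4)
    y, h = net(x, h)  # h is {1: hidden_tensor}: only layer 1 reported a hidden state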