T spy code changes #1541

Closed
wants to merge 19 commits
@@ -106,7 +106,7 @@ def __push_bot_stop(self, label, connection):
with self.__lock:
self.__ready = False

def _receive_retina_data(self, data):
def _receive_retina_data(self, data: bytearray):
"""
Receive retina packets from the PushBot and converts them into
neuron spikes within the spike injector system.
@@ -134,7 +134,7 @@ def _receive_retina_data(self, data):
self.__next_data = data[i:i+1]

# Filter out the usable data
data_filtered = numpy.fromstring(data_all, dtype=numpy.uint16)
data_filtered = numpy.frombuffer(data_all, dtype=numpy.uint16)
y_values = (data_filtered >> self.__orig_y_shift) & self.__y_mask
x_values = (data_filtered >> self.__orig_x_shift) & self.__x_mask
polarity = (data_filtered >> _P_SHIFT) & _P_MASK
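The fromstring-to-frombuffer switch follows numpy's deprecation of binary-mode `fromstring`. A minimal sketch of the decoding pattern, using illustrative shifts and masks rather than the actual PushBot retina constants:

```python
import numpy

# Two hypothetical packed events, each stored as one uint16
raw = bytearray(b"\x34\x12\x78\x56")

# frombuffer reinterprets the bytes directly; fromstring is deprecated
# for binary input and copies the data
events = numpy.frombuffer(raw, dtype=numpy.uint16)

# Illustrative shifts/masks only, not the real PushBot layout
Y_SHIFT, X_SHIFT, P_SHIFT = 8, 1, 0
Y_MASK, X_MASK, P_MASK = 0x7F, 0x7F, 0x1
y = (events >> Y_SHIFT) & Y_MASK
x = (events >> X_SHIFT) & X_MASK
polarity = (events >> P_SHIFT) & P_MASK
print(x, y, polarity)
```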
3 changes: 2 additions & 1 deletion spynnaker/pyNN/external_devices_models/spif_input_device.py
@@ -142,9 +142,10 @@ def __init__(self, pipe, n_neurons, n_neurons_per_partition,
self.__index_by_slice = dict()

self.__pipe = pipe
self.__base_key = base_key
if self.__base_key is None:
self.__base_key = SPIFInputDevice.__n_devices
else:
self.__base_key = base_key
self.__base_key = self.__base_key << self.__neuron_bits
SPIFInputDevice.__n_devices += 1

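The reordered constructor keeps the device-counter fallback ahead of the explicit key. A simplified sketch of the intended selection, using a stand-in class rather than the full SPIFInputDevice:

```python
class _SpifLikeDevice:
    """Stand-in for SPIFInputDevice's base-key bookkeeping (illustrative)."""
    _n_devices = 0

    def __init__(self, base_key=None, neuron_bits=16):
        # No explicit key given: fall back to the running device count
        if base_key is None:
            base_key = _SpifLikeDevice._n_devices
        # Place the chosen key above the per-neuron index bits
        self._base_key = base_key << neuron_bits
        _SpifLikeDevice._n_devices += 1


first = _SpifLikeDevice()             # key 0 << 16
second = _SpifLikeDevice(base_key=3)  # explicit key 3 << 16
```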
14 changes: 9 additions & 5 deletions spynnaker/pyNN/external_devices_models/spif_output_device.py
@@ -201,10 +201,12 @@ def start_resume_commands(self) -> Iterable[MultiCastCommand]:
# we don't know the key or mask of the incoming link...
commands = list()
for i, part in enumerate(self.__incoming_partitions):
pop_vertex = part.pre_vertex
assert isinstance(pop_vertex, PopulationApplicationVertex)
commands.append(set_xp_key_delayed(i, self._get_set_key_payload))
commands.append(set_xp_mask_delayed(i, self._get_set_mask_payload))
if part.pre_vertex in self.__output_key_and_mask:
key, mask = self.__output_key_and_mask[part.pre_vertex]
if pop_vertex in self.__output_key_and_mask:
key, mask = self.__output_key_and_mask[pop_vertex]
commands.append(set_distiller_key(i, key))
commands.append(set_distiller_mask(i, mask))
else:
@@ -213,7 +215,7 @@ def start_resume_commands(self) -> Iterable[MultiCastCommand]:
commands.append(set_distiller_mask_delayed(
i, self._get_set_dist_mask_payload))
commands.append(set_distiller_shift(
i, part.pre_vertex.n_colour_bits))
i, pop_vertex.n_colour_bits))
return commands

@property
@@ -232,12 +234,14 @@ def get_device_output_keys(self) -> Dict[MachineVertex,
all_keys: Dict[MachineVertex, List[Tuple[int, int]]] = dict()
routing_infos = SpynnakerDataView.get_routing_infos()
for i, part in enumerate(self.__incoming_partitions):
if part.pre_vertex in self.__output_key_and_mask:
pop_vertex = part.pre_vertex
assert isinstance(pop_vertex, PopulationApplicationVertex)
if pop_vertex in self.__output_key_and_mask:
key, mask = self.__output_key_and_mask[part.pre_vertex]
else:
key = i << self.__output_key_shift
mask = self._get_set_dist_mask_payload(i)
shift = part.pre_vertex.n_colour_bits
shift = pop_vertex.n_colour_bits
for m_vertex in part.pre_vertex.splitter.get_out_going_vertices(
part.identifier):
atom_keys: Iterable[Tuple[int, int]] = list()
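The recurring pattern in this file is binding `part.pre_vertex` to a local and asserting its concrete type, so attribute access such as `n_colour_bits` is checked by the type checker. A small sketch of the idea with stand-in classes:

```python
class ApplicationVertex:
    """Stand-in for the generic application vertex type."""


class PopulationApplicationVertex(ApplicationVertex):
    """Stand-in for the vertex type that actually carries colour bits."""
    n_colour_bits = 2


def colour_shift(pre_vertex: ApplicationVertex) -> int:
    # Bind once, then assert the concrete type: this guards at runtime and
    # lets a type checker narrow the variable so n_colour_bits is known
    pop_vertex = pre_vertex
    assert isinstance(pop_vertex, PopulationApplicationVertex)
    return pop_vertex.n_colour_bits
```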
8 changes: 3 additions & 5 deletions spynnaker/pyNN/extra_algorithms/delay_support_adder.py
@@ -102,7 +102,7 @@ def __examine_edge_for_delays_to_add(
"""
# figure the max delay and if we need a delay extension
n_stages, steps_per_stage, need_delay_ext = self._check_delay_values(
edge, edge.synapse_information)
edge)

# if we need a delay, add it to the app graph.
if need_delay_ext:
@@ -187,21 +187,19 @@ def _create_delay_app_vertex_and_pre_edge(
n_delay_stages, delay_per_stage)
return delay_app_vertex

def _check_delay_values(self, app_edge, synapse_infos):
def _check_delay_values(self, app_edge):
"""
Checks the delay required from the user defined max, the max delay
supported by the post vertex splitter and the delay Extensions.

:param ApplicationEdge app_edge: the undelayed application edge
:param iterable[SynapseInformation] synapse_infos:
the synapse information objects
:return: tuple(n_delay_stages, delay_steps_per_stage, extension_needed)
"""
# get max delay required
max_delay_needed_ms = max(
synapse_info.synapse_dynamics.get_delay_maximum(
synapse_info.connector, synapse_info)
for synapse_info in synapse_infos)
for synapse_info in app_edge.synapse_infos)

# get if the post vertex needs a delay extension
post_splitter = app_edge.post_vertex.splitter
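With `synapse_infos` dropped from the signature, the maximum delay is now read off the edge itself. A hedged sketch of that reduction, assuming the `synapse_infos` attribute the new call site implies:

```python
def max_delay_needed_ms(app_edge) -> float:
    # Largest maximum delay over every synapse information object
    # attached to the undelayed application edge
    return max(
        info.synapse_dynamics.get_delay_maximum(info.connector, info)
        for info in app_edge.synapse_infos)
```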
@@ -15,6 +15,7 @@
from contextlib import contextmanager
import logging
import os
import sys
from typing import Dict, Tuple

import numpy
@@ -37,7 +38,7 @@ def _print_all():
Update the numpy print options to display everything.
"""
print_opts = numpy.get_printoptions()
numpy.set_printoptions(threshold=numpy.nan)
numpy.set_printoptions(threshold=sys.maxsize)
try:
yield
finally:
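Recent numpy releases reject `numpy.nan` as a print threshold, so `sys.maxsize` is the usual way to say "never summarise". A self-contained sketch of the corrected context manager (renamed here, same shape as the diff):

```python
from contextlib import contextmanager
import sys

import numpy


@contextmanager
def print_all():
    """Temporarily raise the numpy print threshold so arrays print in full."""
    print_opts = numpy.get_printoptions()
    numpy.set_printoptions(threshold=sys.maxsize)
    try:
        yield
    finally:
        # Restore whatever options were in force before entering
        numpy.set_printoptions(**print_opts)


with print_all():
    print(numpy.arange(2000))   # printed without "..." elision
```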
10 changes: 0 additions & 10 deletions spynnaker/pyNN/models/common/eieio_spike_recorder.py
@@ -42,13 +42,3 @@ def record(self, new_state: bool):
Old method assumed to be spikes.
"""
self.__record = bool(new_state)

def set_recording(self, new_state: bool, sampling_interval=None):
"""
:param bool new_state:
:param None sampling_interval: not supported functionality
"""
if sampling_interval is not None:
logger.warning("Sampling interval currently not supported for "
"SpikeSourceArray so being ignored")
self.__record = bool(new_state)
6 changes: 3 additions & 3 deletions spynnaker/pyNN/models/common/local_only_2d_common.py
@@ -78,11 +78,11 @@ def get_delay_for_source(incoming: "Projection") -> Tuple[
delay = s_info.synapse_dynamics.delay
steps = delay * SpynnakerDataView.get_simulation_time_step_per_ms()
max_delay = app_edge.post_vertex.splitter.max_support_delay()
local_delay = steps % max_delay
delay_stage = 0
local_delay = int(steps % max_delay)
delay_stage: int = 0
pre_vertex: ColouredApplicationVertex = app_edge.pre_vertex
if steps > max_delay:
delay_stage = (steps // max_delay) - 1
delay_stage = int(steps // max_delay) - 1
delay_edge = app_edge.delay_edge
assert delay_edge is not None
pre_vertex = delay_edge.pre_vertex
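The added `int()` casts matter because `steps` comes from a float multiplication, so `//` and `%` would otherwise stay floats. A tiny worked example of the stage arithmetic with illustrative numbers:

```python
# Illustrative values: 9 ms delay at a 1 ms timestep, post splitter
# supporting at most 4 timesteps of delay per stage
steps = 9.0 * 1.0
max_delay = 4

local_delay = int(steps % max_delay)            # 1 timestep within the stage
delay_stage = 0
if steps > max_delay:
    delay_stage = int(steps // max_delay) - 1   # 1 extra delay stage
print(local_delay, delay_stage)                 # -> 1 1
```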
12 changes: 6 additions & 6 deletions spynnaker/pyNN/models/common/parameter_holder.py
@@ -112,7 +112,13 @@ def _get_data_items(self) -> Dict[str, _BaseValueType]:
def __getitem__(self, s):
data = self._get_data_items()
if self.__single_key is not None:
if not isinstance(s, int):
raise KeyError("As there is only one array held "
"only int parameter are valid")
return data[self.__single_key][s]
if not isinstance(s, str):
raise KeyError("As multiple arrays held "
"only str parameter are valid")
return data[s]

def __len__(self) -> int:
@@ -145,12 +151,6 @@ def __contains__(self, item: str) -> bool:
return item in data[self.__single_key]
return item in data

def __getattr__(self, name: str):
data = self._get_data_items()
if self.__single_key is not None:
return getattr(data[self.__single_key], name)
return getattr(data, name)

def __eq__(self, other: Any) -> bool:
data = self._get_data_items()
if self.__single_key is not None:
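The stricter `__getitem__` makes the two access modes explicit: integer indexing when a single parameter array is held, string keys when several are. A toy sketch of that behaviour, not the real ParameterHolder internals:

```python
class _ToyHolder:
    def __init__(self, data, single_key=None):
        self._data = data            # parameter name -> list of values
        self._single_key = single_key

    def __getitem__(self, s):
        if self._single_key is not None:
            # Only one array held: index it by neuron position
            if not isinstance(s, int):
                raise KeyError("only int indices are valid here")
            return self._data[self._single_key][s]
        # Several arrays held: select one by parameter name
        if not isinstance(s, str):
            raise KeyError("only str keys are valid here")
        return self._data[s]


single = _ToyHolder({"tau_m": [20.0, 21.0]}, single_key="tau_m")
print(single[1])        # 21.0
many = _ToyHolder({"tau_m": [20.0], "v_rest": [-65.0]})
print(many["v_rest"])   # [-65.0]
```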
50 changes: 31 additions & 19 deletions spynnaker/pyNN/models/defaults.py
@@ -20,7 +20,7 @@
import logging
from types import MappingProxyType
from typing import (
Any, Callable, FrozenSet, Iterable, List, Mapping, Optional)
Any, Callable, FrozenSet, Iterable, List, Mapping, Optional, Tuple)
from spinn_utilities.classproperty import classproperty
from spinn_utilities.log import FormatAdapter

@@ -118,35 +118,41 @@ def default_initial_values(state_variables: Iterable[str]) -> Callable:
:param iterable(str) state_variables:
The names of the arguments that are state variables
"""
def wrap(method):
def wrap(method: Callable) -> Callable:
"""
Wraps the init method with a check method
:param method: init method to wrap
:return:
"""
# pylint: disable=protected-access
# Find the real method in case we use multiple of these decorators
wrapped = method
while hasattr(method, "_method"):
method = getattr(method, "_method")

# Store the state variables of the method to be used later
method._state_variables = frozenset(state_variables)
method._state_variables = ( # type: ignore[attr-defined]
frozenset(state_variables))
method_args = inspect.getfullargspec(method)

def wrapper(*args, **kwargs):
def wrapper(*args: Any, **kwargs: Any) -> None:
# Check for state variables that have been specified in cell_params
args_provided = method_args.args[:len(args)]
args_provided.extend([
arg for arg in kwargs if arg in method_args.args])
variables = method._state_variables # type: ignore[attr-defined]
for arg in args_provided:
if arg in method._state_variables:
if arg in variables:
logger.warning(
"Formal PyNN specifies that {} should be set using "
"initial_values not cell_params", arg)
wrapped(*args, **kwargs)

# Store the real method in the returned object
wrapper._method = method
wrapper._method = method # type: ignore[attr-defined]
return wrapper
return wrap


def defaults(cls: type) -> type:
"""
Get the default parameters and state variables from the arguments to
@@ -196,6 +202,9 @@ class AbstractProvidesDefaults(object):
@default_initial_values decorators with values read from the init.
"""

__cashed_defaults: Optional[Mapping[str, Any]] = None
__cashed_initials: Optional[Mapping[str, Any]] = None

@classmethod
def __fill_in_defaults(cls):
"""
@@ -215,7 +224,7 @@ def __fill_in_defaults(cls):
default_args = ([] if init_args.args is None else
init_args.args[n_args - n_defaults:])
if init_args.defaults is None:
default_values = []
default_values: Tuple = ()
else:
default_values = init_args.defaults

@@ -241,16 +250,15 @@ def __fill_in_defaults(cls):
_check_args(params.union(svars), default_args, init)

# fill in the defaults so this method is only called once
cls.default_parameters = {}
cls.default_initial_values = {}
__defaults = {}
__initials = {}
for arg, value in zip(default_args, default_values):
if arg in params:
cls.default_parameters[arg] = value
__defaults[arg] = value
elif arg in svars:
cls.default_initial_values[arg] = value
cls.default_parameters = MappingProxyType(cls.default_parameters)
cls.default_initial_values = (
MappingProxyType(cls.default_initial_values))
__initials[arg] = value
cls.__cashed_defaults = MappingProxyType(__defaults)
cls.__cashed_initials = MappingProxyType(__initials)

@classproperty
def default_parameters( # pylint: disable=no-self-argument
@@ -265,8 +273,10 @@ def default_parameters( # pylint: disable=no-self-argument
this will be all the init parameters with a default value
less any defined in @default_initial_values
"""
cls.__fill_in_defaults()
return cls.default_parameters
if cls.__cashed_defaults is None:
cls.__fill_in_defaults()
assert cls.__cashed_defaults is not None
return cls.__cashed_defaults

@classproperty
def default_initial_values( # pylint: disable=no-self-argument
@@ -284,5 +294,7 @@ def default_initial_values( # pylint: disable=no-self-argument

If neither decorator is used this will be an empty Mapping
"""
cls.__fill_in_defaults()
return cls.default_initial_values
if cls.__cashed_initials is None:
cls.__fill_in_defaults()
assert cls.__cashed_initials is not None
return cls.__cashed_initials
@@ -201,13 +201,13 @@ def get_n_connections_from_pre_vertex_maximum(
n_post_atoms / float(synapse_info.n_post_neurons),
1.0)
max_in_slice = max(utility_calls.get_probable_maximum_selected(
self.__num_synapses, self.__num_synapses, prob_in_slice), 1.0)
self.__num_synapses, self.__num_synapses, prob_in_slice), 1)

# Similarly if the chance of there being one in a row is 0, there will
# probably be 1
prob_in_row = 1.0 / synapse_info.n_pre_neurons
n_connections = max(utility_calls.get_probable_maximum_selected(
self.__num_synapses, max_in_slice, prob_in_row), 1.0)
self.__num_synapses, max_in_slice, prob_in_row), 1)

if min_delay is None or max_delay is None:
return int(math.ceil(n_connections))