~tfardet/nngt-developers

NNGT: Simulation/Analysis - NEST3 support and bugfix for SWP v4 SUPERSEDED

tfardet (1):
  Simulation/Analysis - NEST3 support and bugfix for SWP

 18 files changed, 349 insertions(+), 132 deletions(-)
How do I use this?

Copy & paste the following snippet into your terminal to import this patchset into git:

curl -s https://lists.sr.ht/~tfardet/nngt-developers/patches/23380/mbox | git am -3

[PATCH NNGT v4] Simulation/Analysis - NEST3 support and bugfix for SWP

This patch provides full support for NEST 3 as well as some fixes for
NEST 2 and tests for the simulation functions.
It also fixes an extra square that was missed in 223231a2.
---
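
For reviewers: most of the dual NEST 2/3 code paths below rely on a single
version check added in nngt/simulation/nest_utils.py and reused by the other
modules. In short (a sketch of the pattern, taken from that hunk):

    # NodeCollection only exists in NEST 3, so its import doubles as a version check
    try:
        from nest import NodeCollection
        nest_version = 3
        spike_rec = 'spike_recorder'   # the recording device was renamed in NEST 3
    except ImportError:
        nest_version = 2
        spike_rec = 'spike_detector'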
 .build.yml                             |   4 +-
 doc/conf.py                            |  10 +-
 doc/examples/basic_nest_network.py     |  21 ++++-
 doc/examples/introduction_to_groups.py |   4 +-
 doc/examples/nest_network.py           |  17 +++-
 doc/examples/random_balanced.py        |  41 ++++----
 doc/user/nest-interaction.rst          |   6 +-
 doc/user/neural-groups.rst             |  16 ++--
 doc/user/tutorial.rst                  |   4 +-
 nngt/analysis/activity_analysis.py     |  40 +++++---
 nngt/analysis/graph_analysis.py        |   2 +-
 nngt/lib/graph_helpers.py              |   8 +-
 nngt/lib/sorting.py                    |  30 ++++--
 nngt/simulation/nest_activity.py       |  14 +--
 nngt/simulation/nest_graph.py          |  24 +++--
 nngt/simulation/nest_plot.py           |  73 +++++++++------
 nngt/simulation/nest_utils.py          |  43 +++++----
 testing/test_nest.py                   | 124 +++++++++++++++++++++++++
 18 files changed, 349 insertions(+), 132 deletions(-)
 create mode 100644 testing/test_nest.py

diff --git a/.build.yml b/.build.yml
index c342ac7..30ffb98 100644
--- a/.build.yml
+++ b/.build.yml
@@ -8,11 +8,13 @@ tasks:
        cd NNGT
        python3 extra/check_headers.py
    - setup: |
        sudo apt install -y software-properties-common
        sudo sh -c 'echo -n "deb https://downloads.skewed.de/apt focal main\n" >> /etc/apt/sources.list'
        sudo apt-key adv --keyserver keys.openpgp.org --recv-key 612DEFB798507F25
        sudo add-apt-repository -y ppa:nest-simulator/nest
        sudo apt-get update -qq
        sudo apt install -y gcc libcairo2-dev pkg-config build-essential autoconf automake python3-dev libblas-dev
        sudo apt install -y liblapack-dev libatlas-base-dev gfortran libxml2-dev openmpi-bin libopenmpi-dev libgmp-dev
        sudo apt install -y nest liblapack-dev libatlas-base-dev gfortran libxml2-dev openmpi-bin libopenmpi-dev libgmp-dev
        sudo apt install -y python3-pip python3-tk libigraph0v5 libigraph0-dev python3-graph-tool
        pip3 install numpy scipy cython mpi4py
        pip3 install networkx python-igraph
diff --git a/doc/conf.py b/doc/conf.py
index 8c5d310..059d4b2 100755
--- a/doc/conf.py
+++ b/doc/conf.py
@@ -25,6 +25,7 @@ import os, errno
import shlex
import re
import fnmatch
import importlib.util as imputil

import sphinx_bootstrap_theme

@@ -52,7 +53,9 @@ If on rtd, the graph libraries are not available so they need to be mocked
'''

try:
    import nest
    if imputil.find_spec("nest") is None:
        # NEST is not available
        raise ImportError
except:
    import mock
    mock_object = mock.Mock(__name__ = "Mock", __bases__ = (object,))
@@ -105,12 +108,13 @@ import nngt

# import simulation & geospatial explicitely to avoid import conflict with
# lazy load
import nngt.simulation
try:
    import nngt.geospatial
except:
    pass

import nngt.simulation

from autosum import gen_autosum

# find all *.in files
@@ -372,7 +376,7 @@ html_favicon = 'images/nngt_ico.png'
html_static_path = ['_static']

# Add permalinks to headers
html_add_permalinks = "#"
html_permalinks_icon = "#"

# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
diff --git a/doc/examples/basic_nest_network.py b/doc/examples/basic_nest_network.py
index 32096ec..5acdcf4 100644
--- a/doc/examples/basic_nest_network.py
+++ b/doc/examples/basic_nest_network.py
@@ -104,15 +104,28 @@ if nngt.get_config('with_nest'):

    # sign of NNGT versus NEST inhibitory connections
    igroup = net.population["inhibitory"]

    # in NNGT
    iedges = net.get_edges(source_node=igroup.ids)
    w_nngt = set(net.get_weights(edges=iedges))

    # in NEST
    iconn  = nest.GetConnections(
        source=list(net.population["inhibitory"].nest_gids),
        target=list(net.population.nest_gids))
    try:
        # nest 2
        iconn  = nest.GetConnections(
            source=list(net.population["inhibitory"].nest_gids),
            target=list(net.population.nest_gids))
    except:
        # nest 3
        import nest
        s = nest.NodeCollection(net.population["inhibitory"].nest_gids)
        t = nest.NodeCollection(net.population.nest_gids)

        iconn  = nest.GetConnections(source=s, target=t)

    w_nest = set(nest.GetStatus(iconn, "weight"))
    # in NNGT, inhibitory weights are positive to work with graph analysis

    # In NNGT, inhibitory weights are positive to work with graph analysis
    # methods; they are automatically converted to negative weights in NEST
    print("NNGT weights:", w_nngt, "versus NEST weights", w_nest)

diff --git a/doc/examples/introduction_to_groups.py b/doc/examples/introduction_to_groups.py
index 84d9277..060c43f 100644
--- a/doc/examples/introduction_to_groups.py
+++ b/doc/examples/introduction_to_groups.py
@@ -119,6 +119,8 @@ fsi = NeuralGroup(200, neuron_type=-1, neuron_model="iaf_psc_alpha",
# Creating neuronal populations #
# --------------------------- '''

pop = NeuralPop.from_groups((pyr, fsi))

# making populations from scratch
pop = nngt.NeuralPop(with_models=False)              # empty population
pop.create_group(200, "first_group")                 # create excitatory group
@@ -162,7 +164,7 @@ print("E/I population has size", ei_pop2.size,

# optional synaptic properties
syn_spec = {
    'default': {"model": "tsodyks2_synapse"},           # default connections
    'default': {"synaptic_model": "tsodyks2_synapse"},  # default connections
    ("pyramidal_cells", "pyramidal_cells"): {"U": 0.6}  # change a parameter
}

diff --git a/doc/examples/nest_network.py b/doc/examples/nest_network.py
index e586fc2..dcf7f89 100644
--- a/doc/examples/nest_network.py
+++ b/doc/examples/nest_network.py
@@ -51,11 +51,20 @@ adapt = nngt.NeuralGroup(
    nodes=200, neuron_model='aeif_psc_alpha', neuron_type=1,
    neuron_param=base_params)

model = 'model'

try:
    import nest
    nest.NodeCollection()
    model = 'synapse_model'
except:
    pass

synapses = {
    'default': {'model': 'tsodyks2_synapse'},
    ('oscillators', 'bursters'): {'model': 'tsodyks2_synapse', 'U': 0.6},
    ('oscillators', 'oscillators'): {'model': 'tsodyks2_synapse', 'U': 0.7},
    ('oscillators', 'adaptive'): {'model': 'tsodyks2_synapse', 'U': 0.5}
    'default': {model: 'tsodyks2_synapse'},
    ('oscillators', 'bursters'): {model: 'tsodyks2_synapse', 'U': 0.6},
    ('oscillators', 'oscillators'): {model: 'tsodyks2_synapse', 'U': 0.7},
    ('oscillators', 'adaptive'): {model: 'tsodyks2_synapse', 'U': 0.5}
}

'''
diff --git a/doc/examples/random_balanced.py b/doc/examples/random_balanced.py
index a3cb540..bf164cd 100644
--- a/doc/examples/random_balanced.py
+++ b/doc/examples/random_balanced.py
@@ -31,9 +31,6 @@ import nngt
import nngt.generation as ng


# np.random.seed(0)


'''
Simulation parameters
'''
@@ -46,7 +43,7 @@ dt = 0.1         # the resolution in ms
simtime = 1000.  # Simulation time in ms
delay = 1.5      # synaptic delay in ms

g = 5.0          # ratio inhibitory weight/excitatory weight
g = 4.0          # ratio inhibitory weight/excitatory weight
eta = 2.0        # external rate relative to threshold rate
epsilon = 0.1    # connection probability

@@ -99,13 +96,13 @@ neuron_params = {"C_m": CMem,
                 "V_th": theta}

J_unit = ComputePSPnorm(tauMem, CMem, tauSyn)
J = 0.2            # postsynaptic amplitude in mV
J = 0.8            # postsynaptic amplitude in mV
J_ex = J / J_unit  # amplitude of excitatory postsynaptic current
J_in = g * J_ex    # amplitude of inhibitory postsynaptic current

nu_th = (theta * CMem) / (J_ex * CE * np.e * tauMem * tauSyn)
nu_ex = eta * nu_th
p_rate = 1000. * nu_ex * CE
p_rate = 400. * nu_ex * CE


'''
@@ -118,13 +115,11 @@ pop = nngt.NeuralPop.exc_and_inhib(

net = nngt.Network(population=pop)

ng.connect_groups(net, "excitatory", ["excitatory", "inhibitory"],
                  "gaussian_degree", degree_type="out", avg=CE, std=0.2*CE,
                  weights=J_ex, delays=delay)
ng.connect_groups(net, pop, "excitatory", "gaussian_degree", degree_type="in",
                  avg=CE, std=0.1*CE, weights=J_ex, delays=delay)

ng.connect_groups(net, "inhibitory", ["excitatory", "inhibitory"],
                  "gaussian_degree", degree_type="out", avg=CI, std=0.2*CI,
                  weights=J_in, delays=delay)
ng.connect_groups(net, pop, "inhibitory", "gaussian_degree", degree_type="in",
                  avg=CI, std=0.1*CI, weights=J_in, delays=delay)


'''
@@ -136,7 +131,8 @@ def coefficient_of_variation(spikes):
    times = np.array(spikes["times"])
    nrns  = np.array(spikes["senders"])

    return np.nanmean([variation(np.diff(times[nrns == i])) for i in np.unique(nrns)])
    return np.nanmean([variation(np.diff(times[nrns == i]))
                       for i in np.unique(nrns)])


def cross_correlation(spikes, time, numbins=1000):
@@ -159,22 +155,20 @@ Send the network to NEST, set noise and randomize parameters
'''

if nngt.get_config('with_nest'):
    import nngt.simulation as ns
    import nest
    import nngt.simulation as ns
    from nngt.analysis import get_spikes

    nest.ResetKernel()

    nest.SetKernelStatus({"resolution": dt, "print_time": True,
                          "overwrite_files": True, 'local_num_threads': 4,
                          "rng_seeds": [5, 6, 7, 8]})
                          "overwrite_files": True, 'local_num_threads': 4})

    gids = net.to_nest()

    pg = ns.set_poisson_input(gids, rate=p_rate,
                              syn_spec={"weight": J_ex, "delay": delay})

    ns.set_minis(net, base_rate=0.1, weight=0.04)

    recorders, records = ns.monitor_groups(
        ["excitatory", "inhibitory"], network=net)

@@ -208,12 +202,13 @@ if nngt.get_config('with_nest'):
            show=True)

        # print the CV and CC
        data_exc = nest.GetStatus([recorders[0]], "events")[0]
        data_inh = nest.GetStatus([recorders[1]], "events")[0]
        data_exc = get_spikes(recorders[0], astype="np")
        data_inh = get_spikes(recorders[1], astype="np")

        spikes = {
            "times": list(data_exc["times"]) + list(data_inh["times"]),
            "senders": list(data_exc["senders"]) + list(data_inh["senders"]),
            "senders": list(data_exc[:, 0]) + list(data_inh[:, 0]),
            "times": list(data_exc[:, 1]) + list(data_inh[:, 1]),
        }

        print(coefficient_of_variation(spikes), cross_correlation(spikes, simtime))
        print(coefficient_of_variation(spikes),
              cross_correlation(spikes, simtime))
diff --git a/doc/user/nest-interaction.rst b/doc/user/nest-interaction.rst
index c10f31f..a66cf8c 100644
--- a/doc/user/nest-interaction.rst
+++ b/doc/user/nest-interaction.rst
@@ -7,12 +7,12 @@ Interacting with the NEST simulator
This section details how to create detailed neuronal networks, then run
simulations on them using the NEST simulator.

Readers are supposed to have a good grap of the way NEST handles neurons and
Readers are supposed to have a good grasp of the way NEST handles neurons and
models, and how to create and setup NEST nodes.
If this is not the case, please see the `NEST user doc`_ and the
`PyNEST tutorials`_ first.

NNGT tools with regard to NEST_ can be separated into
NNGT tools should work for NEST_ version 2 or 3; they can be separated into

* the structural tools (:class:`~nngt.Network`, :class:`~nngt.NeuralPop` ...)
  that are used to prepare the neuronal network and setup its properties and
@@ -119,7 +119,7 @@ Since we are using NEST, these properties are:
* the type of the neurons (``1`` for excitatory or ``-1`` for inhibitory)

.. literalinclude:: ../examples/nest_network.py
   :lines: 29-73
   :lines: 29-53, 63-83

Once this network is created, it can simply be sent to nest through the
command: ``gids = net.to_nest()``, and the NEST gids are returned.
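
As a minimal illustration of the ``gids = net.to_nest()`` step mentioned above
(a hedged sketch, construction mirroring the new testing/test_nest.py; the
return type follows the updated make_nest_network docstring further down):

    # build a small E/I network and send it to NEST
    import nngt
    import nngt.generation as ng

    net = nngt.Network.exc_and_inhib(100)
    ng.connect_groups(net, net.population, net.population,
                      graph_model="fixed_degree", degree=10,
                      degree_type="out", weights=5.)

    gids = net.to_nest()   # tuple of GIDs (NEST 2) or NodeCollection (NEST 3)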
diff --git a/doc/user/neural-groups.rst b/doc/user/neural-groups.rst
index 83258dc..4d2a2d4 100644
--- a/doc/user/neural-groups.rst
+++ b/doc/user/neural-groups.rst
@@ -116,7 +116,7 @@ To create a population, you can start from scratch by creating an empty
population, then adding groups to it:

.. literalinclude:: ../examples/introduction_to_groups.py
   :lines: 123-125
   :lines: 124-127

NNGT also provides a two default routine to create simple populations:

@@ -131,13 +131,13 @@ the time.
To create such populations, just use:

.. literalinclude:: ../examples/introduction_to_groups.py
   :lines: 131-122
   :lines: 132-134

Eventually, a population can be created from exiting groups using
:func:`~nngt.NeuralPop.from_groups`:

.. literalinclude:: ../examples/introduction_to_groups.py
   :lines: 144-145
   :lines: 147-148

Note that, here, we pass ``with_models=False`` to the population because these
groups were created without the information necessary to create a network in
@@ -157,7 +157,7 @@ Otherwise, one can build the population from groups that already contain these
properties, e.g. the previous ``pyr`` and ``fsi`` groups:

.. literalinclude:: ../examples/introduction_to_groups.py
   :lines: 163-169
   :lines: 165-171

.. warning::
    `syn_spec` can contain any synaptic model and parameters associated to the
@@ -189,12 +189,12 @@ and same with pyramidal neurons or interneurons.
First create the normal groups:

.. literalinclude:: ../examples/introduction_to_groups.py
   :lines: 181-195
   :lines: 183-197

Then make the metagroups for the layers:

.. literalinclude:: ../examples/introduction_to_groups.py
   :lines: 199-203
   :lines: 201-205

Note that I used :class:`MetaNeuralGroup` for layers 3 and 5 because it enables
to differenciate inhibitory and excitatory neurons using
@@ -205,12 +205,12 @@ Otherwise normal :class:`~nngt.MetaGroup` are equivalent and sufficient.
Create the population:

.. literalinclude:: ../examples/introduction_to_groups.py
   :lines: 206-207
   :lines: 208-209

Then add additional metagroups for cell types:

.. literalinclude:: ../examples/introduction_to_groups.py
   :lines: 211-216
   :lines: 213-218

----

diff --git a/doc/user/tutorial.rst b/doc/user/tutorial.rst
index d7861e1..8556d9d 100644
--- a/doc/user/tutorial.rst
+++ b/doc/user/tutorial.rst
@@ -221,7 +221,7 @@ with specific connectivities using the
:func:`~nngt.generation.connect_groups` function.

.. literalinclude:: ../examples/introduction_to_groups.py
   :lines: 114-123, 164-176
   :lines: 103-127

For more details, see the full page on :ref:`neural_groups`.

@@ -241,7 +241,7 @@ Since we are using NEST, these properties are:
* the type of the neurons (``1`` for excitatory or ``-1`` for inhibitory)

.. literalinclude:: ../examples/nest_network.py
   :lines: 29-74
   :lines: 29-52, 62-83

Once this network is created, it can simply be sent to nest through the
command: ``gids = net.to_nest()``, and the NEST gids are returned.
diff --git a/nngt/analysis/activity_analysis.py b/nngt/analysis/activity_analysis.py
index 5017ffa..4da74c0 100755
--- a/nngt/analysis/activity_analysis.py
+++ b/nngt/analysis/activity_analysis.py
@@ -245,15 +245,16 @@ def get_spikes(recorder=None, spike_times=None, senders=None, astype="ssp"):
    (columns).
    '''
    if recorder is not None:
        from ..simulation.nest_utils import _get_nest_gids
        import nest
        data = nest.GetStatus(recorder[0])[0]["events"]
        data = nest.GetStatus(_get_nest_gids(recorder))[0]["events"]
        spike_times = data["times"]
        senders = data["senders"]
    elif spike_times is None and senders is None:
        from ..simulation.nest_utils import _get_nest_gids, spike_rec
        import nest
        nodes = nest.GetNodes(
            (0,), properties={'model': 'spike_detector'})
        data = nest.GetStatus(nodes[0])[0]["events"]
        nodes = nest.GetNodes(properties={'model': spike_rec})
        data = nest.GetStatus(nodes)[0]["events"]
        spike_times = data["times"]
        senders = data["senders"]

@@ -298,21 +299,29 @@ def _b2_from_data(ids, data):

def _fr_from_data(ids, data):
    fr = np.zeros(len(ids))
    T = float(np.max(data[:, 1]) - np.min(data[:, 1]))
    for i, neuron in enumerate(ids):
        ids = np.where(data[:, 0] == neuron)[0]
        fr[i] = len(ids) / T

    if len(data[:, 0]):
        T = float(np.max(data[:, 1]) - np.min(data[:, 1]))

        for i, neuron in enumerate(ids):
            ids = np.where(data[:, 0] == neuron)[0]
            fr[i] = len(ids) / T

    return fr


def _set_data_nodes(network, data, nodes):
    from ..simulation.nest_utils import _get_nest_gids

    if data is None:
        data = [[], []]

    if nodes is None:
        nodes = network.nest_gids
    else:
        nodes = network.nest_gids[nodes]
    return data, nodes

    return data, _get_nest_gids(nodes)


def _set_spike_data(data, spike_detector):
@@ -320,15 +329,24 @@ def _set_spike_data(data, spike_detector):
    Data must be [[], []]
    '''
    import nest
    from ..simulation.nest_utils import _get_nest_gids, spike_rec, nest_version

    if not len(data[0]):
        if spike_detector is None:
            spike_detector = nest.GetNodes(
                (0,), properties={'model': 'spike_detector'})[0]
            prop = {'model': spike_rec}
            if nest_version == 3:
                spike_detector = nest.GetNodes(properties=prop)
            else:
                spike_detector = nest.GetNodes((0,), properties=prop)[0]

        events = nest.GetStatus(spike_detector, "events")

        for ev_dict in events:
            data[0].extend(ev_dict["senders"])
            data[1].extend(ev_dict["times"])

    sorter = np.argsort(data[1])

    return np.array(data)[:, sorter].T
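
For reference, the updated random_balanced.py example above already shows how
the "np" output of get_spikes is meant to be consumed; condensed here as a
hedged sketch (`recorders` is assumed to be the list returned by
nngt.simulation.monitor_groups once nest.Simulate has run):

    from nngt.analysis import get_spikes

    data    = get_spikes(recorders[0], astype="np")  # one row per recorded spike
    senders = data[:, 0]                             # first column: neuron GIDs
    times   = data[:, 1]                             # second column: spike times (ms)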


diff --git a/nngt/analysis/graph_analysis.py b/nngt/analysis/graph_analysis.py
index d6ba002..6c997be 100755
--- a/nngt/analysis/graph_analysis.py
+++ b/nngt/analysis/graph_analysis.py
@@ -444,7 +444,7 @@ def small_world_propensity(g, directed=None, use_global_clustering=False,
            delta_l, delta_c
    else:
        return 1 - np.sqrt(
            0.5*(np.clip(delta_l, 0, 1)**2 + np.clip(delta_c**2, 0, 1)**2))
            0.5*(np.clip(delta_l, 0, 1)**2 + np.clip(delta_c, 0, 1)**2))


def shortest_path(g, source, target, directed=True, weights=None):
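
This one-character change is the "extra square" mentioned in the commit
message: with both deviations clipped to [0, 1] first, the returned value
matches the usual definition of the small-world propensity (Muldoon et al.,
2016), where each clipped term is squared exactly once:

    \phi = 1 - \sqrt{\frac{\Delta_C^2 + \Delta_L^2}{2}}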
diff --git a/nngt/lib/graph_helpers.py b/nngt/lib/graph_helpers.py
index b069a99..1ce069b 100755
--- a/nngt/lib/graph_helpers.py
+++ b/nngt/lib/graph_helpers.py
@@ -119,8 +119,10 @@ def _get_syn_param(src_name, src_group, tgt_name, tgt_group, syn_spec,
    Priority is given to source (presynaptic properties): they come last.
    '''
    group_keys = []

    for k in syn_spec.keys():
        group_keys.extend(k)

    group_keys = set(group_keys)

    src_type = src_group.neuron_type
@@ -129,6 +131,7 @@ def _get_syn_param(src_name, src_group, tgt_name, tgt_group, syn_spec,
    # entry for source type and target type
    dict_prop = syn_spec.get((src_type, tgt_type), {})
    key_prop = dict_prop.get(key, None)

    # entry for source type and target name
    if tgt_name in group_keys:
        dict_prop = syn_spec.get((src_type, tgt_name), dict_prop)
@@ -141,10 +144,11 @@ def _get_syn_param(src_name, src_group, tgt_name, tgt_group, syn_spec,
    if src_name in group_keys and tgt_name in group_keys:
        dict_prop = syn_spec.get((src_name, tgt_name), dict_prop)
        key_prop = dict_prop.get(key, key_prop)

    if key is not None:
        return deepcopy(key_prop)
    else:
        return deepcopy(dict_prop)

    return deepcopy(dict_prop)


def _get_dtype(value):
diff --git a/nngt/lib/sorting.py b/nngt/lib/sorting.py
index a79a222..d42c9b7 100755
--- a/nngt/lib/sorting.py
+++ b/nngt/lib/sorting.py
@@ -89,23 +89,37 @@ def _sort_neurons(sort, gids, network, data=None, return_attr=False):
    the neurons, i.e. an integer between 1 and N.
    '''
    from nngt.analysis import node_attributes, get_b2
    from nngt.simulation.nest_utils import nest_version

    min_nest_gid = network.nest_gids.min()
    max_nest_gid = network.nest_gids.max()

    sorting = np.zeros(max_nest_gid + 1)

    attribute = None
    sorted_ids = None

    if isinstance(sort, str):
        if sort == "firing_rate":
            # compute number of spikes per neuron
            spikes = np.bincount(data[:, 0].astype(int))
            if spikes.shape[0] < max_nest_gid: # one entry per neuron
                spikes.resize(max_nest_gid)

            # shift from 1 in NEST 3+
            max_entry = max_nest_gid + 1 if nest_version == 3 else max_nest_gid

            if spikes.shape[0] < max_entry: # one entry per neuron
                spikes.resize(max_entry)

            # sort them (neuron with least spikes arrives at min_nest_gid)
            sorted_ids = np.argsort(spikes)[min_nest_gid:] - min_nest_gid

            # get attribute
            idx_min = int(np.min(data[:, 0]))
            attribute = spikes[idx_min:] \
                        / (np.max(data[:, 1]) - np.min(data[:, 1]))
            if len(data[:, 0]):
                idx_min = int(np.min(data[:, 0]))
                attribute = spikes[idx_min:] \
                            / (np.max(data[:, 1]) - np.min(data[:, 1]))
            else:
                attribute = []
        elif sort.lower() == "b2":
            attribute = get_b2(network, data=data, nodes=gids)
            sorted_ids = np.argsort(attribute)
@@ -113,7 +127,7 @@ def _sort_neurons(sort, gids, network, data=None, return_attr=False):
            num_b2 = attribute.shape[0]
            if num_b2 < network.node_nb():
                spikes = np.bincount(data[:, 0])
                non_spiking = np.where(spikes[min_nest_gid] == 0)[0]
                non_spiking = np.where(spikes[min_nest_gid:] == 0)[0]
                sorted_ids.resize(network.node_nb())
                for i, n in enumerate(non_spiking):
                    sorted_ids[sorted_ids >= n] += 1
@@ -167,8 +181,8 @@ def _sort_neurons(sort, gids, network, data=None, return_attr=False):

    if return_attr:
        return sorting.astype(int), attribute
    else:
        return sorting.astype(int)

    return sorting.astype(int)


def _sort_groups(pop):
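
A small, self-contained illustration of the resize guard in the firing-rate
branch above (made-up GIDs; np.bincount returns one entry per integer up to
the largest sender, so silent neurons with higher GIDs have to be padded in):

    import numpy as np

    senders = np.array([1, 3, 3, 5])   # hypothetical GIDs of spiking neurons
    spikes  = np.bincount(senders)     # length 6: counts for GIDs 0..5
    needed  = 8                        # e.g. max_nest_gid (+1 under NEST 3)
    if spikes.shape[0] < needed:
        spikes.resize(needed)          # pad the silent high-GID neurons with zeros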
diff --git a/nngt/simulation/nest_activity.py b/nngt/simulation/nest_activity.py
index 4284175..33c6b85 100755
--- a/nngt/simulation/nest_activity.py
+++ b/nngt/simulation/nest_activity.py
@@ -31,7 +31,7 @@ import nest
import numpy as np

from nngt.lib import InvalidArgument, nonstring_container
from .nest_utils import nest_version, _get_nest_gids
from .nest_utils import nest_version, spike_rec, _get_nest_gids


__all__ = [
@@ -140,7 +140,7 @@ def get_recording(network, record, recorder=None, nodes=None):
    record : str or list
        Name of the record(s) to obtain.
    recorder : tuple of ints, optional (default: all multimeters)
        GID of the "spike_detector" objects recording the network activity.
        GID of the spike recorder objects recording the network activity.
    nodes : array-like, optional (default: all nodes)
        NNGT ids of the nodes for which the recording should be returned.

@@ -206,7 +206,7 @@ def get_recording(network, record, recorder=None, nodes=None):
    return values


def activity_types(spike_detector, limits, network=None,
def activity_types(spike_recorder, limits, network=None,
                   phase_coeff=(0.5, 10.), mbis=0.5, mfb=0.2, mflb=0.05,
                   skip_bursts=0, simplify=False, fignums=[], show=False):
    '''
@@ -218,7 +218,7 @@ def activity_types(spike_detector, limits, network=None,

    Parameters
    ----------
    spike_detector : NEST node(s) (tuple or list of tuples)
    spike_recorder : NEST node(s) (tuple or list of tuples)
        The recording device that monitored the network's spikes.
    limits : tuple of floats
        Time limits of the simulation region which should be studied (in ms).
@@ -273,13 +273,13 @@ def activity_types(spike_detector, limits, network=None,
    # check if there are several recorders
    senders, times = [], []

    if True in nest.GetStatus(spike_detector, "to_file"):
        for fpath in nest.GetStatus(spike_detector, "record_to"):
    if True in nest.GetStatus(spike_recorder, "to_file"):
        for fpath in nest.GetStatus(spike_recorder, "record_to"):
            data = _get_data(fpath)
            times.extend(data[:, 1])
            senders.extend(data[:, 0])
    else:
        for events in nest.GetStatus(spike_detector, "events"):
        for events in nest.GetStatus(spike_recorder, "events"):
            times.extend(events["times"])
            senders.extend(events["senders"])

diff --git a/nngt/simulation/nest_graph.py b/nngt/simulation/nest_graph.py
index 89ba809..aa218ad 100755
--- a/nngt/simulation/nest_graph.py
+++ b/nngt/simulation/nest_graph.py
@@ -70,10 +70,10 @@ def make_nest_network(network, send_only=None, weights=True):

    Returns
    -------
    gids : tuple (nodes in NEST)
        GIDs of the neurons in the network.
    gids : tuple or NodeCollection (nodes in NEST)
        GIDs of the neurons in the NEST network.
    '''
    gids = []
    gids = _get_nest_gids([])
    pop  = network.population

    send = list(network.population.keys())
@@ -128,7 +128,7 @@ def make_nest_network(network, send_only=None, weights=True):
            ia_nest_gids[current_size:current_size + group_size] = gids_tmp
            ia_nngt_nest[idx_nest] = gids_tmp
            current_size += group_size
            gids.extend(gids_tmp)
            gids += gids_tmp

    # conversions ids/gids
    network.nest_gids = ia_nngt_nest
@@ -147,10 +147,13 @@ def make_nest_network(network, send_only=None, weights=True):
    for src_name in send:
        src_group = pop[src_name]
        syn_sign = src_group.neuron_type

        # local connectivity matrix and offset to correct neuron id
        local_csr = csr_weights[src_group.ids, :]
        arr_idx = np.sort(src_group.ids).astype(int)

        local_csr = csr_weights[arr_idx, :]

        assert local_csr.shape[1] == network.node_nb()
        arr_idx  = np.sort(src_group.ids).astype(int)

        if len(src_group.ids) > 0 and pop.syn_spec is not None:
            # check whether custom synapses should be used
@@ -160,15 +163,20 @@ def make_nest_network(network, send_only=None, weights=True):
                tgt_group = pop[tgt_name]
                # get list of targets for each
                arr_tgt_idx = np.sort(tgt_group.ids).astype(int)

                # get non-zero entries (global NNGT ids)
                keep = local_csr[:, arr_tgt_idx].nonzero()

                local_tgt_ids = arr_tgt_idx[keep[1]]
                local_src_ids = arr_idx[keep[0]]

                if len(local_tgt_ids) and len(local_src_ids):
                    # get the synaptic parameters
                    syn_spec = _get_syn_param(
                        src_name, src_group, tgt_name, tgt_group, pop.syn_spec)

                    # using A1 to get data from matrix
                    if weights not in {None, False}:
                    if weights:
                        syn_spec[WEIGHT] = syn_sign *\
                            csr_weights[local_src_ids, local_tgt_ids].A1
                    else:
@@ -219,7 +227,7 @@ def make_nest_network(network, send_only=None, weights=True):
    # tell the populaton that the network it describes was sent to NEST
    network.population._sent_to_nest()

    return tuple(ia_nest_gids[:current_size])
    return gids


def get_nest_adjacency(id_converter=None):
diff --git a/nngt/simulation/nest_plot.py b/nngt/simulation/nest_plot.py
index f6e7b0f..892b33e 100755
--- a/nngt/simulation/nest_plot.py
+++ b/nngt/simulation/nest_plot.py
@@ -45,6 +45,7 @@ from nngt.lib.logger import _log_message
from nngt.plot import palette_discrete, markers
from nngt.plot.plt_properties import _set_new_plot, _set_ax_lims

from .nest_utils import nest_version, spike_rec, _get_nest_gids

logger = logging.getLogger(__name__)

@@ -72,7 +73,7 @@ def plot_activity(gid_recorder=None, record=None, network=None, gids=None,
    ----------
    gid_recorder : tuple or list of tuples, optional (default: None)
        The gids of the recording devices. If None, then all existing
        "spike_detector"s are used.
        spike_recs are used.
    record : tuple or list, optional (default: None)
        List of the monitored variables for each device. If `gid_recorder` is
        None, record can also be None and only spikes are considered.
@@ -142,7 +143,8 @@ def plot_activity(gid_recorder=None, record=None, network=None, gids=None,
        Lines containing the data that was plotted, grouped by figure.
    '''
    import matplotlib.pyplot as plt
    lst_rec, lst_labels, lines, axes, labels = [], [], {}, {}, {}
    recorders = _get_nest_gids([])
    lst_labels, lines, axes, labels = [], {}, {}, {}

    # normalize recorders and recordables
    if gid_recorder is not None:
@@ -152,27 +154,38 @@ def plot_activity(gid_recorder=None, record=None, network=None, gids=None,
                                  'recorders, or contain one entry per '
                                  'recorder in `gid_recorder`')
        for rec in gid_recorder:
            if isinstance(gid_recorder[0], tuple):
                lst_rec.append(rec[0])
            if nest_version == 3:
                recorders = _get_nest_gids(gid_recorder)
            else:
                lst_rec.append(rec)
                if isinstance(gid_recorder[0], tuple):
                    recorders.append(rec)
                else:
                    recorders.append((rec,))
    else:
        lst_rec = nest.GetNodes(
            (0,), properties={'model': 'spike_detector'})[0]
        record = tuple("spikes" for _ in range(len(lst_rec)))
        prop = {'model': spike_rec}
        if nest_version == 3:
            recorders = nest.GetNodes(properties=prop)
        else:
            recorders = [
                (gid,) for gid in nest.GetNodes((0,), properties=prop)[0]
            ]

        record = tuple("spikes" for _ in range(len(recorders)))

    # get gids and groups
    gids = network.nest_gids if (gids is None and network is not None) \
                             else gids
           else gids

    if gids is None:
        gids = []
        for rec in lst_rec:
            gids.extend(nest.GetStatus([rec])[0]["events"]["senders"])

        for rec in recorders:
            gids.extend(nest.GetStatus(rec)[0]["events"]["senders"])

        gids = np.unique(gids)

    num_group = 1 if network is None else len(network.population)
    num_lines = max(num_group, len(lst_rec))
    num_lines = max(num_group, len(recorders))

    # sorting
    sorted_neurons = np.array([])
@@ -193,9 +206,9 @@ def plot_activity(gid_recorder=None, record=None, network=None, gids=None,
            data = None
            if sort.lower() in ("firing_rate", "b2"):  # get senders
                data = [[], []]
                for rec in lst_rec:
                    info = nest.GetStatus([rec])[0]
                    if str(info["model"]) == "spike_detector":
                for rec in recorders:
                    info = nest.GetStatus(rec)[0]
                    if str(info["model"]) == spike_rec:
                        data[0].extend(info["events"]["senders"])
                        data[1].extend(info["events"]["times"])
                data = np.array(data).T
@@ -224,23 +237,23 @@ def plot_activity(gid_recorder=None, record=None, network=None, gids=None,

    # set labels
    if label is None:
        lst_labels = [None for _ in range(len(lst_rec))]
        lst_labels = [None for _ in range(len(recorders))]
    else:
        if isinstance(label, str):
            lst_labels = [label]
        else:
            lst_labels = label
        if len(label) != len(lst_rec):
        if len(label) != len(recorders):
            _log_message(logger, "WARNING",
                         'Incorrect length for `label`: expecting {} but got '
                         '{}.\nIgnoring.'.format(len(lst_rec), len(label)))
            lst_labels = [None for _ in range(len(lst_rec))]
                         '{}.\nIgnoring.'.format(len(recorders), len(label)))
            lst_labels = [None for _ in range(len(recorders))]

    datasets = []
    max_time = 0.

    for rec in lst_rec:
        info     = nest.GetStatus([rec])[0]
    for rec in recorders:
        info = nest.GetStatus(rec)[0]

        if len(info["events"]["times"]):
            max_time = max(max_time, np.max(info["events"]["times"]))
@@ -260,9 +273,9 @@ def plot_activity(gid_recorder=None, record=None, network=None, gids=None,
            labels[info["model"]] = []
            lines[info["model"]] = []

        if str(info["model"]) == "spike_detector":
            if "spike_detector" in axes:
                axis = axes["spike_detector"]
        if str(info["model"]) == spike_rec:
            if spike_rec in axes:
                axis = axes[spike_rec]
            c = colors[num_raster]
            times, senders = info["events"]["times"], info["events"]["senders"]
            sorted_ids = sorted_neurons[senders]
@@ -278,10 +291,10 @@ def plot_activity(gid_recorder=None, record=None, network=None, gids=None,
            num_raster += 1
            if l:
                fig_raster = l[0].figure.number
                fignums['spike_detector'] = fig_raster
                axes['spike_detector'] = l[0].axes
                labels["spike_detector"].append(lbl)
                lines["spike_detector"].extend(l)
                fignums[spike_rec] = fig_raster
                axes[spike_rec] = l[0].axes
                labels[spike_rec].append(lbl)
                lines[spike_rec].extend(l)
                if histogram:
                    axes['histogram'] = l[1].axes
        elif "detector" in str(info["model"]):
@@ -371,8 +384,8 @@ def plot_activity(gid_recorder=None, record=None, network=None, gids=None,
            lines[info["model"]].extend(lines_tmp)
            num_meter += 1

    if "spike_detector" in axes:
        ax = axes['spike_detector']
    if spike_rec in axes:
        ax = axes[spike_rec]

        if limits is not None:
            ax.set_xlim(limits[0], limits[1])
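
The call sequence that exercises this plotting path end to end is the one used
by the new testing/test_nest.py further down; condensed into a hedged sketch
(same helpers, smaller network, argument order per the plot_activity signature
above):

    import nest
    import nngt
    import nngt.generation as ng
    import nngt.simulation as ns

    net = nngt.Network.exc_and_inhib(50)
    ng.connect_groups(net, net.population, net.population,
                      graph_model="fixed_degree", degree=5,
                      degree_type="out", weights=5.)
    gids = net.to_nest()

    recorders, records = ns.monitor_groups(net.population, net)
    nest.Simulate(100.)
    ns.plot_activity(recorders, records, network=net, show=False)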
diff --git a/nngt/simulation/nest_utils.py b/nngt/simulation/nest_utils.py
index b4769e9..6d39560 100755
--- a/nngt/simulation/nest_utils.py
+++ b/nngt/simulation/nest_utils.py
@@ -45,8 +45,10 @@ from nngt.lib.sorting import _sort_groups
try:
    from nest import NodeCollection
    nest_version = 3
    spike_rec = 'spike_recorder'
except ImportError:
    nest_version = 2
    spike_rec = 'spike_detector'


__all__ = [
@@ -87,7 +89,7 @@ def set_noise(gids, mean, std):
    noise = nest.Create("noise_generator", params={"mean": mean, "std": std},
                        _warn=False)

    nest.Connect(noise, list(gids), _warn=False)
    nest.Connect(noise, _get_nest_gids(gids), _warn=False)

    return noise

@@ -191,17 +193,22 @@ def set_minis(network, base_rate, weight, syn_type=1, nodes=None, gids=None):
    pgs        = nest.Create("poisson_generator", len(deg_set), _warn=False)

    for d, pg in zip(deg_set, pgs):
        nest.SetStatus([pg], {"rate": d*base_rate}, _warn=False)
        if nest_version == 2:
            pg = (pg,)

        nest.SetStatus(pg, {"rate": d*base_rate}, _warn=False)

    # connect
    for i, n in enumerate(nodes):
        gid, d = gids[i], degrees[n]

        pg = pgs[map_deg_pg[d]]

        if nest_version == 2:
            gid = (gid,)
            pg = (pg,)

        w  = weight[i]
        pg = [pgs[map_deg_pg[d]]]

        nest.Connect(pg, gid, syn_spec={'weight': w}, _warn=False)

@@ -232,7 +239,7 @@ def set_step_currents(gids, times, currents):

    params = {"amplitude_times": times, "amplitude_values":currents}

    scg = nest.Create("step_current_generator", params, _warn=False)
    scg = nest.Create("step_current_generator", params=params, _warn=False)

    gids = _get_nest_gids(gids)

@@ -282,6 +289,7 @@ def randomize_neural_states(network, instructions, groups=None, nodes=None,
        else:
            raise AttributeError(
                '`network` has not been sent to NEST yet.')

    gids = []

    if nodes is not None and groups is not None:
@@ -306,7 +314,7 @@ def randomize_neural_states(network, instructions, groups=None, nodes=None,
        nest.SetStatus(gids, key, state, _warn=False)

        if nodes is None:
            nodes = network.id_from_nest_gid(gids)
            nodes = network.id_from_nest_gid(np.asarray(gids))

        # store the values in the node attributes
        if key not in ("V_m", "w"):
@@ -330,7 +338,7 @@ def monitor_groups(group_names, network, nest_recorder=None, params=None):
        Names of the groups that should be recorded.
    network : :class:`~nngt.Network` or subclass
        Network which population will be used to differentiate groups.
    nest_recorder : strings or list, optional (default: "spike_detector"0)
    nest_recorder : strings or list, optional (default: spike recorder)
        Device(s) to monitor the network.
    params : dict or list of, optional (default: `{}`)
        Dictionarie(s) containing the parameters for each recorder (see
@@ -343,7 +351,7 @@ def monitor_groups(group_names, network, nest_recorder=None, params=None):
    recordables : list of the recordables' names.
    '''
    if nest_recorder is None:
        nest_recorder = ["spike_detector"]
        nest_recorder = [spike_rec]
    elif not nonstring_container(nest_recorder):
        nest_recorder = [nest_recorder]

@@ -376,7 +384,7 @@ def monitor_nodes(gids, nest_recorder=None, params=None, network=None):
        GIDs of the neurons in the NEST subnetwork; either one list per
        recorder if they should monitor different neurons or a unique list
        which will be monitored by all devices.
    nest_recorder : strings or list, optional (default: "spike_detector")
    nest_recorder : strings or list, optional (default: spike recorder)
        Device(s) to monitor the network.
    params : dict or list of, optional (default: `{}`)
        Dictionarie(s) containing the parameters for each recorder (see
@@ -391,7 +399,7 @@ def monitor_nodes(gids, nest_recorder=None, params=None, network=None):
    recordables : list of the recordables' names.
    '''
    if nest_recorder is None:
        nest_recorder = ["spike_detector"]
        nest_recorder = [spike_rec]
    elif not nonstring_container(nest_recorder):
        nest_recorder = [nest_recorder]

@@ -429,7 +437,7 @@ def _monitor(gids, nest_recorder, params):
            nest.SetStatus(device, params[i], _warn=False)
            nest.Connect(device, gids, conn_spec=di_spec, _warn=False)
        # event detectors
        elif "detector" in rec:
        elif rec == spike_rec:
            device = nest.Create(rec, _warn=False)

            recorders += device if nest_version == 3 else list(device)
@@ -460,7 +468,7 @@ def save_spikes(filename, recorder=None, network=None, save_positions=True,
        Path to the file where the activity should be saved.
    recorder : tuple or list of tuples, optional (default: None)
        The NEST gids of the recording devices. If None, then all existing
        "spike_detector"s are used.
        spike recorders are used.
    network : :class:`~nngt.Network` or subclass, optional (default: None)
        Network which activity will be monitored.
    save_positions : bool, optional (default: True)
@@ -494,9 +502,9 @@ def save_spikes(filename, recorder=None, network=None, save_positions=True,

            if nest_version == 3:
                if len(rcrdrs) == 1:
                    assert recrdrs.model == "spike_detector", \
                    assert recrdrs.model == spike_rec, \
                        'Only spike_detectors are supported.'
                    assert recrdrs.model == ("spike_detector",)*len(rcrdrs), \
                    assert recrdrs.model == (spike_rec,)*len(rcrdrs), \
                        'Only spike_detectors are supported.'
            else:
                assert (nest.GetStatus(rcrdrs, 'model')
@@ -504,7 +512,7 @@ def save_spikes(filename, recorder=None, network=None, save_positions=True,
                       'Only spike_detectors are supported.'
    else:
        if nest_version == 3:
            rcrdrs = nest.GetNodes(properties={'model': 'spike_detector'})
            rcrdrs = nest.GetNodes(properties={'model': spike_rec})
        else:
            rcrdrs = [[n] for n in nest.GetNodes(
                (0,), properties={'model': 'spike_detector'})[0]]
@@ -547,6 +555,9 @@ def _get_nest_gids(gids):
        if isinstance(gids, NodeCollection):
            return gids

        return NodeCollection(gids)
        return NodeCollection(sorted(gids))

    if nonstring_container(gids):
        return list(gids)

    return list(gids)
    return [gids]
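
Most call sites touched by this patch go through the small helper above; its
contract after the change, summarised as a hedged sketch:

    from nngt.simulation.nest_utils import _get_nest_gids

    gids = _get_nest_gids([3, 1, 2])   # assuming GIDs 1-3 exist in the kernel
    # NEST 3: a NodeCollection built from the sorted ids (NodeCollection
    #         creation expects ordered ids, hence, presumably, the sorted() above)
    # NEST 2: a plain python list, ready for nest.Connect / nest.GetStatus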
diff --git a/testing/test_nest.py b/testing/test_nest.py
new file mode 100644
index 0000000..2915f6e
--- /dev/null
+++ b/testing/test_nest.py
@@ -0,0 +1,124 @@
#!/usr/bin/env python
#-*- coding:utf-8 -*-

# test_nest.py

# This file is part of the NNGT module
# Distributed as a free software, in the hope that it will be useful, under the
# terms of the GNU General Public License.

import nngt
import nngt.generation as ng
import numpy as np
import pytest

pytestmark = pytest.mark.skipif(nngt.get_config('mpi'),
                                reason="Don't test for MPI")

nest = pytest.importorskip("nest")

import nngt.simulation as ns


def make_nest_net(size, w, deg):
    '''
    Create a network in NEST
    '''
    net = nngt.Network.exc_and_inhib(size)

    pop = net.population

    ng.connect_groups(net, pop, pop, graph_model="fixed_degree",
                      degree=deg, degree_type="out", weights=w)

    gids = net.to_nest()

    return net, gids


def test_net_creation():
    '''
    Test the creation of a network in NEST.
    '''
    nest.ResetKernel()

    w = 5.

    net, gids = make_nest_net(100, w, deg=10)

    # check nodes and connections
    assert len(gids) == net.node_nb()

    conn = nest.GetConnections()

    assert len(conn) == net.edge_nb()

    weights = {d['weight'] for d in nest.GetStatus(conn)}

    assert weights == {-w, w}


def test_utils():
    '''
    Check NEST utility functions
    '''
    nest.ResetKernel()
    resol = nest.GetKernelStatus('resolution')

    w = 5.

    net, gids = make_nest_net(100, w, deg=10)

    # set inputs
    ns.set_noise(gids, 0., 20.)

    assert len(nest.GetConnections()) == net.edge_nb() + net.node_nb()

    ns.set_poisson_input(gids, 5.)

    assert len(nest.GetConnections()) == net.edge_nb() + 2*net.node_nb()

    ns.set_minis(net, base_rate=1.5, weight=2.)

    assert len(nest.GetConnections()) == net.edge_nb() + 3*net.node_nb()

    ns.set_step_currents(gids[::2], times=[40., 60.], currents=[800., 0.])

    min_conn = net.edge_nb() + 3*net.node_nb() + 1
    max_conn = net.edge_nb() + 4*net.node_nb()

    num_conn = len(nest.GetConnections())

    assert min_conn <= num_conn <= max_conn

    # check randomization of neuronal properties
    vms = {d['V_m'] for d in nest.GetStatus(gids)}

    assert len(vms) == 1

    ns.randomize_neural_states(net, {'V_m': ('uniform', -70., -60.)})

    vms = {d['V_m'] for d in nest.GetStatus(gids)}

    assert len(vms) == net.node_nb()

    # monitoring nodes
    sd, _ = ns.monitor_groups(net.population, net)

    assert len(nest.GetConnections()) == num_conn + net.node_nb()

    vm, rec = ns.monitor_nodes(gids[0], nest_recorder='voltmeter',
                               params={'interval': resol})

    assert len(nest.GetConnections()) == num_conn + net.node_nb() + 1

    nest.Simulate(100.)

    ns.plot_activity(show=False)

    ns.plot_activity(vm, rec, show=True)


if __name__ == "__main__":
    test_net_creation()
    test_utils()
-- 
2.32.0