30 changes: 17 additions & 13 deletions nestkernel/mpi_manager.cpp
```diff
@@ -320,32 +320,36 @@ nest::MPIManager::get_processor_name()
 void
 nest::MPIManager::communicate( std::vector< long >& local_nodes, std::vector< long >& global_nodes )
 {
-  size_t np = get_num_processes();
-  // Get size of buffers
-  std::vector< int > num_nodes_per_rank( np );
-  num_nodes_per_rank[ get_rank() ] = local_nodes.size();
-  communicate( num_nodes_per_rank );
+  const size_t num_procs = get_num_processes();
+
+  // We need to work with int in much of what follows, because several MPI_Allgatherv() arguments must be int.
+  assert( local_nodes.size() <= std::numeric_limits< int >::max() );
+  const int num_local_nodes = static_cast< int >( local_nodes.size() );
 
-  const size_t num_globals = std::accumulate( num_nodes_per_rank.begin(), num_nodes_per_rank.end(), 0 );
+  // Get number of nodes per rank and total.
+  std::vector< int > num_nodes_per_rank( num_procs );
+  num_nodes_per_rank[ get_rank() ] = num_local_nodes;
+  communicate( num_nodes_per_rank );
+  const int num_globals = std::accumulate( num_nodes_per_rank.begin(), num_nodes_per_rank.end(), 0 );
   if ( num_globals == 0 )
   {
-    return; // must return here to avoid passing address to empty global_nodes below
+    return; // Must return here to avoid passing address to empty global_nodes below
   }
 
   global_nodes.resize( num_globals, 0L );
 
   // Set up displacements vector. Entry i specifies the displacement (relative
   // to recv_buffer ) at which to place the incoming data from process i
-  std::vector< int > displacements( np, 0 );
-  for ( size_t i = 1; i < np; ++i )
+  std::vector< int > displacements( num_procs, 0 );
+  for ( size_t i = 1; i < num_procs; ++i )
   {
     displacements.at( i ) = displacements.at( i - 1 ) + num_nodes_per_rank.at( i - 1 );
   }
 
-  // avoid dereferencing empty vector
-  const auto send_ptr = local_nodes.empty() ? nullptr : &local_nodes[ 0 ];
-  MPI_Allgatherv( send_ptr,
-    local_nodes.size(),
+  // Avoid dereferencing empty vector. As long as sendcount is 0, we can pass any pointer for sendbuf.
+  long dummy = 0;
+  MPI_Allgatherv( num_local_nodes > 0 ? &local_nodes[ 0 ] : &dummy,
+    num_local_nodes,
     MPI_Type< long >::type,
     &global_nodes[ 0 ],
     &num_nodes_per_rank[ 0 ],
```
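
Note: the new code follows the standard variable-count gather pattern: every rank first shares how many elements it will contribute, each rank derives the displacement of every contribution from those counts, and `MPI_Allgatherv()` then assembles the result. Because `sendcount` is 0 on a rank with no local nodes, MPI never reads that rank's send buffer, so passing the address of a dummy `long` avoids indexing into an empty vector. The sketch below is not part of the PR; it reproduces the same pattern as a self-contained MPI program with illustrative names (`counts`, `displs`, `global`).

```cpp
// Standalone sketch of the allgatherv pattern used in MPIManager::communicate().
// Build with an MPI compiler wrapper, e.g. `mpicxx allgatherv_sketch.cpp`,
// and run with `mpirun -np 4 ./a.out`.
#include <mpi.h>

#include <numeric>
#include <vector>

int main( int argc, char** argv )
{
  MPI_Init( &argc, &argv );

  int rank = 0;
  int num_procs = 0;
  MPI_Comm_rank( MPI_COMM_WORLD, &rank );
  MPI_Comm_size( MPI_COMM_WORLD, &num_procs );

  // Each rank contributes `rank` values, so rank 0 contributes nothing.
  std::vector< long > local( rank, static_cast< long >( rank ) );
  const int num_local = static_cast< int >( local.size() );

  // Step 1: exchange per-rank counts so every rank can size its receive buffer.
  std::vector< int > counts( num_procs, 0 );
  MPI_Allgather( &num_local, 1, MPI_INT, counts.data(), 1, MPI_INT, MPI_COMM_WORLD );

  // Step 2: displacement i is the receive-buffer offset for data from rank i.
  std::vector< int > displs( num_procs, 0 );
  for ( int i = 1; i < num_procs; ++i )
  {
    displs[ i ] = displs[ i - 1 ] + counts[ i - 1 ];
  }

  // Step 3: gather the variable-length contributions from all ranks.
  std::vector< long > global( std::accumulate( counts.begin(), counts.end(), 0 ), 0L );

  // With sendcount == 0 the send buffer is never read, so any valid address works.
  long dummy = 0;
  MPI_Allgatherv( num_local > 0 ? local.data() : &dummy,
    num_local,
    MPI_LONG,
    global.empty() ? &dummy : global.data(),
    counts.data(),
    displs.data(),
    MPI_LONG,
    MPI_COMM_WORLD );

  MPI_Finalize();
  return 0;
}
```
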
21 changes: 13 additions & 8 deletions nestkernel/node_manager.cpp
```diff
@@ -350,7 +350,7 @@ NodeManager::get_nodes( const DictionaryDatum& params, const bool local_only )
   nodes_on_thread.resize( kernel().vp_manager.get_num_threads() );
 #pragma omp parallel
   {
-    size_t tid = kernel().vp_manager.get_thread_id();
+    const size_t tid = kernel().vp_manager.get_thread_id();
 
     for ( auto node : get_local_nodes( tid ) )
     {
@@ -387,6 +387,12 @@ NodeManager::get_nodes( const DictionaryDatum& params, const bool local_only )
             break;
           }
         }
+        else
+        {
+          // We were looking for a parameter not existing in the node, so it is no match.
+          match = false;
+          break;
+        }
       }
       if ( match )
       {
@@ -408,15 +414,14 @@ NodeManager::get_nodes( const DictionaryDatum& params, const bool local_only )
         nodes.push_back( globalnodes[ i ] );
       }
     }
-
-    // get rid of any multiple entries
-    std::sort( nodes.begin(), nodes.end() );
-    std::vector< long >::iterator it;
-    it = std::unique( nodes.begin(), nodes.end() );
-    nodes.resize( it - nodes.begin() );
   }
 
-  std::sort( nodes.begin(), nodes.end() ); // ensure nodes are sorted prior to creating the NodeCollection
+  // get rid of any multiple entries
+  std::sort( nodes.begin(), nodes.end() );
+  std::vector< long >::iterator it;
+  it = std::unique( nodes.begin(), nodes.end() );
+  nodes.resize( it - nodes.begin() );
+
   IntVectorDatum nodes_datum( nodes );
   NodeCollectionDatum nodecollection( NodeCollection::create( nodes_datum ) );
 
```
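
Note: in `node_manager.cpp` the duplicate removal now runs unconditionally after the optional MPI exchange, and the sort it performs also provides the ordering needed before the NodeCollection is created, which is why the separate `std::sort()` could be dropped. The snippet below is a standalone illustration of that sort-then-unique idiom, not NEST code.

```cpp
// Standalone illustration of deduplicating node IDs with sort + unique + resize.
#include <algorithm>
#include <cassert>
#include <vector>

int main()
{
  std::vector< long > nodes = { 7, 3, 3, 9, 7, 1 };

  // std::unique() only collapses adjacent duplicates, so the vector must be sorted first.
  std::sort( nodes.begin(), nodes.end() );
  std::vector< long >::iterator it = std::unique( nodes.begin(), nodes.end() );
  nodes.resize( it - nodes.begin() );

  assert( ( nodes == std::vector< long >{ 1, 3, 7, 9 } ) );
  return 0;
}
```
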
1 change: 1 addition & 0 deletions testsuite/pytests/mpi/2/test_getnodes.py
123 changes: 64 additions & 59 deletions testsuite/pytests/test_getnodes.py
```diff
@@ -23,69 +23,74 @@
 Test GetNodes
 """
 
-import unittest
-
 import nest
-
-
-@nest.ll_api.check_stack
-class GetNodesTestCase(unittest.TestCase):
-    """Test GetNodes function"""
-
-    def setUp(self):
-        nest.ResetKernel()
-
-        a = nest.Create("iaf_psc_alpha", 3)  # noqa: F841
-        b = nest.Create("iaf_psc_delta", 2, {"V_m": -77.0})  # noqa: F841
-        c = nest.Create(
-            "iaf_psc_alpha", 4, {"V_m": [-77.0, -66.0, -77.0, -66.0], "tau_m": [10.0, 11.0, 12.0, 13.0]}  # noqa: F841
-        )
-        d = nest.Create("iaf_psc_exp", 4)  # noqa: F841
-
-    def test_GetNodes(self):
-        """test GetNodes"""
-        all_nodes_ref = nest.NodeCollection(list(range(1, nest.network_size + 1)))
-        all_nodes = nest.GetNodes()
-
-        self.assertEqual(all_nodes_ref, all_nodes)
-
-    def test_GetNodes_with_params(self):
-        """test GetNodes with params"""
-        nodes_Vm = nest.GetNodes({"V_m": -77.0})
-        nodes_Vm_ref = nest.NodeCollection([4, 5, 6, 8])
-
-        self.assertEqual(nodes_Vm_ref, nodes_Vm)
-
-        nodes_Vm_tau = nest.GetNodes({"V_m": -77.0, "tau_m": 12.0})
-        nodes_Vm_tau_ref = nest.NodeCollection([8])
-
-        self.assertEqual(nodes_Vm_tau_ref, nodes_Vm_tau)
-
-        nodes_exp = nest.GetNodes({"model": "iaf_psc_exp"})
-        nodes_exp_ref = nest.NodeCollection([10, 11, 12, 13])
-
-        self.assertEqual(nodes_exp_ref, nodes_exp)
-
-    def test_GetNodes_no_match(self):
+import pytest
+
+
+# Apply parameterization over number of threads here, each test will be run once for
+# each parameter value. Need to protect in case we are running without threads.
+@pytest.fixture(autouse=True, params=[1, 2] if nest.ll_api.sli_func("is_threaded") else [1])
+def create_neurons(request):
+    nest.ResetKernel()
+    nest.local_num_threads = request.param
+
+    nest.Create("iaf_psc_alpha", 3)
+    nest.Create("iaf_psc_delta", 2, {"V_m": -77.0})
+    # Work around because Create() with arrays does not work with more than one rank
+    # nest.Create("iaf_psc_alpha", 4, {"V_m": [-77.0, -66.0, -77.0, -66.0], "tau_m": [10.0, 11.0, 12.0, 13.0]})
+    c = nest.Create("iaf_psc_alpha", 4, {"V_m": -77.0, "tau_m": 10.0})
+    c[1].V_m = -66.0
+    c[3].V_m = -66.0
+    c[1].tau_m = 11.0
+    c[2].tau_m = 12.0
+    c[3].tau_m = 13.0
+
+    nest.Create("iaf_psc_exp", 4)
+    nest.Create("spike_generator", 3)
+
+
+@pytest.mark.parametrize("local_only", [True, False])
+class TestGetNodes:
+    """Tests for GetNodes() function."""
+
+    def test_GetNodes(self, local_only):
+        """Test that nodes are correctly retrieved if no parameters are specified."""
+
+        nodes_ref = nest.NodeCollection(list(range(1, nest.network_size + 1)))
+        if nodes_ref and local_only:
+            # Need to go via global_id to get empty node collection if no locals
+            nodes_ref = nest.NodeCollection([n.global_id for n in nodes_ref if n.local])
+
+        nodes = nest.GetNodes(local_only=local_only)
+
+        assert nodes == nodes_ref
+
+    @pytest.mark.parametrize(
+        "filter, expected_ids",
+        [
+            [{"V_m": -77.0}, [4, 5, 6, 8]],
+            [{"V_m": -77.0, "tau_m": 12.0}, [8]],
+            [{"model": "iaf_psc_exp"}, [10, 11, 12, 13]],
+            [{"model": "spike_generator"}, [14, 15, 16]],
+        ],
+    )
+    def test_GetNodes_with_params(self, local_only, filter, expected_ids):
+        """Test that nodes are correctly filtered."""
+
+        nodes_ref = nest.NodeCollection(expected_ids)
+        if local_only:
+            # Need to go via global_id to get empty node collection if no locals
+            nodes_ref = nest.NodeCollection([n.global_id for n in nodes_ref if n.local])
+
+        nodes = nest.GetNodes(properties=filter, local_only=local_only)
+        assert nodes == nodes_ref
+
+    def test_GetNodes_no_match(self, local_only):
         """
         Ensure we get an empty result if nothing matches.
 
         This would lead to crashes in MPI-parallel code before #3460.
         """
 
-        nodes = nest.GetNodes({"V_m": 100.0})
-        self.assertEqual(len(nodes), 0)
-
-
-def suite():
-    suite = unittest.makeSuite(GetNodesTestCase, "test")
-    return suite
-
-
-def run():
-    runner = unittest.TextTestRunner(verbosity=2)
-    runner.run(suite())
-
-
-if __name__ == "__main__":
-    run()
+        nodes = nest.GetNodes({"V_m": 100.0}, local_only=local_only)
+        assert len(nodes) == 0
```