Publications

  • [DOI] Y. V. Zaytsev and A. Morrison, “CyNEST: a maintainable Cython-based interface for the NEST simulator,” Frontiers in Neuroinformatics, vol. 8, p. 23, 2014.
    [Bibtex]
    @Article{Zaytsev2014,
    Title = {CyNEST: a maintainable Cython-based interface for the NEST simulator},
    Author = {Yury V. Zaytsev and Abigail Morrison},
    Journal = {Frontiers in Neuroinformatics},
    Year = {2014},
    Pages = {23},
    Volume = {8},
    Doi = {10.3389/fninf.2014.00023},
    Nest_category = {nest_technology},
    Owner = {krishnan},
    Timestamp = {2014.04.09}
    }
  • [DOI] S. Kunkel, M. Schmidt, J. M. Eppler, H. E. Plesser, G. Masumoto, J. Igarashi, S. Ishii, T. Fukai, A. Morrison, M. Diesmann, and M. Helias, “Spiking network simulation code for petascale computers,” Frontiers in Neuroinformatics, vol. 8, iss. 78, 2014.
    [Bibtex]
    @Article{Kunkel2014,
    Title = {Spiking network simulation code for petascale computers},
    Author = {Kunkel, Susanne and Schmidt, Maximilian and Eppler, Jochen Martin and Plesser, Hans Ekkehard and Masumoto, Gen and Igarashi, Jun and Ishii, Shin and Fukai, Tomoki and Morrison, Abigail and Diesmann, Markus and Helias, Moritz},
    Journal = {Frontiers in Neuroinformatics},
    Year = {2014},
    Number = {78},
    Volume = {8},
    Doi = {10.3389/fninf.2014.00078},
    Nest_category = {nest_technology},
    Owner = {graber},
    Timestamp = {2015.02.17},
    Url = {http://www.frontiersin.org/neuroinformatics/10.3389/fninf.2014.00078/abstract}
    }
  • [DOI] M. Djurfeldt, A. P. Davison, and J. M. Eppler, “Efficient generation of connectivity in neuronal networks from simulator-independent descriptions,” Front Neuroinform, vol. 8, iss. 43, 2014.
    [Bibtex]
    @Article{MikaelDjurfeldt2014,
    Title = {Efficient generation of connectivity in neuronal networks from simulator-independent descriptions},
    Author = {Djurfeldt, Mikael and Davison, Andrew P. and Eppler, Jochen M.},
    Journal = {Front Neuroinform},
    Year = {2014},
    Number = {43},
    Volume = {8},
    Comment = {nest_technology or nest_simulated},
    Doi = {10.3389/fninf.2014.00043},
    Nest_category = {nest_technology},
    Owner = {krishnan},
    Timestamp = {2014.05.22},
    Url = {http://www.ncbi.nlm.nih.gov/pmc/articles/PMC4001034/}
    }
  • M. Diesmann, “The road to brain-scale simulations on K,” BioSupercomputing Newsletter, vol. 8, p. 8, 2013.
    [Bibtex]
    @Article{Diesmann2013,
    Title = {The road to brain-scale simulations on {K}},
    Author = {Markus Diesmann},
    Journal = {BioSupercomputing Newsletter},
    Year = {2013},
    Pages = {8},
    Volume = {8},
    Nest_category = {nest_technology},
    Owner = {tetzlaff},
    Timestamp = {2013.03.05},
    Url = {http://www.csrp.riken.jp/BSNewsLetters/BSNvol8-1303/EN/report03.html}
    }
  • [DOI] S. Kunkel, T. C. Potjans, J. M. Eppler, H. E. Plesser, A. Morrison, and M. Diesmann, “Meeting the memory challenges of brain-scale network simulation,” Front Neuroinform, vol. 5, p. 35, 2011.
    [Bibtex]
    @Article{Kunkel2011b,
    Title = {Meeting the memory challenges of brain-scale network simulation.},
    Author = {Susanne Kunkel and Tobias C Potjans and Jochen M Eppler and Hans Ekkehard Plesser and Abigail Morrison and Markus Diesmann},
    Journal = {Front Neuroinform},
    Year = {2011},
    Pages = {35},
    Volume = {5},
    Abstract = {The development of high-performance simulation software is crucial for studying the brain connectome. Using connectome data to generate neurocomputational models requires software capable of coping with models on a variety of scales: from the microscale, investigating plasticity, and dynamics of circuits in local networks, to the macroscale, investigating the interactions between distinct brain regions. Prior to any serious dynamical investigation, the first task of network simulations is to check the consistency of data integrated in the connectome and constrain ranges for yet unknown parameters. Thanks to distributed computing techniques, it is possible today to routinely simulate local cortical networks of around 10(5) neurons with up to 10(9) synapses on clusters and multi-processor shared-memory machines. However, brain-scale networks are orders of magnitude larger than such local networks, in terms of numbers of neurons and synapses as well as in terms of computational load. Such networks have been investigated in individual studies, but the underlying simulation technologies have neither been described in sufficient detail to be reproducible nor made publicly available. Here, we discover that as the network model sizes approach the regime of meso- and macroscale simulations, memory consumption on individual compute nodes becomes a critical bottleneck. This is especially relevant on modern supercomputers such as the Blue Gene/P architecture where the available working memory per CPU core is rather limited. We develop a simple linear model to analyze the memory consumption of the constituent components of neuronal simulators as a function of network size and the number of cores used. This approach has multiple benefits. The model enables identification of key contributing components to memory saturation and prediction of the effects of potential improvements to code before any implementation takes place. As a consequence, development cycles can be shorter and less expensive. Applying the model to our freely available Neural Simulation Tool (NEST), we identify the software components dominant at different scales, and develop general strategies for reducing the memory consumption, in particular by using data structures that exploit the sparseness of the local representation of the network. We show that these adaptations enable our simulation software to scale up to the order of 10,000 processors and beyond. As memory consumption issues are likely to be relevant for any software dealing with complex connectome data on such architectures, our approach and our findings should be useful for researchers developing novel neuroinformatics solutions to the challenges posed by the connectome project.},
    Doi = {10.3389/fninf.2011.00035},
    Institution = {Functional Neural Circuits Group, Albert-Ludwig University of Freiburg Freiburg im Breisgau, Germany.},
    Language = {eng},
    Medline-pst = {ppublish},
    Nest_category = {nest_technology},
    Owner = {tetzlaff},
    Pmid = {22291636},
    Timestamp = {2013.01.22},
    Url = {http://dx.doi.org/10.3389/fninf.2011.00035}
    }
  • [DOI] W. Potjans, A. Morrison, and M. Diesmann, “Enabling functional neural circuit simulations with distributed computing of neuromodulated plasticity,” Front Comput Neurosci, vol. 4, p. 141, 2010.
    [Bibtex]
    @Article{Potjans2010a,
    Title = {Enabling functional neural circuit simulations with distributed computing of neuromodulated plasticity.},
    Author = {Wiebke Potjans and Abigail Morrison and Markus Diesmann},
    Journal = {Front Comput Neurosci},
    Year = {2010},
    Pages = {141},
    Volume = {4},
    Abstract = {A major puzzle in the field of computational neuroscience is how to relate system-level learning in higher organisms to synaptic plasticity. Recently, plasticity rules depending not only on pre- and post-synaptic activity but also on a third, non-local neuromodulatory signal have emerged as key candidates to bridge the gap between the macroscopic and the microscopic level of learning. Crucial insights into this topic are expected to be gained from simulations of neural systems, as these allow the simultaneous study of the multiple spatial and temporal scales that are involved in the problem. In particular, synaptic plasticity can be studied during the whole learning process, i.e., on a time scale of minutes to hours and across multiple brain areas. Implementing neuromodulated plasticity in large-scale network simulations where the neuromodulatory signal is dynamically generated by the network itself is challenging, because the network structure is commonly defined purely by the connectivity graph without explicit reference to the embedding of the nodes in physical space. Furthermore, the simulation of networks with realistic connectivity entails the use of distributed computing. A neuromodulated synapse must therefore be informed in an efficient way about the neuromodulatory signal, which is typically generated by a population of neurons located on different machines than either the pre- or post-synaptic neuron. Here, we develop a general framework to solve the problem of implementing neuromodulated plasticity in a time-driven distributed simulation, without reference to a particular implementation language, neuromodulator, or neuromodulated plasticity mechanism. We implement our framework in the simulator NEST and demonstrate excellent scaling up to 1024 processors for simulations of a recurrent network incorporating neuromodulated spike-timing dependent plasticity.},
    Doi = {10.3389/fncom.2010.00141},
    Institution = {Institute of Neuroscience and Medicine (INM-6), Computational and Systems Neuroscience, Research Center Jülich Jülich, Germany.},
    Language = {eng},
    Medline-pst = {epublish},
    Nest_category = {nest_technology},
    Owner = {krishnan},
    Pmid = {21151370},
    Timestamp = {2013.06.07},
    Url = {http://dx.doi.org/10.3389/fncom.2010.00141}
    }
  • [DOI] J. Eppler, “A Python interface to NEST,” The Neuromorphic Engineer, 2009.
    [Bibtex]
    @Article{Eppler2009,
    Title = {A Python interface to {NEST}},
    Author = {Jochen Eppler},
    Journal = {The Neuromorphic Engineer},
    Year = {2009},
    Doi = {10.2417/1200912.1703},
    Nest_category = {nest_technology},
    Owner = {krishnan},
    Timestamp = {2013.06.12},
    Url = {http://www.awareness-mag.eu/pdf/1703/1703.pdf}
    }
  • [DOI] J. M. Eppler, M. Helias, E. Muller, M. Diesmann, and M. Gewaltig, “PyNEST: a convenient interface to the NEST simulator,” Front Neuroinform, vol. 2, p. 12, 2008.
    [Bibtex]
    @Article{Eppler2008,
    Title = {Py{NEST}: A Convenient Interface to the {NEST} Simulator.},
    Author = {Jochen Martin Eppler and Moritz Helias and Eilif Muller and Markus Diesmann and Marc-Oliver Gewaltig},
    Journal = {Front Neuroinform},
    Year = {2008},
    Pages = {12},
    Volume = {2},
    Abstract = {The neural simulation tool NEST (http://www.nest-initiative.org) is a simulator for heterogeneous networks of point neurons or neurons with a small number of compartments. It aims at simulations of large neural systems with more than 10(4) neurons and 10(7) to 10(9) synapses. NEST is implemented in C++ and can be used on a large range of architectures from single-core laptops over multi-core desktop computers to super-computers with thousands of processor cores. Python (http://www.python.org) is a modern programming language that has recently received considerable attention in Computational Neuroscience. Python is easy to learn and has many extension modules for scientific computing (e.g. http://www.scipy.org). In this contribution we describe PyNEST, the new user interface to NEST. PyNEST combines NEST's efficient simulation kernel with the simplicity and flexibility of Python. Compared to NEST's native simulation language SLI, PyNEST makes it easier to set up simulations, generate stimuli, and analyze simulation results. We describe how PyNEST connects NEST and Python and how it is implemented. With a number of examples, we illustrate how it is used.},
    Doi = {10.3389/neuro.11.012.2008},
    Institution = {Honda Research Institute Europe GmbH, Offenbach Germany.},
    Language = {eng},
    Medline-pst = {ppublish},
    Nest_category = {nest_technology},
    Owner = {tetzlaff},
    Pmid = {19198667},
    Timestamp = {2013.01.22},
    Url = {http://dx.doi.org/10.3389/neuro.11.012.2008}
    }
  • [DOI] H. Plesser, J. Eppler, A. Morrison, M. Diesmann, and M. Gewaltig, “Efficient parallel simulation of large-scale neuronal networks on clusters of multiprocessor computers,” in Euro-Par 2007 Parallel Processing, A. Kermarrec, L. Bougé, and T. Priol, Eds., Springer Berlin Heidelberg, 2007, vol. 4641, pp. 672-681.
    [Bibtex]
    @InCollection{Plesser2007,
    Title = {Efficient Parallel Simulation of Large-Scale Neuronal Networks on Clusters of Multiprocessor Computers},
    Author = {Plesser, Hans E. and Eppler, Jochen M. and Morrison, Abigail and Diesmann, Markus and Gewaltig, Marc-Oliver},
    Booktitle = {Euro-Par 2007 Parallel Processing},
    Publisher = {Springer Berlin Heidelberg},
    Year = {2007},
    Editor = {Kermarrec, Anne-Marie and Bougé, Luc and Priol, Thierry},
    Pages = {672-681},
    Series = {Lecture Notes in Computer Science},
    Volume = {4641},
    Doi = {10.1007/978-3-540-74466-5_71},
    ISBN = {978-3-540-74465-8},
    Nest_category = {nest_technology},
    Owner = {krishnan},
    Timestamp = {2013.06.07},
    Url = {http://dx.doi.org/10.1007/978-3-540-74466-5_71}
    }
  • M. Gewaltig and M. Diesmann, “NEST: NEural Simulation Tool,” Scholarpedia, vol. 2, iss. 4, p. 1430, 2007.
    [Bibtex]
    @Article{Gewaltig2007,
    Title = {{NEST}:{NE}ural Simulation Tool},
    Author = {Marc-Oliver Gewaltig and Markus Diesmann},
    Journal = {Scholarpedia},
    Year = {2007},
    Number = {4},
    Pages = {1430},
    Volume = {2},
    File = {:cs-gewaltig2007.pdf:PDF},
    Nest_category = {nest_technology},
    Owner = {zyv},
    Timestamp = {2010.05.25}
    }
  • [DOI] A. Morrison, S. Straube, H. E. Plesser, and M. Diesmann, “Exact subthreshold integration with continuous spike times in discrete-time neural network simulations,” Neural Comput, vol. 19, iss. 1, pp. 47-79, 2007.
    [Bibtex]
    @Article{Diesmann2007a,
    Title = {Exact Subthreshold Integration with Continuous Spike Times in Discrete-Time Neural Network Simulations},
    Author = {Abigail Morrison and Sirko Straube and Hans E Plesser and Markus Diesmann},
    Journal = {Neural Comput},
    Year = {2007},
    Number = {1},
    Pages = {47-79},
    Volume = {19},
    Abstract = {Very large networks of spiking neurons can be simulated efficiently in parallel under the constraint that spike times are bound to an equidistant time grid. Within this scheme, the subthreshold dynamics of a wide class of integrate-and-fire-type neuron models can be integrated exactly from one grid point to the next. However, the loss in accuracy caused by restricting spike times to the grid can have undesirable consequences, which has led to interest in interpolating spike times between the grid points to retrieve an adequate representation of network dynamics. We demonstrate that the exact integration scheme can be combined naturally with off-grid spike events found by interpolation. We show that by exploiting the existence of a minimal synaptic propagation delay, the need for a central event queue is removed, so that the precision of event-driven simulation on the level of single neurons is combined with the efficiency of time-driven global scheduling. Further, for neuron models with linear subthreshold dynamics, even local event queuing can be avoided, resulting in much greater efficiency on the single-neuron level. These ideas are exemplified by two implementations of a widely used neuron model. We present a measure for the efficiency of network simulations in terms of their integration error and show that for a wide range of input spike rates, the novel techniques we present are both more accurate and faster than standard techniques.},
    Doi = {10.1162/neco.2007.19.1.47},
    Nest_category = {nest_technology},
    Owner = {krishnan},
    Timestamp = {2013.07.03},
    Url = {http://www.mitpressjournals.org/doi/abs/10.1162/neco.2007.19.1.47}
    }
  • H. E. Plesser, A. Morrison, S. Straube, and M. Diesmann, “Precise and efficient discrete time neural network simulation,” in Computational Neuroscience Meeting CNS*06, S51, Edinburgh, UK, 2006.
    [Bibtex]
    @InProceedings{Plesser2006,
    Title = {Precise and efficient discrete time neural network simulation},
    Author = {Plesser, Hans E. and Morrison, Abigail and Straube, Sirko and Diesmann, Markus},
    Booktitle = {Computational Neuroscience Meeting CNS*06, S51, Edinburgh, UK},
    Year = {2006},
    Nest_category = {nest_technology},
    Owner = {krishnan},
    Timestamp = {2013.06.07}
    }
  • S. Rotter and M. Diesmann, “Exact digital simulation of time-invariant linear systems with applications to neuronal modeling,” Biol Cybern, vol. 81, iss. 5-6, pp. 381-402, 1999.
    [Bibtex]
    @Article{Rotter1999,
    Title = {Exact digital simulation of time-invariant linear systems with applications to neuronal modeling.},
    Author = {S. Rotter and M. Diesmann},
    Journal = {Biol Cybern},
    Year = {1999},
    Month = {Nov},
    Number = {5-6},
    Pages = {381--402},
    Volume = {81},
    Abstract = {An efficient new method for the exact digital simulation of time-invariant linear systems is presented. Such systems are frequently encountered as models for neuronal systems, or as submodules of such systems. The matrix exponential is used to construct a matrix iteration, which propagates the dynamic state of the system step by step on a regular time grid. A large and general class of dynamic inputs to the system, including trains of delta-pulses, can be incorporated into the exact simulation scheme. An extension of the proposed scheme presents an attractive alternative for the approximate simulation of networks of integrate-and-fire neurons with linear sub-threshold integration and non-linear spike generation. The performance of the proposed method is analyzed in comparison with a number of multi-purpose solvers. In simulations of integrate-and-fire neurons, Exact Integration systematically generates the smallest error with respect to both sub-threshold dynamics and spike timing. For the simulation of systems where precise spike timing is important, this results in a practical advantage in particular at moderate integration step sizes.},
    Institution = {Neurobiologie und Biophysik, Institut für Biologie III, Universität Freiburg, Freiburg, Germany. rotter@biologie.uni-freiburg.de},
    Keywords = {Animals; Computer Simulation; Cybernetics; Evoked Potentials; Humans; Linear Models; Models, Neurological; Nerve Net, physiology; Neurons, physiology; Time Factors},
    Language = {eng},
    Medline-pst = {ppublish},
    Nest_category = {nest_technology},
    Owner = {tetzlaff},
    Pii = {90810381.422},
    Pmid = {10592015},
    Timestamp = {2013.02.02}
    }
  • M. Diesmann, M. Gewaltig, and A. Aertsen, “SYNOD: an environment for neural systems simulations. Language interface and tutorial,” 1995.
    [Bibtex]
    @Article{MarkusDiesmann1995,
    Title = {{SYNOD}: an Environment for Neural Systems Simulations. Language Interface and Tutorial},
    Author = {Markus Diesmann and Marc-Oliver Gewaltig and Ad Aertsen},
    Year = {1995},
    Nest_category = {nest_technology},
    Owner = {krishnan},
    Timestamp = {2013.06.07},
    Url = {http://www.nest-initiative.org/publications/1995/Diesmann95.pdf}
    }
  • J. Eppler, A. Morrison, M. Diesmann, H. Plesser, and M. Gewaltig, “Parallel and distributed simulation of large biological neural networks with NEST,” in Computational Neuroscience Meeting CNS*06, S48, Edinburgh, UK, 2006.
    [Bibtex]
    @InProceedings{Eppler,
    Title = {Parallel and Distributed Simulation of Large Biological Neural Networks with {NEST}},
    Author = {Eppler, Jochen and Morrison, Abigail and Diesmann, Markus and Plesser, Hans Ekkehard and Gewaltig, Marc-Oliver},
    Booktitle = {Computational Neuroscience Meeting CNS*06, S48, Edinburgh, UK},
    Year = {2006},
    Nest_category = {nest_technology},
    Owner = {krishnan},
    Timestamp = {2013.06.09}
    }