From c1de7a89bae14e3b07868caf5c7f97f82b63f045 Mon Sep 17 00:00:00 2001 From: jchen6727 Date: Wed, 17 Jul 2024 16:01:52 -0500 Subject: [PATCH] Batch (#826) update batch re: end user suggestion can now initialize cfg with a dictionary: ``` Python from netpyne.batchtools import specs cfg = specs.SimConfig({'x': [None] * 4}) ``` creates a new cfg such that it already has value x, e.g. cfg.x = [None, None, None, None] update to documentation with some minimal working examples (see https://github.com/suny-downstate-medical-center/netpyne/tree/batch/netpyne/batchtools/examples), esp. Rosenbrock w/ coupled parameters, nested parameters, etc. --- CHANGES.md | 6 +- doc/build.py | 2 +- doc/source/user_documentation.rst | 180 ++++++++++++++++-- netpyne/batch/utils.py | 12 +- netpyne/batchtools/docs/batchtools.rst | 175 ++++++++++++++--- netpyne/batchtools/examples/CA3/README.md | 0 .../batchtools/examples/CA3/grid_search.py | 2 +- .../rosenbrock/basic_rosenbrock/batch.py | 21 ++ .../rosenbrock/basic_rosenbrock/rosenbrock.py | 37 ++++ .../rosenbrock/coupled_rosenbrock/batch.py | 24 +++ .../coupled_rosenbrock/rosenbrock.py | 41 ++++ .../rosenbrock/fanova_rosenbrock/batch.py | 23 +++ .../fanova_rosenbrock/rosenbrock.py | 49 +++++ .../rosenbrock/nested_rosenbrock/batch.py | 23 +++ .../nested_rosenbrock/rosenbrock.py | 41 ++++ netpyne/batchtools/runners.py | 31 ++- netpyne/cell/inputs.py | 4 +- netpyne/network/conn.py | 61 +++--- netpyne/sim/save.py | 57 +++--- netpyne/sim/setup.py | 7 +- netpyne/specs/netParams.py | 10 +- netpyne/specs/simConfig.py | 12 +- 22 files changed, 696 insertions(+), 122 deletions(-) create mode 100644 netpyne/batchtools/examples/CA3/README.md create mode 100644 netpyne/batchtools/examples/rosenbrock/basic_rosenbrock/batch.py create mode 100644 netpyne/batchtools/examples/rosenbrock/basic_rosenbrock/rosenbrock.py create mode 100644 netpyne/batchtools/examples/rosenbrock/coupled_rosenbrock/batch.py create mode 100644 netpyne/batchtools/examples/rosenbrock/coupled_rosenbrock/rosenbrock.py create mode 100644 netpyne/batchtools/examples/rosenbrock/fanova_rosenbrock/batch.py create mode 100644 netpyne/batchtools/examples/rosenbrock/fanova_rosenbrock/rosenbrock.py create mode 100644 netpyne/batchtools/examples/rosenbrock/nested_rosenbrock/batch.py create mode 100644 netpyne/batchtools/examples/rosenbrock/nested_rosenbrock/rosenbrock.py diff --git a/CHANGES.md b/CHANGES.md index 7c4219c2f..3cc7a9c08 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -2,7 +2,7 @@ **New features** -- Added progress-bar indicating network creation progress +- Added progress-bar indicating network creation progress. Toggle the progress bar with cfg.progressBar - cfg.connRandomSecFromList and cfg.distributeSynsUniformly can now be overriden in individual conn rule @@ -20,6 +20,10 @@ - Fixed a bug in `gatherDataFromFiles()` where cellGids for node 0 were lost +- Fixed generating rhythmic spiking pattern with 'uniform' option + +- Fixed misleading console output when cfg.recordStims is On + # Version 1.0.6 **New features** diff --git a/doc/build.py b/doc/build.py index 3a3e6b5ff..75f572aff 100644 --- a/doc/build.py +++ b/doc/build.py @@ -51,7 +51,7 @@ 13) Announce the new release 13a) New release announcement text: NetPyNE v#.#.# is now available. 
For a complete list of changes and bug fixes see: https://github.com/suny-downstate-medical-center/netpyne/releases/tag/v#.#.# - See here for instructions to install or update to the latest version: http://www.netpyne.org/install.html + See here for instructions to install or update to the latest version: https://www.netpyne.org/documentation/installation 13b) Announce on NEURON forum: https://www.neuron.yale.edu/phpBB/viewtopic.php?f=45&t=3685&sid=9c380fe3a835babd47148c81ae71343e 13c) Announce to Google group: diff --git a/doc/source/user_documentation.rst b/doc/source/user_documentation.rst index d46896a0f..a0ae689a7 100644 --- a/doc/source/user_documentation.rst +++ b/doc/source/user_documentation.rst @@ -2547,11 +2547,14 @@ The code for neural network optimization through evolutionary algorithm used in .. Adding cell classes .. -------------------- -Running a Batch Job (Beta) +Running a Batch Job =================== -The NetPyNE batchtools subpackage provides a method of automating job submission and reporting:: +The NetPyNE batchtools subpackage provides a method of automating job submission and reporting +A diagram of the object interfaces... + +:: batch<-->\ /---> configuration_0 >---\ \ / specs---\ @@ -2568,10 +2571,30 @@ The NetPyNE batchtools subpackage provides a method of automating job submission \ ... +While objects and interfaces can be handled directly, batchtools offers simple wrapper commands applicable to most use-cases, where +automatic parameter searches can be done by specifying a search space and algorithm through `netpyne.batchtools.search`, and +parameter to model translation and result communication is handled through `netpyne.batchtools.specs` and `netpyne.batchtools.comm` respectively. + +A diagram of the wrapper interactions... + +:: + netpyne.batchtools.search.search( ) ----------------------------\ host + | | + | search( ) | + ============================================================================================== + | comm.initialize( ) + | comm.send( ) + | cfg = netpyne.batchtools.specs.SimConfig( ) comm.close( ) + | | ^ ^ + v v | | + cfg.update_cfg() ----------------------------------------/ | + | + send( ) netpyne.batchtools.comm( ) + simulation 1. Setting up batchtools ------ +------------------------ Beyond the necessary dependency installations for NetPyNE and NEURON, several additional `pip` installations are required. The NetPyNE installation should be handled as a development installation of the repository branch `batch`:: @@ -2596,20 +2619,20 @@ Ray is a dependency for batchtools, and should be installed with the following c pip install -u ray[default] 2. Examples ------ -Examples of NetPyNE batchtools usage can be found in the ``examples`` directory `here `_. +----------- +Examples of NetPyNE batchtools usage can be found in the ``examples`` directory `on the NetPyNE github `_. -Examples of the underlying batchtk package can be in the ``examples`` directory `here `_. +Examples of the underlying batchtk package can be in the ``examples`` directory `on the batchtk github `_. 3. Retrieving batch configuration values through the ``specs`` object ------ +--------------------------------------------------------------------- Each simulation is able to retrieve relevant configurations through the ``specs`` object, and communicate with the dispatcher through the ``comm`` object. 
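+
+The ``SimConfig`` object can additionally be seeded with initial values by passing a dictionary when it is created. A minimal sketch of this constructor usage (the key ``x`` below is only an example)::
+
+    from netpyne.batchtools import specs
+
+    cfg = specs.SimConfig({'x': [None] * 4})  # cfg.x starts as [None, None, None, None]
+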
importing the relevant objects:: from netpyne.batchtools import specs, comm - cfg = specs.SimConfig() # create a SimConfig object + cfg = specs.SimConfig() # create a SimConfig object, can be provided with a dictionary on initial call to set initial values netParams = specs.NetParams() # create a netParams object ``netpyne.batchtools.specs`` behaves similarly to ``netpyne.sim.specs`` except in the following cases: @@ -2635,8 +2658,9 @@ This replaces the previous idiom for updating the SimConfig object with mappings + 4. Communicating results to the ``dispatcher`` with the ``comm`` object ------ +----------------------------------------------------------------------- Prior batched simulations relied on ``.pkl`` files to communicate data. The ``netpyne.batch`` subpackage uses a specific ``comm`` object to send custom data back The ``comm`` object determines the method of communication based on the batch job submission type. @@ -2646,15 +2670,18 @@ In terms of the simulation, the following functions are available to the user: * **comm.initialize()**: establishes a connection with the batch ``dispatcher`` for sending data * **comm.send()**: sends ```` to the batch ``dispatcher`` + * for ``search`` jobs, it is important to match the data sent with the metric specified in the search function * **comm.close()**: closes and cleans up the connection with the batch ``dispatcher`` 5. Specifying a batch job ------ +------------------------- Batch job handling is implemented with methods from ``netpyne.batchtools.search`` -**search**:: +**search** + +.. code-block:: python def search(job_type: str, # the submission engine to run a single simulation (e.g. 'sge', 'sh') comm_type: str, # the method of communication between host dispatcher and the simulation (e.g. 'socket', 'filesystem') @@ -2708,7 +2735,9 @@ The basic search implemented with the ``search`` function uses ``ray.tune`` as t * **params**: a dictionary of config values to perform the search over. The keys of the dictionary should match the keys of the config object to be updated. Lists or numpy generators >2 values will force a grid search over the values; otherwise, a list of two values will create a uniform distribution sample space. - **usage 1**: updating a constant value specified in the ``SimConfig`` object :: + **usage 1**: updating a constant value specified in the ``SimConfig`` object + +.. code-block:: python # take a config object with the following parameter ``foo`` cfg = specs.SimConfig() @@ -2727,7 +2756,9 @@ The basic search implemented with the ``search`` function uses ``ray.tune`` as t 'foo': range(10) } - **usage 2**: updating a nested object in the ``SimConfig`` object:: + **usage 2**: updating a nested object in the ``SimConfig`` object + +.. code-block:: python # to update a nested object, the package uses the `.` operator to specify reflection into the object. # take a config object with the following parameter object ``foo`` @@ -2752,9 +2783,35 @@ The basic search implemented with the ``search`` function uses ``ray.tune`` as t # cfg.foo = {'bar': {'baz': 0}} # params = {'foo.bar.baz': range(10)} + **usage 3**: updating a list object in the ``SimConfig`` object + +.. code-block:: python + + # to update a nested object, the package uses the `.` operator to specify reflection into the object. 
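+    # for list values the same syntax applies, with an integer index after the dot (e.g. `foo.0` refers to foo[0])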
+ # take a config object with the following + cfg = specs.SimConfig() + cfg.foo = [0, 1, 4, 9, 16] + cfg.update() + + # specify a search space for ``foo[0]`` with `foo.0` such that a simulation will run: + # cfg.foo[0] = 0 + # cfg.foo[0] = 1 + # cfg.foo[0] = 2 + # ... + # cfg.foo[0] = 9 + + # using: + params = { + 'foo.0': range(10) + } + + # this reflection works with nested objects as well... + * **algorithm** : the search algorithm (supported within ``ray.tune``) - **Supported algorithms**:: + **Supported algorithms** + +.. code-block:: python * "variant_generator": grid and random based search of the parameter space (see: https://docs.ray.io/en/latest/tune/api/suggestion.html) * "random": grid and random based search of the parameter space (see: https://docs.ray.io/en/latest/tune/api/suggestion.html) @@ -2786,11 +2843,87 @@ The basic search implemented with the ``search`` function uses ``ray.tune`` as t * **algorithm_config**: additional configuration for the search algorithm (see the `optuna docs `_) -6. Performing parameter optimization searches (CA3 example) ------ -The ``examples`` directory `here `_ shows both a ``grid`` based search as well as an ``optuna`` based optimization. +6. Batch searches on the Rosenbrock function (some simple examples) +------------------------------------------------------------------- +The ``examples`` directory `on the NetPyNE github `_ contains multiple methods of performing automatic parameter search of a +2 dimensional Rosenbrock function. These examples are used to quickly demonstrate some of the functionality of batch communications rather than the full process of running parameter searches on a detailed +NEURON simulation (see 7. Performing parameter optimization searches (CA3 example)) and therefore only contain the a `batch.py` file containing the script detailing the parameter space and search method, and a +`rosenbrock.py` file containing the function to explore, and the appropriate declarations and calls for batch automation and communication (rather than the traditional `cfg.py`, `netParams.py`, and `init.py` files). + +1. `basic_rosenbrock `_ + +This demonstrates a basic grid search of the Rosenbrock function using the new ``batchtools``, where the search space is defined as the cartesian product of ``params['x0']`` and ``params['x1']`` + +.. code-block:: python + + # from batch.py + params = {'x0': [0, 3], + 'x1': [0, 3], + } + +that is, with the values ``cfg.x0``, ``cfg.x1`` iterating over: ``[(0, 0), (0, 3), (3, 0), (3, 3)]`` list + +2. `coupled_rosenbrock `_ + +This demonstrates a basic paired grid search, where ``x0`` is ``[0, 1, 2]`` and x1[n] is ``x0[n]**2`` + +.. code-block:: python + + # from batch.py + x0 = numpy.arange(0, 3) + x1 = x0**2 + + x0_x1 = [*zip(x0, x1)] + params = {'x0_x1': x0_x1 + } + +the ``x0`` and ``x1`` values are paired together to create a search space ``x0_x1`` iterating over: ``[(0, 0), (1, 1), (2, 4)]`` list + +then, in the ``rosenbrock.py`` file, a list of two values ``cfg.x0_x1`` is created to capture the ``x0_x1`` values, which is then unpacked into individual ``x0`` and ``x1`` values -In the ``CA3`` example, we tune the ``PYR->BC`` ``NMDA`` and ``AMPA`` synaptic weights, as well as the ``BC->PYR`` ``GABA`` synaptic weight. Note the search space is defined:: +.. 
code-block:: python + + # from rosenbrock.py + cfg.x0_x1 = [1, 1] + + cfg.update_cfg() + + # -------------- unpacking x0_x1 list -------------- # + x0, x1 = cfg.x0_x1 + +then the Rosenbrock function is evaluated with the unpacked ``x0`` and ``x1`` + +3. `random_rosenbrock `_ + +This demonstrates a grid search over a nested object, where ``xn`` is a list of 2 values which are independently modified to search the cartesian product of ``[0, 1, 2, 3, 4]`` and ``[0, 1, 2, 3, 4]`` + +.. code-block:: python + + # from batch.py + params = {'xn.0': numpy.arange(0, 5), + 'xn.1': numpy.arange(0, 5) + } + +By using ``xn.0`` and ``xn.1`` we can reference the 0th and 1st elements of the list, which is created and modified in rosenbrock.py + +.. code-block:: python + + # from rosenbrock.py + cfg.xn = [1, 1] + + cfg.update_cfg() + + # ---------------- unpacking x list ---------------- # + x0, x1 = cfg.xn + + +7. Performing parameter optimization searches (CA3 example) +----------------------------------------------------------- +The ``examples`` directory `on the NetPyNE github `_ shows both a ``grid`` based search as well as an ``optuna`` based optimization. + +In the ``CA3`` example, we tune the ``PYR->BC`` ``NMDA`` and ``AMPA`` synaptic weights, as well as the ``BC->PYR`` ``GABA`` synaptic weight. Note the search space is defined + +.. code-block:: python # from optuna_search.py params = {'nmda.PYR->BC' : [1e-3, 1.8e-3], @@ -2798,7 +2931,9 @@ In the ``CA3`` example, we tune the ``PYR->BC`` ``NMDA`` and ``AMPA`` synaptic w 'gaba.BC->PYR' : [0.4e-3, 1.0e-3], } -in both ``optuna_search.py``, defining the upper and lower bounds of the search space, while in ``grid_search.py`` the search space is defined:: +in both ``optuna_search.py``, defining the upper and lower bounds of the search space, while in ``grid_search.py`` the search space is defined + +.. code-block:: python # from grid_search.py params = {'nmda.PYR->BC' : numpy.linspace(1e-3, 1.8e-3, 3), @@ -2808,7 +2943,9 @@ in both ``optuna_search.py``, defining the upper and lower bounds of the search which defines ``3x3x3`` specific values to search over -Note that the ``metric`` specifies a specific ``string`` (``loss``) to report and optimize around. This value is generated and ``sent`` by the ``init.py`` simulation:: +Note that the ``metric`` specifies a specific ``string`` (``loss``) to report and optimize around. This value is generated and ``sent`` by the ``init.py`` simulation + +.. 
code-block:: python # from init.py results['PYR_loss'] = (results['PYR'] - 3.33875)**2 @@ -2824,4 +2961,5 @@ Note that the ``metric`` specifies a specific ``string`` (``loss``) to report an The ``out_json`` output contains a dictionary which includes the ``loss`` metric (calculated as the MSE between observed and expected values) -In a multi-objective optimization, the relevant ``PYR_loss``, ``BC_loss``, and ``OLM_loss`` components are additionally included (see ``mo_optuna_search.py``) \ No newline at end of file +In a multi-objective optimization, the relevant ``PYR_loss``, ``BC_loss``, and ``OLM_loss`` components are additionally included (see ``mo_optuna_search.py``) + diff --git a/netpyne/batch/utils.py b/netpyne/batch/utils.py index c77e26ea1..99bdbe759 100644 --- a/netpyne/batch/utils.py +++ b/netpyne/batch/utils.py @@ -28,11 +28,13 @@ def createFolder(folder): import os - if not os.path.exists(folder): - try: - os.mkdir(folder) - except OSError: - print(' Could not create %s' % (folder)) + # If file path does not exist, it will create the file path (parent and sub-directories) + + try: + os.makedirs(folder, exist_ok=True) + except Exception as e: + print('%s: Exception: %s,' % (os.path.abspath(__file__), e)) + raise SystemExit('Could not create %s' % (folder)) # ------------------------------------------------------------------------------- diff --git a/netpyne/batchtools/docs/batchtools.rst b/netpyne/batchtools/docs/batchtools.rst index 499340fbd..af5a69c9c 100644 --- a/netpyne/batchtools/docs/batchtools.rst +++ b/netpyne/batchtools/docs/batchtools.rst @@ -1,8 +1,11 @@ Running a Batch Job =================== -The NetPyNE batchtools subpackage provides a method of automating job submission and reporting:: +The NetPyNE batchtools subpackage provides a method of automating job submission and reporting +A diagram of the object interfaces... + +:: batch<-->\ /---> configuration_0 >---\ \ / specs---\ @@ -19,11 +22,30 @@ The NetPyNE batchtools subpackage provides a method of automating job submission \ ... +While objects and interfaces can be handled directly, batchtools offers simple wrapper commands applicable to most use-cases, where +automatic parameter searches can be done by specifying a search space and algorithm through `netpyne.batchtools.search`, and +parameter to model translation and result communication is handled through `netpyne.batchtools.specs` and `netpyne.batchtools.comm` respectively. + +A diagram of the wrapper interactions... +:: + netpyne.batchtools.search.search( ) ----------------------------\ host + | | + | search( ) | + ============================================================================================== + | comm.initialize( ) + | comm.send( ) + | cfg = netpyne.batchtools.specs.SimConfig( ) comm.close( ) + | | ^ ^ + v v | | + cfg.update_cfg() ----------------------------------------/ | + | + send( ) netpyne.batchtools.comm( ) + simulation 1. Setting up batchtools ------ +------------------------ Beyond the necessary dependency installations for NetPyNE and NEURON, several additional `pip` installations are required. The NetPyNE installation should be handled as a development installation of the repository branch `batch`:: @@ -48,21 +70,20 @@ Ray is a dependency for batchtools, and should be installed with the following c pip install -u ray[default] 2. Examples ------ -Examples of NetPyNE batchtools usage can be found in the ``examples`` directory `here `_. 
+----------- +Examples of NetPyNE batchtools usage can be found in the ``examples`` directory `on the NetPyNE github `_. -Examples of the underlying batchtk package can be in the ``examples`` directory `here `_. +Examples of the underlying batchtk package can be in the ``examples`` directory `on the batchtk github `_. 3. Retrieving batch configuration values through the ``specs`` object - ------ +--------------------------------------------------------------------- Each simulation is able to retrieve relevant configurations through the ``specs`` object, and communicate with the dispatcher through the ``comm`` object. importing the relevant objects:: from netpyne.batchtools import specs, comm - cfg = specs.SimConfig() # create a SimConfig object + cfg = specs.SimConfig() # create a SimConfig object, can be provided with a dictionary on initial call to set initial values netParams = specs.NetParams() # create a netParams object ``netpyne.batchtools.specs`` behaves similarly to ``netpyne.sim.specs`` except in the following cases: @@ -90,8 +111,7 @@ This replaces the previous idiom for updating the SimConfig object with mappings 4. Communicating results to the ``dispatcher`` with the ``comm`` object - ------ +----------------------------------------------------------------------- Prior batched simulations relied on ``.pkl`` files to communicate data. The ``netpyne.batch`` subpackage uses a specific ``comm`` object to send custom data back The ``comm`` object determines the method of communication based on the batch job submission type. @@ -107,11 +127,12 @@ In terms of the simulation, the following functions are available to the user: * **comm.close()**: closes and cleans up the connection with the batch ``dispatcher`` 5. Specifying a batch job - ------ +------------------------- Batch job handling is implemented with methods from ``netpyne.batchtools.search`` -**search**:: +**search** + +.. code-block:: python def search(job_type: str, # the submission engine to run a single simulation (e.g. 'sge', 'sh') comm_type: str, # the method of communication between host dispatcher and the simulation (e.g. 'socket', 'filesystem') @@ -165,7 +186,9 @@ The basic search implemented with the ``search`` function uses ``ray.tune`` as t * **params**: a dictionary of config values to perform the search over. The keys of the dictionary should match the keys of the config object to be updated. Lists or numpy generators >2 values will force a grid search over the values; otherwise, a list of two values will create a uniform distribution sample space. - **usage 1**: updating a constant value specified in the ``SimConfig`` object :: + **usage 1**: updating a constant value specified in the ``SimConfig`` object + +.. code-block:: python # take a config object with the following parameter ``foo`` cfg = specs.SimConfig() @@ -184,7 +207,9 @@ The basic search implemented with the ``search`` function uses ``ray.tune`` as t 'foo': range(10) } - **usage 2**: updating a nested object in the ``SimConfig`` object:: + **usage 2**: updating a nested object in the ``SimConfig`` object + +.. code-block:: python # to update a nested object, the package uses the `.` operator to specify reflection into the object. # take a config object with the following parameter object ``foo`` @@ -209,9 +234,35 @@ The basic search implemented with the ``search`` function uses ``ray.tune`` as t # cfg.foo = {'bar': {'baz': 0}} # params = {'foo.bar.baz': range(10)} + **usage 3**: updating a list object in the ``SimConfig`` object + +.. 
code-block:: python + + # to update a nested object, the package uses the `.` operator to specify reflection into the object. + # take a config object with the following + cfg = specs.SimConfig() + cfg.foo = [0, 1, 4, 9, 16] + cfg.update() + + # specify a search space for ``foo[0]`` with `foo.0` such that a simulation will run: + # cfg.foo[0] = 0 + # cfg.foo[0] = 1 + # cfg.foo[0] = 2 + # ... + # cfg.foo[0] = 9 + + # using: + params = { + 'foo.0': range(10) + } + + # this reflection works with nested objects as well... + * **algorithm** : the search algorithm (supported within ``ray.tune``) - **Supported algorithms**:: + **Supported algorithms** + +.. code-block:: python * "variant_generator": grid and random based search of the parameter space (see: https://docs.ray.io/en/latest/tune/api/suggestion.html) * "random": grid and random based search of the parameter space (see: https://docs.ray.io/en/latest/tune/api/suggestion.html) @@ -243,11 +294,87 @@ The basic search implemented with the ``search`` function uses ``ray.tune`` as t * **algorithm_config**: additional configuration for the search algorithm (see the `optuna docs `_) -6. Performing parameter optimization searches (CA3 example) ------ -The ``examples`` directory `here `_ shows both a ``grid`` based search as well as an ``optuna`` based optimization. +6. Batch searches on the Rosenbrock function (some simple examples) +------------------------------------------------------------------- +The ``examples`` directory `on the NetPyNE github `_ contains multiple methods of performing automatic parameter search of a +2 dimensional Rosenbrock function. These examples are used to quickly demonstrate some of the functionality of batch communications rather than the full process of running parameter searches on a detailed +NEURON simulation (see 7. Performing parameter optimization searches (CA3 example)) and therefore only contain the a `batch.py` file containing the script detailing the parameter space and search method, and a +`rosenbrock.py` file containing the function to explore, and the appropriate declarations and calls for batch automation and communication (rather than the traditional `cfg.py`, `netParams.py`, and `init.py` files). + +1. `basic_rosenbrock `_ + +This demonstrates a basic grid search of the Rosenbrock function using the new ``batchtools``, where the search space is defined as the cartesian product of ``params['x0']`` and ``params['x1']`` + +.. code-block:: python + + # from batch.py + params = {'x0': [0, 3], + 'x1': [0, 3], + } + +that is, with the values ``cfg.x0``, ``cfg.x1`` iterating over: ``[(0, 0), (0, 3), (3, 0), (3, 3)]`` list + +2. `coupled_rosenbrock `_ + +This demonstrates a basic paired grid search, where ``x0`` is ``[0, 1, 2]`` and x1[n] is ``x0[n]**2`` + +.. code-block:: python + + # from batch.py + x0 = numpy.arange(0, 3) + x1 = x0**2 + + x0_x1 = [*zip(x0, x1)] + params = {'x0_x1': x0_x1 + } + +the ``x0`` and ``x1`` values are paired together to create a search space ``x0_x1`` iterating over: ``[(0, 0), (1, 1), (2, 4)]`` list + +then, in the ``rosenbrock.py`` file, a list of two values ``cfg.x0_x1`` is created to capture the ``x0_x1`` values, which is then unpacked into individual ``x0`` and ``x1`` values + +.. code-block:: python + + # from rosenbrock.py + cfg.x0_x1 = [1, 1] + + cfg.update_cfg() + + # -------------- unpacking x0_x1 list -------------- # + x0, x1 = cfg.x0_x1 + +then the Rosenbrock function is evaluated with the unpacked ``x0`` and ``x1`` + +3. 
`random_rosenbrock `_ -In the ``CA3`` example, we tune the ``PYR->BC`` ``NMDA`` and ``AMPA`` synaptic weights, as well as the ``BC->PYR`` ``GABA`` synaptic weight. Note the search space is defined:: +This demonstrates a grid search over a nested object, where ``xn`` is a list of 2 values which are independently modified to search the cartesian product of ``[0, 1, 2, 3, 4]`` and ``[0, 1, 2, 3, 4]`` + +.. code-block:: python + + # from batch.py + params = {'xn.0': numpy.arange(0, 5), + 'xn.1': numpy.arange(0, 5) + } + +By using ``xn.0`` and ``xn.1`` we can reference the 0th and 1st elements of the list, which is created and modified in rosenbrock.py + +.. code-block:: python + + # from rosenbrock.py + cfg.xn = [1, 1] + + cfg.update_cfg() + + # ---------------- unpacking x list ---------------- # + x0, x1 = cfg.xn + + +7. Performing parameter optimization searches (CA3 example) +----------------------------------------------------------- +The ``examples`` directory `on the NetPyNE github `_ shows both a ``grid`` based search as well as an ``optuna`` based optimization. + +In the ``CA3`` example, we tune the ``PYR->BC`` ``NMDA`` and ``AMPA`` synaptic weights, as well as the ``BC->PYR`` ``GABA`` synaptic weight. Note the search space is defined + +.. code-block:: python # from optuna_search.py params = {'nmda.PYR->BC' : [1e-3, 1.8e-3], @@ -255,7 +382,9 @@ In the ``CA3`` example, we tune the ``PYR->BC`` ``NMDA`` and ``AMPA`` synaptic w 'gaba.BC->PYR' : [0.4e-3, 1.0e-3], } -in both ``optuna_search.py``, defining the upper and lower bounds of the search space, while in ``grid_search.py`` the search space is defined:: +in both ``optuna_search.py``, defining the upper and lower bounds of the search space, while in ``grid_search.py`` the search space is defined + +.. code-block:: python # from grid_search.py params = {'nmda.PYR->BC' : numpy.linspace(1e-3, 1.8e-3, 3), @@ -265,7 +394,9 @@ in both ``optuna_search.py``, defining the upper and lower bounds of the search which defines ``3x3x3`` specific values to search over -Note that the ``metric`` specifies a specific ``string`` (``loss``) to report and optimize around. This value is generated and ``sent`` by the ``init.py`` simulation:: +Note that the ``metric`` specifies a specific ``string`` (``loss``) to report and optimize around. This value is generated and ``sent`` by the ``init.py`` simulation + +.. 
code-block:: python # from init.py results['PYR_loss'] = (results['PYR'] - 3.33875)**2 diff --git a/netpyne/batchtools/examples/CA3/README.md b/netpyne/batchtools/examples/CA3/README.md new file mode 100644 index 000000000..e69de29bb diff --git a/netpyne/batchtools/examples/CA3/grid_search.py b/netpyne/batchtools/examples/CA3/grid_search.py index ed1860def..3ad5dedd2 100644 --- a/netpyne/batchtools/examples/CA3/grid_search.py +++ b/netpyne/batchtools/examples/CA3/grid_search.py @@ -26,7 +26,7 @@ run_config = sge_config -search(job_type = 'sge', # or shell +search(job_type = 'sge', # or 'sh' comm_type = 'socket', label = 'grid', diff --git a/netpyne/batchtools/examples/rosenbrock/basic_rosenbrock/batch.py b/netpyne/batchtools/examples/rosenbrock/basic_rosenbrock/batch.py new file mode 100644 index 000000000..ed6a05474 --- /dev/null +++ b/netpyne/batchtools/examples/rosenbrock/basic_rosenbrock/batch.py @@ -0,0 +1,21 @@ +from netpyne.batchtools.search import search + +params = {'x0': [0, 3], + 'x1': [0, 3] + } + +# use shell_config if running directly on the machine +shell_config = {'command': 'python rosenbrock.py',} + +search(job_type = 'sh', # or sh + comm_type = 'socket', + label = 'optuna', + params = params, + output_path = '../optuna_batch', + checkpoint_path = '../ray', + run_config = {'command': 'python rosenbrock.py'}, + num_samples = 9, + metric = 'fx', + mode = 'min', + algorithm = 'optuna', + max_concurrent = 3) diff --git a/netpyne/batchtools/examples/rosenbrock/basic_rosenbrock/rosenbrock.py b/netpyne/batchtools/examples/rosenbrock/basic_rosenbrock/rosenbrock.py new file mode 100644 index 000000000..52fa35e86 --- /dev/null +++ b/netpyne/batchtools/examples/rosenbrock/basic_rosenbrock/rosenbrock.py @@ -0,0 +1,37 @@ +from netpyne.batchtools import specs, comm +import json + +# ---- Rosenbrock Function & Constant Definition ---- # + +""" +The rosenbrock minimum is at (A, A**2), where rosenbrock(A, A**2) = 0 +""" +def rosenbrock(x0, x1): + return 100 * (x1 - x0**2)**2 + (A - x0)**2 + +A = 1 +# --------------------------------------------------- # + +# ----------- cfg creation & batch update ----------- # + +cfg = specs.SimConfig() + +cfg.simLabel = 'rosenbrock' +cfg.saveFolder = '.' 
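+
+# x0 and x1 below are placeholder defaults; when run under a batch search,
+# cfg.update_cfg() replaces them with the values chosen by the dispatcher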
+ +cfg.x0 = 1 +cfg.x1 = 1 + +cfg.update_cfg() + +# --------------------------------------------------- # + +# comm creation, calculation and result transmission # +comm.initialize() + +out_json = json.dumps({'x0': cfg.x0, 'x1': cfg.x1, 'fx': rosenbrock(cfg.x0, cfg.x1)}) +if comm.is_host(): + print(out_json) + comm.send(out_json) + comm.close() + diff --git a/netpyne/batchtools/examples/rosenbrock/coupled_rosenbrock/batch.py b/netpyne/batchtools/examples/rosenbrock/coupled_rosenbrock/batch.py new file mode 100644 index 000000000..73949dfba --- /dev/null +++ b/netpyne/batchtools/examples/rosenbrock/coupled_rosenbrock/batch.py @@ -0,0 +1,24 @@ +from netpyne.batchtools.search import search +import numpy +x0 = numpy.arange(0, 3) +x1 = x0**2 + +x0_x1 = [*zip(x0, x1)] +params = {'x0_x1': x0_x1 + } + +# use shell_config if running directly on the machine +shell_config = {'command': 'python rosenbrock.py',} + +search(job_type = 'sh', # or sh + comm_type = 'socket', + label = 'grid', + params = params, + output_path = '../grid_batch', + checkpoint_path = '../ray', + run_config = {'command': 'python rosenbrock.py'}, + num_samples = 1, + metric = 'fx', + mode = 'min', + algorithm = 'variant_generator', + max_concurrent = 3) \ No newline at end of file diff --git a/netpyne/batchtools/examples/rosenbrock/coupled_rosenbrock/rosenbrock.py b/netpyne/batchtools/examples/rosenbrock/coupled_rosenbrock/rosenbrock.py new file mode 100644 index 000000000..cc957b9d3 --- /dev/null +++ b/netpyne/batchtools/examples/rosenbrock/coupled_rosenbrock/rosenbrock.py @@ -0,0 +1,41 @@ +from netpyne.batchtools import specs, comm +import json + +# ---- Rosenbrock Function & Constant Definition ---- # + +""" +The rosenbrock minimum is at (A, A**2), where rosenbrock(A, A**2) = 0 +""" +def rosenbrock(x0, x1): + return 100 * (x1 - x0**2)**2 + (A - x0)**2 + +A = 1 +# --------------------------------------------------- # + +# ----------- cfg creation & batch update ----------- # + +cfg = specs.SimConfig() + +cfg.simLabel = 'rosenbrock' +cfg.saveFolder = '.' 
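+
+# x0_x1 below is a single paired parameter: batch.py zips x0 and x1 together,
+# and cfg.update_cfg() writes each (x0, x1) pair into this two-element list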
+ +cfg.x0_x1 = [1, 1] + +cfg.update_cfg() + +# --------------------------------------------------- # + +# -------------- unpacking x0_x1 list -------------- # +x0, x1 = cfg.x0_x1 +# --------------------------------------------------- # + + +# comm creation, calculation and result transmission # +comm.initialize() + +out_json = json.dumps({'x0': x0, 'x1': x1, 'fx': rosenbrock(x0, x1)}) +if comm.is_host(): + print(out_json) + comm.send(out_json) + comm.close() + diff --git a/netpyne/batchtools/examples/rosenbrock/fanova_rosenbrock/batch.py b/netpyne/batchtools/examples/rosenbrock/fanova_rosenbrock/batch.py new file mode 100644 index 000000000..90c401225 --- /dev/null +++ b/netpyne/batchtools/examples/rosenbrock/fanova_rosenbrock/batch.py @@ -0,0 +1,23 @@ +from netpyne.batchtools.search import search + +params = {'x.0': [0, 3], + 'x.1': [0, 3], + 'x.2': [0, 3], + 'x.3': [0, 3], + } + +# use shell_config if running directly on the machine +shell_config = {'command': 'python rosenbrock.py',} + +search(job_type = 'sh', # or sh + comm_type = 'socket', + label = 'optuna', + params = params, + output_path = '../optuna_batch', + checkpoint_path = '../ray', + run_config = {'command': 'python rosenbrock.py'}, + num_samples = 9, + metric = 'fx', + mode = 'min', + algorithm = 'optuna', + max_concurrent = 3) diff --git a/netpyne/batchtools/examples/rosenbrock/fanova_rosenbrock/rosenbrock.py b/netpyne/batchtools/examples/rosenbrock/fanova_rosenbrock/rosenbrock.py new file mode 100644 index 000000000..e6ce3b29d --- /dev/null +++ b/netpyne/batchtools/examples/rosenbrock/fanova_rosenbrock/rosenbrock.py @@ -0,0 +1,49 @@ +from netpyne.batchtools import specs, comm +import json + +# --- Rosenbrock Functions & Constant Definitions --- # + +""" +The rosenbrock_v0 (coupled rosenbrock) +""" + +A = 1 + + +def rosenbrock_v0(*args): + if len(args) % 2: + raise ValueError('rosenbrock_v0 requires an even number of arguments') + return sum(100 * (args[i]**2 - args[i+1])**2 + (args[i] - A)**2 for i in range(0, len(args), 2)) + + +""" +The rosenbrock_v1 +""" + + +def rosenbrock_v1(*args): + return sum(100 * (args[i+1] - args[i]**2)**2 + (A - args[i])**2 for i in range(0, len(args))) + + +# --------------------------------------------------- # + +# ----------- cfg creation & batch update ----------- # + +cfg = specs.SimConfig({'x': [None] * 4}) + +cfg.simLabel = 'rosenbrock' +cfg.saveFolder = '.' 
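+
+# cfg.x was pre-populated as [None] * 4 through the SimConfig constructor above;
+# the search in batch.py targets x.0 ... x.3, and update_cfg() fills in each element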
+ +cfg.update_cfg() + +# --------------------------------------------------- # + +# comm creation, calculation and result transmission # +comm.initialize() + +out_json = json.dumps({'x': cfg.x, 'fx': rosenbrock_v0(*cfg.x)}) +if comm.is_host(): + print(out_json) + comm.send(out_json) + comm.close() + diff --git a/netpyne/batchtools/examples/rosenbrock/nested_rosenbrock/batch.py b/netpyne/batchtools/examples/rosenbrock/nested_rosenbrock/batch.py new file mode 100644 index 000000000..cf35c487d --- /dev/null +++ b/netpyne/batchtools/examples/rosenbrock/nested_rosenbrock/batch.py @@ -0,0 +1,23 @@ +from netpyne.batchtools.search import search +import numpy + + +params = {'xn.0': numpy.arange(0, 5), + 'xn.1': numpy.arange(0, 5) + } + +# use shell_config if running directly on the machine +shell_config = {'command': 'python rosenbrock.py',} + +search(job_type = 'sh', # or sh + comm_type = 'socket', + label = 'grid', + params = params, + output_path = '../grid_batch', + checkpoint_path = '../ray', + run_config = {'command': 'python rosenbrock.py'}, + num_samples = 1, + metric = 'fx', + mode = 'min', + algorithm = 'variant_generator', + max_concurrent = 3) \ No newline at end of file diff --git a/netpyne/batchtools/examples/rosenbrock/nested_rosenbrock/rosenbrock.py b/netpyne/batchtools/examples/rosenbrock/nested_rosenbrock/rosenbrock.py new file mode 100644 index 000000000..b1956b2db --- /dev/null +++ b/netpyne/batchtools/examples/rosenbrock/nested_rosenbrock/rosenbrock.py @@ -0,0 +1,41 @@ +from netpyne.batchtools import specs, comm +import json + +# ---- Rosenbrock Function & Constant Definition ---- # + +""" +The rosenbrock minimum is at (A, A**2), where rosenbrock(A, A**2) = 0 +""" +def rosenbrock(x0, x1): + return 100 * (x1 - x0**2)**2 + (A - x0)**2 + +A = 1 +# --------------------------------------------------- # + +# ----------- cfg creation & batch update ----------- # + +cfg = specs.SimConfig() + +cfg.simLabel = 'rosenbrock' +cfg.saveFolder = '.' + +cfg.xn = [1, 1] + +cfg.update_cfg() + +# --------------------------------------------------- # + +# ---------------- unpacking x list ---------------- # +x0, x1 = cfg.xn +# --------------------------------------------------- # + + +# comm creation, calculation and result transmission # +comm.initialize() + +out_json = json.dumps({'x0': x0, 'x1': x1, 'fx': rosenbrock(x0, x1)}) +if comm.is_host(): + print(out_json) + comm.send(out_json) + comm.close() + diff --git a/netpyne/batchtools/runners.py b/netpyne/batchtools/runners.py index 687ef3645..278eed991 100644 --- a/netpyne/batchtools/runners.py +++ b/netpyne/batchtools/runners.py @@ -1,8 +1,25 @@ -from batchtk.runtk.utils import convert, set_map, create_script +#from batchtk.runtk.utils import convert, set_map, create_script from batchtk import runtk from batchtk.runtk.runners import Runner, get_class import os +def set_map(self, assign_path, value): + assigns = assign_path.split('.') + if len(assigns) == 1: + self.__setitem__(assigns[0], value) + return + crawler = self.__getitem__(assigns[0]) + for gi in assigns[1:-1]: + try: + crawler = crawler.__getitem__(gi) + except TypeError: # case for lists. 
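+            # list indices must be integers, so retry the lookup with int(gi)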
+ crawler = crawler.__getitem__(int(gi)) + try: + crawler.__setitem__(assigns[-1], value) + except TypeError: + crawler.__setitem__(int(assigns[-1]), value) + return + class NetpyneRunner(Runner): """ runner for netpyne @@ -47,12 +64,14 @@ def _set_inheritance(self, inherit): raise KeyError("inheritance {} not found in runtk.RUNNERS (please check runtk.RUNNERS for valid strings...".format(inherit)) - def get_NetParams(self): + def get_NetParams(self, netParamsDict=None): """ Creates / Returns a NetParams instance Parameters ---------- self + netParamsDict - optional dictionary to create NetParams instance (defaults to None) + - to be called during initial function call only Returns ------- @@ -63,7 +82,7 @@ def get_NetParams(self): return self.netParams else: from netpyne import specs - self.netParams = specs.NetParams() + self.netParams = specs.NetParams(netParamsDict) return self.netParams def update_cfg(self): #intended to take `cfg` instance as self @@ -84,12 +103,14 @@ def update_cfg(self): #intended to take `cfg` instance as self except Exception as e: raise Exception("failed on mapping: cfg.{} with value: {}\n{}".format(assign_path, value, e)) - def get_SimConfig(self): + def get_SimConfig(self, simConfigDict=None): """ Creates / Returns a SimConfig instance Parameters ---------- self - NetpyneRunner instance + simConfigDict - optional dictionary to create NetParams instance (defaults to None) + - to be called during initial function call only Returns ------- @@ -102,7 +123,7 @@ def get_SimConfig(self): self.cfg = type("Runner_SimConfig", (specs.SimConfig,), {'__mappings__': self.mappings, 'update_cfg': update_cfg, - 'update': update_cfg})() + 'update': update_cfg})(simConfigDict) return self.cfg def set_SimConfig(self): diff --git a/netpyne/cell/inputs.py b/netpyne/cell/inputs.py index 6ea1dcd99..53109e4dc 100644 --- a/netpyne/cell/inputs.py +++ b/netpyne/cell/inputs.py @@ -89,7 +89,9 @@ def createRhythmicPattern(params, rand): # Uniform Distribution elif distribution == 'uniform': n_inputs = params['repeats'] * freq * (stop - start) / 1000.0 - t_array = rand.uniform(start, stop, int(n_inputs)) + rand.uniform(start, stop) + vec = h.Vector(int(n_inputs)) + t_array = np.array(vec.setrand(rand)) if eventsPerCycle == 2: # Two arrays store doublet times t_input_low = t_array - 5 diff --git a/netpyne/network/conn.py b/netpyne/network/conn.py index 079631e07..cd7c1f4b8 100644 --- a/netpyne/network/conn.py +++ b/netpyne/network/conn.py @@ -406,9 +406,10 @@ def fullConn(self, preCellsTags, postCellsTags, connParam): if sim.cfg.verbose: print('Generating set of all-to-all connections (rule: %s) ...' 
% (connParam['label'])) - if sim.rank == 0 and not sim.cfg.verbose: pbar = tqdm(total=len(postCellsTags.items()), ascii=True, - desc=' ' + connParam['label'], position=0, leave=True, - bar_format= '{l_bar}{bar}| Creating synaptic connections for {n_fmt}/{total_fmt} postsynaptic cells on node %i (all-to-all connectivity)' % sim.rank) + if sim.rank == 0 and not sim.cfg.verbose and sim.cfg.progressBar: + pbar = tqdm(total=len(postCellsTags.items()), ascii=True, + desc=' ' + connParam['label'], position=0, leave=True, + bar_format= '{l_bar}{bar}| Creating synaptic connections for {n_fmt}/{total_fmt} postsynaptic cells on node %i (all-to-all connectivity)' % sim.rank) # get list of params that have a lambda function paramsStrFunc = [param for param in [p + 'Func' for p in self.connStringFuncParams] if param in connParam] @@ -427,11 +428,11 @@ def fullConn(self, preCellsTags, postCellsTags, connParam): } for postCellGid in postCellsTags: # for each postsyn cell - if sim.rank == 0 and not sim.cfg.verbose: pbar.update(1) + if sim.rank == 0 and not sim.cfg.verbose and sim.cfg.progressBar: pbar.update(1) if postCellGid in self.gid2lid: # check if postsyn is in this node's list of gids for preCellGid, preCellTags in preCellsTags.items(): # for each presyn cell self._addCellConn(connParam, preCellGid, postCellGid, preCellsTags) # add connection - if sim.rank == 0 and not sim.cfg.verbose: pbar.close() + if sim.rank == 0 and not sim.cfg.verbose and sim.cfg.progressBar: pbar.close() # ----------------------------------------------------------------------------- @@ -510,9 +511,11 @@ def probConn(self, preCellsTags, postCellsTags, connParam): if sim.cfg.verbose: print('Generating set of probabilistic connections (rule: %s) ...' % (connParam['label'])) - if sim.rank == 0 and not sim.cfg.verbose: pbar = tqdm(total=len(postCellsTags.items()), ascii=True, - desc=' ' + str(connParam['label']), position=0, leave=True, - bar_format= '{l_bar}{bar}| Creating synaptic connections for {n_fmt}/{total_fmt} postsynaptic cells on node %i (probabilistic connectivity)' % sim.rank) + + if sim.rank == 0 and not sim.cfg.verbose and sim.cfg.progressBar: + pbar = tqdm(total=len(postCellsTags.items()), ascii=True, + desc=' ' + str(connParam['label']), position=0, leave=(sim.cfg.progressBar == 2), + bar_format= '{l_bar}{bar}| Creating synaptic connections for {n_fmt}/{total_fmt} postsynaptic cells on node %i (probabilistic connectivity)' % sim.rank) allRands = self.generateRandsPrePost(preCellsTags, postCellsTags) @@ -546,14 +549,14 @@ def probConn(self, preCellsTags, postCellsTags, connParam): probMatrix, allRands, connParam['disynapticBias'], prePreGids, postPreGids ) for preCellGid, postCellGid in connGids: - if sim.rank == 0 and not sim.cfg.verbose: pbar.update(1) + if sim.rank == 0 and not sim.cfg.verbose and sim.cfg.progressBar: pbar.update(1) for paramStrFunc in paramsStrFunc: # call lambda functions to get weight func args connParam[paramStrFunc + 'Args'] = { k: v if isinstance(v, Number) else v(preCellsTags[preCellGid], postCellsTags[postCellGid]) for k, v in connParam[paramStrFunc + 'Vars'].items() } self._addCellConn(connParam, preCellGid, postCellGid, preCellsTags) # add connection - if sim.rank == 0 and not sim.cfg.verbose: pbar.close() + if sim.rank == 0 and not sim.cfg.verbose and sim.cfg.progressBar: pbar.close() # standard probabilistic conenctions else: # print('rank %d'%(sim.rank)) @@ -561,7 +564,7 @@ def probConn(self, preCellsTags, postCellsTags, connParam): # calculate the conn preGids of the each pre and 
post cell # for postCellGid,postCellTags in sorted(postCellsTags.items()): # for each postsyn cell for postCellGid, postCellTags in postCellsTags.items(): # for each postsyn cell # for each postsyn cell - if sim.rank==0: pbar.update(1) + if sim.rank == 0 and not sim.cfg.verbose and sim.cfg.progressBar: pbar.update(1) if postCellGid in self.gid2lid: # check if postsyn is in this node for preCellGid, preCellTags in preCellsTags.items(): # for each presyn cell probability = ( @@ -578,7 +581,7 @@ def probConn(self, preCellsTags, postCellsTags, connParam): ) # connParam[paramStrFunc+'Args'] = {k:v if isinstance(v, Number) else v(preCellTags,postCellTags) for k,v in connParam[paramStrFunc+'Vars'].items()} self._addCellConn(connParam, preCellGid, postCellGid, preCellsTags) # add connection - if sim.rank == 0 and not sim.cfg.verbose: pbar.close() + if sim.rank == 0 and not sim.cfg.verbose and sim.cfg.progressBar: pbar.close() # ----------------------------------------------------------------------------- # Generate random unique integers @@ -651,9 +654,10 @@ def convConn(self, preCellsTags, postCellsTags, connParam): if sim.cfg.verbose: print('Generating set of convergent connections (rule: %s) ...' % (connParam['label'])) - if sim.rank == 0 and not sim.cfg.verbose: pbar = tqdm(total=len(postCellsTags.items()), ascii=True, - desc=' ' + connParam['label'], position=0, leave=True, - bar_format= '{l_bar}{bar}| Creating synaptic connections for {n_fmt}/{total_fmt} postsynaptic cells on node %i (convergent connectivity)' % sim.rank) + if sim.rank == 0 and not sim.cfg.verbose and sim.cfg.progressBar: + pbar = tqdm(total=len(postCellsTags.items()), ascii=True, + desc=' ' + connParam['label'], position=0, leave=(sim.cfg.progressBar == 2), + bar_format= '{l_bar}{bar}| Creating synaptic connections for {n_fmt}/{total_fmt} postsynaptic cells on node %i (convergent connectivity)' % sim.rank) # get list of params that have a lambda function paramsStrFunc = [param for param in [p + 'Func' for p in self.connStringFuncParams] if param in connParam] @@ -673,7 +677,7 @@ def convConn(self, preCellsTags, postCellsTags, connParam): hashPreCells = sim.hashList(preCellsTagsKeys) for postCellGid, postCellTags in postCellsTags.items(): # for each postsyn cell - if sim.rank == 0 and not sim.cfg.verbose: pbar.update(1) + if sim.rank == 0 and not sim.cfg.verbose and sim.cfg.progressBar: pbar.update(1) if postCellGid in self.gid2lid: # check if postsyn is in this node convergence = ( connParam['convergenceFunc'][postCellGid] @@ -706,7 +710,7 @@ def convConn(self, preCellsTags, postCellsTags, connParam): if preCellGid != postCellGid: # if not self-connection self._addCellConn(connParam, preCellGid, postCellGid, preCellsTags) # add connection - if sim.rank == 0 and not sim.cfg.verbose: pbar.close() + if sim.rank == 0 and not sim.cfg.verbose and sim.cfg.progressBar: pbar.close() # ----------------------------------------------------------------------------- @@ -739,9 +743,10 @@ def divConn(self, preCellsTags, postCellsTags, connParam): if sim.cfg.verbose: print('Generating set of divergent connections (rule: %s) ...' 
% (connParam['label'])) - if sim.rank == 0 and not sim.cfg.verbose: pbar = tqdm(total=len(preCellsTags.items()), ascii=True, - desc=' ' + connParam['label'], position=0, leave=True, - bar_format= '{l_bar}{bar}| Creating synaptic connections for {n_fmt}/{total_fmt} presynaptic cells on node %i (divergent connectivity)' % sim.rank) + if sim.rank == 0 and not sim.cfg.verbose and sim.cfg.progressBar: + pbar = tqdm(total=len(preCellsTags.items()), ascii=True, + desc=' ' + connParam['label'], position=0, leave=(sim.cfg.progressBar == 2), + bar_format= '{l_bar}{bar}| Creating synaptic connections for {n_fmt}/{total_fmt} presynaptic cells on node %i (divergent connectivity)' % sim.rank) # get list of params that have a lambda function paramsStrFunc = [param for param in [p + 'Func' for p in self.connStringFuncParams] if param in connParam] @@ -761,7 +766,7 @@ def divConn(self, preCellsTags, postCellsTags, connParam): hashPostCells = sim.hashList(postCellsTagsKeys) for preCellGid, preCellTags in preCellsTags.items(): # for each presyn cell - if sim.rank == 0 and not sim.cfg.verbose: pbar.update(1) + if sim.rank == 0 and not sim.cfg.verbose and sim.cfg.progressBar: pbar.update(1) divergence = ( connParam['divergenceFunc'][preCellGid] if 'divergenceFunc' in connParam else connParam['divergence'] ) # num of presyn conns / postsyn cell @@ -788,7 +793,7 @@ def divConn(self, preCellsTags, postCellsTags, connParam): if preCellGid != postCellGid: # if not self-connection self._addCellConn(connParam, preCellGid, postCellGid, preCellsTags) # add connection - if sim.rank == 0 and not sim.cfg.verbose: pbar.close() + if sim.rank == 0 and not sim.cfg.verbose and sim.cfg.progressBar: pbar.close() # ----------------------------------------------------------------------------- @@ -821,10 +826,10 @@ def fromListConn(self, preCellsTags, postCellsTags, connParam): if sim.cfg.verbose: print('Generating set of connections from list (rule: %s) ...' % (connParam['label'])) - if sim.rank == 0 and not sim.cfg.verbose: pbar = tqdm(total=len(connParam['connList']), ascii=True, - desc=' ' + connParam['label'], position=0, leave=True, - bar_format= '{l_bar}{bar}| Creating synaptic connections for {n_fmt}/{total_fmt} pairs of neurons on node %i (from list)' % sim.rank) - + if sim.rank == 0 and not sim.cfg.verbose and sim.cfg.progressBar: + pbar = tqdm(total=len(connParam['connList']), ascii=True, + desc=' ' + connParam['label'], position=0, leave=(sim.cfg.progressBar == 2), + bar_format= '{l_bar}{bar}| Creating synaptic connections for {n_fmt}/{total_fmt} pairs of neurons on node %i (from list)' % sim.rank) orderedPreGids = sorted(preCellsTags) orderedPostGids = sorted(postCellsTags) @@ -864,7 +869,7 @@ def fromListConn(self, preCellsTags, postCellsTags, connParam): connParam['preSecFromList'] = list(connParam['preSec']) for iconn, (relativePreId, relativePostId) in enumerate(connParam['connList']): # for each postsyn cell - if sim.rank == 0 and not sim.cfg.verbose: pbar.update(1) + if sim.rank == 0 and not sim.cfg.verbose and sim.cfg.progressBar: pbar.update(1) preCellGid = orderedPreGids[relativePreId] postCellGid = orderedPostGids[relativePostId] if postCellGid in self.gid2lid: # check if postsyn is in this node's list of gids @@ -885,7 +890,7 @@ def fromListConn(self, preCellsTags, postCellsTags, connParam): # TODO: consider cfg.allowSelfConns? 
if preCellGid != postCellGid: # if not self-connection self._addCellConn(connParam, preCellGid, postCellGid, preCellsTags) # add connection - if sim.rank == 0 and not sim.cfg.verbose: pbar.close() + if sim.rank == 0 and not sim.cfg.verbose and sim.cfg.progressBar: pbar.close() # ----------------------------------------------------------------------------- diff --git a/netpyne/sim/save.py b/netpyne/sim/save.py index 3bae90a9c..5e21f6924 100644 --- a/netpyne/sim/save.py +++ b/netpyne/sim/save.py @@ -103,26 +103,30 @@ def saveData(include=None, filename=None, saveLFP=True): print(('Copying cfg file %s ... ' % simName)) source = sim.cfg.backupCfgFile[0] targetFolder = sim.cfg.backupCfgFile[1] - # make dir + + # make directories required to make the target folder try: - os.mkdir(targetFolder) - except OSError: - if not os.path.exists(targetFolder): - print(' Could not create target folder: %s' % (targetFolder)) + os.makedirs(targetFolder, exist_ok=True) + except Exception as e: + print('%s: Exception: %s,' % (os.path.abspath(__file__), e)) + raise SystemExit('Could not create %s' % (targetFolder)) + # copy file targetFile = targetFolder + '/' + simName + '_cfg.py' if os.path.exists(targetFile): print(' Removing prior cfg file', targetFile) os.system('rm ' + targetFile) os.system('cp ' + source + ' ' + targetFile) - - # create folder if missing + # looks like the logic for sim.cfg.filename and targetFolder is not entirely there, + # could be calling os.path.dirname(None) ? #TODO + # create the missing folder & directory for folder if one or both are missing targetFolder = os.path.dirname(sim.cfg.filename) - if targetFolder and not os.path.exists(targetFolder): + if targetFolder: try: - os.mkdir(targetFolder) - except OSError: - print(' Could not create target folder: %s' % (targetFolder)) + os.makedirs(targetFolder, exist_ok=True) + except Exception as e: + print('%s: Exception: %s,' % (os.path.abspath(__file__), e)) + raise SystemExit('Could not create %s' % (targetFolder)) # saving data if not include: @@ -175,14 +179,14 @@ def saveData(include=None, filename=None, saveLFP=True): if hasattr(sim.cfg, 'simLabel') and sim.cfg.simLabel: filePath = os.path.join(sim.cfg.saveFolder, sim.cfg.simLabel + '_data' + timestampStr) - # create folder if missing + # also strange conditional ^^^, filePath must exist or an error occurs. 
make directories for the target folder if they do not already exist targetFolder = os.path.dirname(filePath) - if targetFolder and not os.path.exists(targetFolder): + if targetFolder: try: - os.mkdir(targetFolder) - except OSError: - print(' Could not create target folder: %s' % (targetFolder)) - + os.makedirs(targetFolder, exist_ok=True) + except Exception as e: + print('%s: Exception: %s,' % (os.path.abspath(__file__), e)) + raise SystemExit('Could not create %s' % (targetFolder)) # Save to pickle file if sim.cfg.savePickle: import pickle @@ -390,12 +394,12 @@ def intervalSave(simTime, gatherLFP=True): targetFolder = os.path.join(sim.cfg.saveFolder, 'interval_data') else: targetFolder = 'interval_data' - - if targetFolder and not os.path.exists(targetFolder): - try: - os.makedirs(targetFolder) - except OSError: - print(' Could not create target folder: %s' % (targetFolder)) + # how can targetFolder ^^^ NOT have a value within this conditional + try: + os.makedirs(targetFolder, exist_ok=True) + except Exception as e: + print('%s: Exception: %s,' % (os.path.abspath(__file__), e)) + raise SystemExit('Could not create %s' % (targetFolder)) include = sim.cfg.saveDataInclude @@ -624,8 +628,11 @@ def saveDataInNodes(filename=None, saveLFP=True, removeTraces=False, saveFolder= else: saveFolder = os.path.join(saveFolder, sim.cfg.simLabel + '_node_data') # YES saveFolder - if not os.path.exists(saveFolder): + try: os.makedirs(saveFolder, exist_ok=True) + except Exception as e: + print('%s: Exception: %s,' % (os.path.abspath(__file__), e)) + raise SystemExit('Could not create %s' % (saveFolder)) sim.pc.barrier() if sim.rank == 0: @@ -776,7 +783,7 @@ def saveModel(netParams, simConfig, srcPath, dstPath=None, exportNetParamsAsPyth shutil.rmtree(dstDir) # (re)create dstDir and create /src dir in it where files will be stored by default - os.makedirs(os.path.join(dstDir, 'src')) + os.makedirs(os.path.join(dstDir, 'src'), exist_ok=True) # create default index indexData = { 'netParams': f"src/netParams{'.py' if exportNetParamsAsPython else '.json'}", diff --git a/netpyne/sim/setup.py b/netpyne/sim/setup.py index 4c1094cdc..cc8646726 100644 --- a/netpyne/sim/setup.py +++ b/netpyne/sim/setup.py @@ -463,6 +463,8 @@ def setupRecording(): break if sim.cfg.recordStim: + if sim.cfg.verbose: + print(" Recording stims") sim.simData['stims'] = Dict() for cell in sim.net.cells: cell.recordStimSpikes() @@ -495,6 +497,8 @@ def setupRecording(): # record h.t if sim.cfg.recordTime and len(sim.simData) > 0: + if sim.cfg.verbose: + print(" Recording h.t") try: sim.simData['t'] = h.Vector() # sim.cfg.duration/sim.cfg.recordStep+1).resize(0) if hasattr(sim.cfg, 'use_local_dt') and sim.cfg.use_local_dt: @@ -510,7 +514,8 @@ def setupRecording(): # print recorded traces cat = 0 total = 0 - for key in sim.simData: + keys = [k for k in sim.simData.keys() if k not in ['t', 'stims', 'spkt', 'spkid']] + for key in keys: if sim.cfg.verbose: print((" Recording: %s:" % key)) if len(sim.simData[key]) > 0: diff --git a/netpyne/specs/netParams.py b/netpyne/specs/netParams.py index 72643fe50..a483125a4 100644 --- a/netpyne/specs/netParams.py +++ b/netpyne/specs/netParams.py @@ -547,12 +547,12 @@ def save(self, filename): folder = filename.split(basename)[0] ext = basename.split('.')[1] - # make dir + # make directories if they do not already exist: try: - os.mkdir(folder) - except OSError: - if not os.path.exists(folder): - print(' Could not create', folder) + os.makedirs(folder, exist_ok=True) + except Exception as e: + print('%s: 
Exception: %s,' % (os.path.abspath(__file__), e)) + raise SystemExit('Could not create %s' % (folder)) dataSave = {'net': {'params': self.todict()}} diff --git a/netpyne/specs/simConfig.py b/netpyne/specs/simConfig.py index 45a1c475a..0e3e1728e 100644 --- a/netpyne/specs/simConfig.py +++ b/netpyne/specs/simConfig.py @@ -72,7 +72,7 @@ def __init__(self, simConfigDict=None): self.printPopAvgRates = False # print population avg firing rates after run self.printSynsAfterRule = False # print total of connections after each conn rule is applied self.verbose = False # show detailed messages - + self.progressBar = 2 # (0: no progress bar; 1: progress bar w/ leave = False; 2: progress bar w/ leave = True) # Recording self.recordCells = [] # what cells to record traces from (eg. 'all', 5, or 'PYR') self.recordTraces = {} # Dict of traces to record @@ -145,12 +145,12 @@ def save(self, filename): folder = filename.split(basename)[0] ext = basename.split('.')[1] - # make dir + # make directories if they do not already exist: try: - os.mkdir(folder) - except OSError: - if not os.path.exists(folder): - print(' Could not create', folder) + os.makedirs(folder, exist_ok=True) + except Exception as e: + print('%s: Exception: %s,' % (os.path.abspath(__file__), e)) + raise SystemExit('Could not create %s' % (folder)) dataSave = {'simConfig': self.__dict__}