From 43e618315a8ebf8107d8801e28c55797fe7a8b68 Mon Sep 17 00:00:00 2001 From: Pooja Babu Date: Wed, 15 May 2024 15:26:04 +0200 Subject: [PATCH 01/19] Numeric solver for synapse models --- .../codegeneration/nest_code_generator.py | 3 + .../common/SynapseHeader.h.jinja2 | 98 +++++++++++++++---- pynestml/utils/ast_utils.py | 19 ++-- 3 files changed, 92 insertions(+), 28 deletions(-) diff --git a/pynestml/codegeneration/nest_code_generator.py b/pynestml/codegeneration/nest_code_generator.py index 27c99c17f..2a2abce9f 100644 --- a/pynestml/codegeneration/nest_code_generator.py +++ b/pynestml/codegeneration/nest_code_generator.py @@ -559,6 +559,9 @@ def _get_synapse_model_namespace(self, synapse: ASTSynapse) -> Dict: expr_ast.update_scope(synapse.get_equations_blocks()[0].get_scope()) expr_ast.accept(ASTSymbolTableVisitor()) namespace["numeric_update_expressions"][sym] = expr_ast + ASTUtils.assign_numeric_non_numeric_state_variables(synapse, namespace["numeric_state_variables"], namespace[ + "numeric_update_expressions"] if "numeric_update_expressions" in namespace.keys() else None, namespace[ + "update_expressions"] if "update_expressions" in namespace.keys() else None) namespace["spike_updates"] = synapse.spike_updates diff --git a/pynestml/codegeneration/resources_nest/point_neuron/common/SynapseHeader.h.jinja2 b/pynestml/codegeneration/resources_nest/point_neuron/common/SynapseHeader.h.jinja2 index e630c2ae6..899ea8641 100644 --- a/pynestml/codegeneration/resources_nest/point_neuron/common/SynapseHeader.h.jinja2 +++ b/pynestml/codegeneration/resources_nest/point_neuron/common/SynapseHeader.h.jinja2 @@ -69,6 +69,19 @@ along with NEST. If not, see . #include "volume_transmitter.h" {%- endif %} +{%- if uses_numeric_solver %} +{%- if numeric_solver == "rk45" %} + +#ifndef HAVE_GSL +#error "The GSL library is required for the Runge-Kutta solver." 
+#endif + +// External includes: +#include +#include +#include +{%- endif %} +{%- endif %} // Includes from sli: #include "dictdatum.h" @@ -299,12 +312,13 @@ private: //! state vector, must be C-array for GSL solver double ode_state[STATE_VEC_SIZE]; - // state variables from state block -{%- filter indent(4,True) %} -{%- for variable in synapse.get_state_symbols() %} -{%- include "directives_cpp/MemberDeclaration.jinja2" %} -{%- endfor %} -{%- endfilter %} +{# // state variables from state block#} +{#{%- filter indent(4,True) %}#} +{#{%- for variable_symbol in synapse.get_state_symbols() %}#} +{#{%- set variable = utils.get_state_variable_by_name(astnode, variable_symbol.get_symbol_name()) %}#} +{#{%- include "directives_cpp/MemberDeclaration.jinja2" %}#} +{#{%- endfor %}#} +{#{%- endfilter %}#} {%- endif %} State_() {}; @@ -372,9 +386,40 @@ private: {%- endfor %} }; +{%- if uses_numeric_solver %} +{%- if numeric_solver == "rk45" %} + struct Buffers_ + { + + // ----------------------------------------------------------------------- + // GSL ODE solver data structures + // ----------------------------------------------------------------------- + + gsl_odeiv_step* __s; //!< stepping function + gsl_odeiv_control* __c; //!< adaptive stepsize control function + gsl_odeiv_evolve* __e; //!< evolution function + gsl_odeiv_system __sys; //!< struct describing system + + // __integration_step should be reset with the neuron on ResetNetwork, + // but remain unchanged during calibration. Since it is initialized with + // step_, and the resolution cannot change after nodes have been created, + // it is safe to place both here. + double __step; //!< step size in ms + double __integration_step; //!< current integration time step, updated by GSL + + Buffers_() {}; + }; +{%- endif %} +{%- endif %} + Parameters_ P_; //!< Free parameters. State_ S_; //!< Dynamic state. 
Variables_ V_; //!< Internal Variables +{%- if uses_numeric_solver %} +{%- if numeric_solver == "rk45" %} + Buffers_ B_; //!< Buffers. +{%- endif %} +{%- endif %} {%- if synapse.get_state_symbols()|length > 0 %} // ------------------------------------------------------------------------- // Getters/setters for state block @@ -437,6 +482,8 @@ private: void recompute_internal_variables(); + std::string get_name() const; + public: // this line determines which common properties to use typedef {{synapseName}}CommonSynapseProperties CommonPropertiesType; @@ -1138,6 +1185,13 @@ void {{synapseName}}< targetidentifierT >::recompute_internal_variables() {%- endfilter %} } +template < typename targetidentifierT > +std::string {{synapseName}}< targetidentifierT >::get_name() const +{ + std::string s ("{{ synapseName }}"); + return s; +} + /** * constructor **/ @@ -1145,7 +1199,13 @@ template < typename targetidentifierT > {{synapseName}}< targetidentifierT >::{{synapseName}}() : ConnectionBase() { const double __resolution = nest::Time::get_resolution().get_ms(); // do not remove, this is necessary for the resolution() function +{%- if uses_numeric_solver %} +{%- if numeric_solver == "rk45" %} + // use a default "good enough" value for the absolute error. 
It can be adjusted via `node.set()` + P_.__gsl_error_tol = 1e-3; +{%- endif %} +{%- endif %} {%- for variable_symbol in synapse.get_parameter_symbols() %} {%- set variable = utils.get_parameter_variable_by_name(astnode, variable_symbol.get_symbol_name()) %} {%- set isHomogeneous = PyNestMLLexer["DECORATOR_HOMOGENEOUS"] in variable_symbol.get_decorators() %} @@ -1222,19 +1282,19 @@ inline void const double old___h = V_.__h; V_.__h = timestep; recompute_internal_variables(); -{%- filter indent(2, True) %} -{%- with analytic_state_variables_ = analytic_state_variables %} -{%- include "directives_cpp/AnalyticIntegrationStep_begin.jinja2" %} -{%- endwith %} -{%- if uses_numeric_solver %} -{%- include "directives_cpp/GSLIntegrationStep.jinja2" %} -{%- endif %} -{%- with analytic_state_variables_ = analytic_state_variables %} -{%- include "directives_cpp/AnalyticIntegrationStep_end.jinja2" %} -{%- endwith %} -{%- endfilter %} - V_.__h = old___h; - recompute_internal_variables(); // XXX: can be skipped? 
+{#{%- filter indent(2, True) %}#} +{#{%- with analytic_state_variables_ = analytic_state_variables %}#} +{#{%- include "directives_cpp/AnalyticIntegrationStep_begin.jinja2" %}#} +{#{%- endwith %}#} +{#{%- if uses_numeric_solver %}#} +{#{%- include "directives_cpp/GSLIntegrationStep.jinja2" %}#} +{#{%- endif %}#} +{#{%- with analytic_state_variables_ = analytic_state_variables %}#} +{#{%- include "directives_cpp/AnalyticIntegrationStep_end.jinja2" %}#} +{#{%- endwith %}#} +{#{%- endfilter %}#} +{# V_.__h = old___h;#} +{# recompute_internal_variables(); // XXX: can be skipped?#} // NESTML generated code for the update block: diff --git a/pynestml/utils/ast_utils.py b/pynestml/utils/ast_utils.py index 1e8b28bb4..1047cb5b9 100644 --- a/pynestml/utils/ast_utils.py +++ b/pynestml/utils/ast_utils.py @@ -2174,7 +2174,7 @@ def get_spike_input_ports_in_pairs(cls, neuron: ASTNeuron) -> Dict[int, List[Var return rport_to_port_map @classmethod - def assign_numeric_non_numeric_state_variables(cls, neuron, numeric_state_variable_names, numeric_update_expressions, update_expressions): + def assign_numeric_non_numeric_state_variables(cls, model, numeric_state_variable_names, numeric_update_expressions, update_expressions): r"""For each ASTVariable, set the ``node._is_numeric`` member to True or False based on whether this variable will be solved with the analytic or numeric solver. Ideally, this would not be a property of the ASTVariable as it is an implementation detail (that only emerges during code generation) and not an intrinsic part of the model itself. 
However, this approach is preferred over setting it as a property of the variable printers as it would have to make each printer aware of all models and variables therein.""" @@ -2193,10 +2193,10 @@ def visit_variable(self, node): visitor = ASTVariableOriginSetterVisitor() visitor._numeric_state_variables = numeric_state_variable_names - neuron.accept(visitor) + model.accept(visitor) - if "moved_spike_updates" in dir(neuron): - for expr in neuron.moved_spike_updates: + if "moved_spike_updates" in dir(model): + for expr in model.moved_spike_updates: expr.accept(visitor) if update_expressions: @@ -2207,12 +2207,13 @@ def visit_variable(self, node): for expr in numeric_update_expressions.values(): expr.accept(visitor) - for update_expr_list in neuron.spike_updates.values(): + for update_expr_list in model.spike_updates.values(): for update_expr in update_expr_list: update_expr.accept(visitor) - for update_expr in neuron.post_spike_updates.values(): - update_expr.accept(visitor) + if isinstance(model, ASTNeuron): + for update_expr in model.post_spike_updates.values(): + update_expr.accept(visitor) - for node in neuron.equations_with_delay_vars + neuron.equations_with_vector_vars: - node.accept(visitor) + for node in model.equations_with_delay_vars + model.equations_with_vector_vars: + node.accept(visitor) From a50f875c1b2a97f7b9252735b1f23a085a212135 Mon Sep 17 00:00:00 2001 From: Pooja Babu Date: Sun, 2 Jun 2024 21:23:55 +0200 Subject: [PATCH 02/19] Add numeric solver for synapses --- .../common/SynapseHeader.h.jinja2 | 16 ++++++++ .../GSLDifferentiationFunction.jinja2 | 37 ++++++++++++++++--- .../directives_cpp/GSLIntegrationStep.jinja2 | 4 ++ pynestml/utils/ast_utils.py | 3 +- 4 files changed, 54 insertions(+), 6 deletions(-) diff --git a/pynestml/codegeneration/resources_nest/point_neuron/common/SynapseHeader.h.jinja2 b/pynestml/codegeneration/resources_nest/point_neuron/common/SynapseHeader.h.jinja2 index eedcc971b..d7954b99d 100644 --- 
a/pynestml/codegeneration/resources_nest/point_neuron/common/SynapseHeader.h.jinja2 +++ b/pynestml/codegeneration/resources_nest/point_neuron/common/SynapseHeader.h.jinja2 @@ -115,6 +115,12 @@ namespace {{names_namespace}} {%- endif %} } +{%- if uses_numeric_solver %} +{%- for s in utils.create_integrate_odes_combinations(astnode) %} +extern "C" inline int {{neuronName}}_dynamics{% if s | length > 0 %}_{{ s }}{% endif %}( double, const double ode_state[], double f[], void* pnode ); +{%- endfor %} +{%- endif %} + class {{synapseName}}CommonSynapseProperties : public CommonSynapseProperties { public: @@ -236,6 +242,12 @@ public: {%- endif %} }; +{% if uses_numeric_solver %} +{%- for ast in utils.get_all_integrate_odes_calls_unique(synapse) %} +{%- include "directives_cpp/GSLDifferentiationFunction.jinja2" %} +{%- endfor %} +{%- endif %} + template < typename targetidentifierT > class {{synapseName}} : public Connection< targetidentifierT > { @@ -420,7 +432,11 @@ private: {%- if numeric_solver == "rk45" %} Buffers_ B_; //!< Buffers. 
{%- endif %} +{%- for s in utils.create_integrate_odes_combinations(astnode) %} + friend int {{neuronName}}_dynamics{% if s | length > 0 %}_{{ s }}{% endif %}( double, const double ode_state[], double f[], void* pnode ); +{%- endfor %} {%- endif %} + {%- if synapse.get_state_symbols()|length > 0 %} // ------------------------------------------------------------------------- // Getters/setters for state block diff --git a/pynestml/codegeneration/resources_nest/point_neuron/directives_cpp/GSLDifferentiationFunction.jinja2 b/pynestml/codegeneration/resources_nest/point_neuron/directives_cpp/GSLDifferentiationFunction.jinja2 index c40f71cf6..6c642a072 100644 --- a/pynestml/codegeneration/resources_nest/point_neuron/directives_cpp/GSLDifferentiationFunction.jinja2 +++ b/pynestml/codegeneration/resources_nest/point_neuron/directives_cpp/GSLDifferentiationFunction.jinja2 @@ -1,16 +1,22 @@ {# Creates GSL implementation of the differentiation step for the system of ODEs. -#} -extern "C" inline int {{neuronName}}_dynamics{% if ast.get_args() | length > 0 %}_{{ utils.integrate_odes_args_str_from_function_call(ast) }}{% endif %}(double __time, const double ode_state[], double f[], void* pnode) +{%- if neuronName is defined %} +{%- set modelName = neuronName %} +{%- else %} +{%- set modelName = synapseName %} +{%- endif %} +extern "C" inline int {{modelName}}_dynamics{% if ast.get_args() | length > 0 %}_{{ utils.integrate_odes_args_str_from_function_call(ast) }}{% endif %}(double __time, const double ode_state[], double f[], void* pnode) { - typedef {{neuronName}}::State_ State_; + typedef {{modelName}}::State_ State_; // get access to node so we can almost work as in a member function assert( pnode ); - const {{neuronName}}& node = *( reinterpret_cast< {{neuronName}}* >( pnode ) ); + const {{modelName}}& node = *( reinterpret_cast< {{modelName}}* >( pnode ) ); // ode_state[] here is---and must be---the state vector supplied by the integrator, // not the state vector in the 
node, node.S_.ode_state[]. +{%- if neuronName is defined %} {%- for eq_block in neuron.get_equations_blocks() %} {%- for ode in eq_block.get_declarations() %} {%- for inline_expr in utils.get_inline_expression_symbols(ode) %} @@ -22,7 +28,20 @@ extern "C" inline int {{neuronName}}_dynamics{% if ast.get_args() | length > 0 % {%- endfor %} {%- endfor %} -{%- if use_gap_junctions %} +{%- else %} +{%- for eq_block in synapse.get_equations_blocks() %} +{%- for ode in eq_block.get_declarations() %} +{%- for inline_expr in utils.get_inline_expression_symbols(ode) %} +{%- if not inline_expr.is_equation() %} +{%- set declaring_expr = inline_expr.get_declaring_expression() %} + double {{ printer.print(utils.get_state_variable_by_name(astnode, inline_expr)) }} = {{ gsl_printer.print(declaring_expr) }}; +{%- endif %} +{%- endfor %} +{%- endfor %} +{%- endfor %} +{%- endif %} + +{%- if use_gap_junctions and neuronName is defined %} // set I_gap depending on interpolation order double __I_gap = 0.0; @@ -51,7 +70,7 @@ extern "C" inline int {{neuronName}}_dynamics{% if ast.get_args() | length > 0 % } {%- endif %} - +{%- if neuronName is defined %} {%- for variable_name in numeric_state_variables + numeric_state_variables_moved %} {%- set update_expr = numeric_update_expressions[variable_name] %} {%- set variable_symbol = variable_symbols[variable_name] %} @@ -62,6 +81,14 @@ extern "C" inline int {{neuronName}}_dynamics{% if ast.get_args() | length > 0 % {%- endif %} {%- endfor %} +{%- else %} +{%- for variable_name in numeric_state_variables %} +{%- set update_expr = numeric_update_expressions[variable_name] %} +{%- set variable_symbol = variable_symbols[variable_name] %} + f[State_::{{ variable_symbol.get_symbol_name() }}] = {% if ast.get_args() | length > 0 %}{% if variable_name in utils.integrate_odes_args_strs_from_function_call(ast) + utils.all_convolution_variable_names(astnode) %}{{ gsl_printer.print(update_expr) }}{% else %}0{% endif %}{% else %}{{ 
gsl_printer.print(update_expr) }}{% endif %}; +{%- endfor %} +{%- endif %} + {%- if numeric_solver == "rk45" %} return GSL_SUCCESS; {%- else %} diff --git a/pynestml/codegeneration/resources_nest/point_neuron/directives_cpp/GSLIntegrationStep.jinja2 b/pynestml/codegeneration/resources_nest/point_neuron/directives_cpp/GSLIntegrationStep.jinja2 index c31b33511..aaefd78e5 100644 --- a/pynestml/codegeneration/resources_nest/point_neuron/directives_cpp/GSLIntegrationStep.jinja2 +++ b/pynestml/codegeneration/resources_nest/point_neuron/directives_cpp/GSLIntegrationStep.jinja2 @@ -5,7 +5,11 @@ {%- if tracing %}/* generated by {{self._TemplateReference__context.name}} */ {% endif %} {%- if numeric_solver == "rk45" %} double __t = 0; +{%- if neuronName == None %} B_.__sys.function = {{neuronName}}_dynamics{% if ast.get_args() | length > 0 %}_{{ utils.integrate_odes_args_str_from_function_call(ast) }}{% endif %}; +{%- else %} +B_.__sys.function = {{synapseName}}_dynamics{% if ast.get_args() | length > 0 %}_{{ utils.integrate_odes_args_str_from_function_call(ast) }}{% endif %}; +{%- endif %} // numerical integration with adaptive step size control: // ------------------------------------------------------ // gsl_odeiv_evolve_apply performs only a single numerical diff --git a/pynestml/utils/ast_utils.py b/pynestml/utils/ast_utils.py index d6aadf6ee..7c0408950 100644 --- a/pynestml/utils/ast_utils.py +++ b/pynestml/utils/ast_utils.py @@ -2454,10 +2454,11 @@ def visit_variable(self, node): for update_expr in update_expr_list: update_expr.accept(visitor) - if isinstance(model, ASTNeuron): + if "post_spike_updates" in dir(model): for update_expr in model.post_spike_updates.values(): update_expr.accept(visitor) + if "equations_with_delay_vars" in dir(model): for node in model.equations_with_delay_vars + model.equations_with_vector_vars: node.accept(visitor) From 0aa8ba1903648841bb15db0376f27798dcd4ce33 Mon Sep 17 00:00:00 2001 From: Pooja Babu Date: Thu, 20 Jun 2024 16:24:31 +0200 
Subject: [PATCH 03/19] Numeric solution --- .../resources_nest/point_neuron/common/SynapseHeader.h.jinja2 | 1 - 1 file changed, 1 deletion(-) diff --git a/pynestml/codegeneration/resources_nest/point_neuron/common/SynapseHeader.h.jinja2 b/pynestml/codegeneration/resources_nest/point_neuron/common/SynapseHeader.h.jinja2 index 52f2d80bf..82e5ddd14 100644 --- a/pynestml/codegeneration/resources_nest/point_neuron/common/SynapseHeader.h.jinja2 +++ b/pynestml/codegeneration/resources_nest/point_neuron/common/SynapseHeader.h.jinja2 @@ -455,7 +455,6 @@ inline void set_{{ variable.get_name() }}(const {{ declarations.print_variable_t {%- endif %} {%- endfor %} {%- endfilter %} -{%- endif %} // ------------------------------------------------------------------------- // Getters/setters for inline expressions From 80faa9303169eeee3e9709a42dddde4e64a2c8cc Mon Sep 17 00:00:00 2001 From: Pooja Babu Date: Fri, 13 Dec 2024 10:26:59 +0100 Subject: [PATCH 04/19] Add numneric solver to synapses --- .../directives_cpp/PredefinedFunction_integrate_odes.jinja2 | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/pynestml/codegeneration/resources_nest/point_neuron/directives_cpp/PredefinedFunction_integrate_odes.jinja2 b/pynestml/codegeneration/resources_nest/point_neuron/directives_cpp/PredefinedFunction_integrate_odes.jinja2 index b630f329a..dda60d5e9 100644 --- a/pynestml/codegeneration/resources_nest/point_neuron/directives_cpp/PredefinedFunction_integrate_odes.jinja2 +++ b/pynestml/codegeneration/resources_nest/point_neuron/directives_cpp/PredefinedFunction_integrate_odes.jinja2 @@ -25,7 +25,12 @@ {%- if uses_numeric_solver %} +{%- if neuronName is defined %} {% set numeric_state_variables_to_be_integrated = numeric_state_variables + purely_numeric_state_variables_moved %} +{%- else %} +{% set numeric_state_variables_to_be_integrated = numeric_state_variables %} +{%- endif %} + {%- if ast.get_args() | length > 0 %} {%- set numeric_state_variables_to_be_integrated = 
utils.filter_variables_list(numeric_state_variables_to_be_integrated, ast.get_args()) %} {%- endif %} From 129f4c90ebd81cb41091d4cefd7fffca5ac5f82c Mon Sep 17 00:00:00 2001 From: Pooja Babu Date: Wed, 7 May 2025 16:24:42 +0200 Subject: [PATCH 05/19] Add numeric solver to synapse template --- .../common/SynapseHeader.h.jinja2 | 162 +++++++++++------- .../GSLDifferentiationFunction.jinja2 | 7 +- .../directives_cpp/GSLIntegrationStep.jinja2 | 28 ++- 3 files changed, 128 insertions(+), 69 deletions(-) diff --git a/pynestml/codegeneration/resources_nest/point_neuron/common/SynapseHeader.h.jinja2 b/pynestml/codegeneration/resources_nest/point_neuron/common/SynapseHeader.h.jinja2 index d5f0bc182..7f3c5bc04 100644 --- a/pynestml/codegeneration/resources_nest/point_neuron/common/SynapseHeader.h.jinja2 +++ b/pynestml/codegeneration/resources_nest/point_neuron/common/SynapseHeader.h.jinja2 @@ -114,15 +114,16 @@ namespace {{names_namespace}} const Name _{{sym.get_symbol_name()}}( "{{sym.get_symbol_name()}}" ); {%- endfor %} {%- endif %} -} +} // end namespace; {%- if uses_numeric_solver %} {%- for s in utils.create_integrate_odes_combinations(astnode) %} -extern "C" inline int {{neuronName}}_dynamics{% if s | length > 0 %}_{{ s }}{% endif %}( double, const double ode_state[], double f[], void* pnode ); +extern "C" inline int {{synapseName}}_dynamics{% if s | length > 0 %}_{{ s }}{% endif %}( double, const double ode_state[], double f[], void* pnode ); {%- endfor %} {%- endif %} -class {{synapseName}}CommonSynapseProperties : public CommonSynapseProperties { +class {{synapseName}}CommonSynapseProperties : public CommonSynapseProperties +{ public: {{synapseName}}CommonSynapseProperties() @@ -233,49 +234,7 @@ public: } {%- endif %} -}; - -{% if uses_numeric_solver %} -{%- for ast in utils.get_all_integrate_odes_calls_unique(synapse) %} -{%- include "directives_cpp/GSLDifferentiationFunction.jinja2" %} -{%- endfor %} -{%- endif %} - -template < typename targetidentifierT > -class 
{{synapseName}} : public Connection< targetidentifierT > -{ -{%- if paired_neuron_name | length > 0 %} - typedef {{ paired_neuron_name }} post_neuron_t; - -{% endif %} -{%- if vt_ports is defined and vt_ports|length > 0 %} -public: -{%- if not (nest_version.startswith("v2") or nest_version.startswith("v3.0") or nest_version.startswith("v3.1") or nest_version.startswith("v3.2") or nest_version.startswith("v3.3") or nest_version.startswith("v3.4")) %} - void trigger_update_weight( size_t t, - const std::vector< spikecounter >& vt_spikes, - double t_trig, - const {{synapseName}}CommonSynapseProperties& cp ); -{%- else %} - void trigger_update_weight( thread t, - const std::vector< spikecounter >& vt_spikes, - double t_trig, - const {{synapseName}}CommonSynapseProperties& cp ); -{%- endif %} -{%- endif %} -private: - double t_lastspike_; -{%- if vt_ports is defined and vt_ports|length > 0 %} - // time of last update, which is either time of last presyn. spike or time-driven update - double t_last_update_; - - // vt_spikes_idx_ refers to the vt spike that has just been processed after trigger_update_weight - // a pseudo vt spike at t_trig is stored at index 0 and vt_spikes_idx_ = 0 -{%- if not (nest_version.startswith("v2") or nest_version.startswith("v3.0") or nest_version.startswith("v3.1") or nest_version.startswith("v3.2") or nest_version.startswith("v3.3") or nest_version.startswith("v3.4")) %} - size_t vt_spikes_idx_; -{%- else %} - index vt_spikes_idx_; -{%- endif %} -{%- endif %} +}; // end class {{synapseName}}CommonSynapseProperties /** * Dynamic state of the synapse. @@ -328,7 +287,7 @@ private: {%- endif %} State_() {}; - }; + }; // end State_ /** * Free parameters of the synapse. @@ -366,7 +325,44 @@ private: /** Initialize parameters to their default values. 
*/ Parameters_() {}; - }; + }; // end Parameters_ + + +template < typename targetidentifierT > +class {{synapseName}} : public Connection< targetidentifierT > +{ +{%- if paired_neuron_name | length > 0 %} + typedef {{ paired_neuron_name }} post_neuron_t; + +{% endif %} +{%- if vt_ports is defined and vt_ports|length > 0 %} +public: +{%- if not (nest_version.startswith("v2") or nest_version.startswith("v3.0") or nest_version.startswith("v3.1") or nest_version.startswith("v3.2") or nest_version.startswith("v3.3") or nest_version.startswith("v3.4")) %} + void trigger_update_weight( size_t t, + const std::vector< spikecounter >& vt_spikes, + double t_trig, + const {{synapseName}}CommonSynapseProperties& cp ); +{%- else %} + void trigger_update_weight( thread t, + const std::vector< spikecounter >& vt_spikes, + double t_trig, + const {{synapseName}}CommonSynapseProperties& cp ); +{%- endif %} +{%- endif %} +private: + double t_lastspike_; +{%- if vt_ports is defined and vt_ports|length > 0 %} + // time of last update, which is either time of last presyn. spike or time-driven update + double t_last_update_; + + // vt_spikes_idx_ refers to the vt spike that has just been processed after trigger_update_weight + // a pseudo vt spike at t_trig is stored at index 0 and vt_spikes_idx_ = 0 +{%- if not (nest_version.startswith("v2") or nest_version.startswith("v3.0") or nest_version.startswith("v3.1") or nest_version.startswith("v3.2") or nest_version.startswith("v3.3") or nest_version.startswith("v3.4")) %} + size_t vt_spikes_idx_; +{%- else %} + index vt_spikes_idx_; +{%- endif %} +{%- endif %} /** * Internal variables of the synapse. 
@@ -388,13 +384,6 @@ private: {%- if uses_numeric_solver %} {%- if numeric_solver == "rk45" %} - struct Buffers_ - { - - // ----------------------------------------------------------------------- - // GSL ODE solver data structures - // ----------------------------------------------------------------------- - gsl_odeiv_step* __s; //!< stepping function gsl_odeiv_control* __c; //!< adaptive stepsize control function gsl_odeiv_evolve* __e; //!< evolution function @@ -406,21 +395,15 @@ private: // it is safe to place both here. double __step; //!< step size in ms double __integration_step; //!< current integration time step, updated by GSL - - Buffers_() {}; - }; {%- endif %} {%- endif %} Parameters_ P_; //!< Free parameters. State_ S_; //!< Dynamic state. Variables_ V_; //!< Internal Variables -{%- if synapse.get_state_symbols()|length > 0 or synapse.get_parameter_symbols()|length > 0 %} -{%- if numeric_solver == "rk45" %} - Buffers_ B_; //!< Buffers. -{%- endif %} + {%- for s in utils.create_integrate_odes_combinations(astnode) %} - friend int {{neuronName}}_dynamics{% if s | length > 0 %}_{{ s }}{% endif %}( double, const double ode_state[], double f[], void* pnode ); + friend int {{synapseName}}_dynamics{% if s | length > 0 %}_{{ s }}{% endif %}( double, const double ode_state[], double f[], void* pnode ); {%- endfor %} {%- endif %} @@ -1130,6 +1113,14 @@ void } {%- endif %} +/* +** Synapse dynamics +*/ +{% if uses_numeric_solver %} +{%- for ast in utils.get_all_integrate_odes_calls_unique(synapse) %} +{%- include "directives_cpp/GSLDifferentiationFunction.jinja2" %} +{%- endfor %} +{%- endif %} template < typename targetidentifierT > void @@ -1323,6 +1314,41 @@ template < typename targetidentifierT > {%- endif %} {%- endif %} +{%- if uses_numeric_solver and numeric_solver == "rk45" %} + if ( not __s ) + { + __s = gsl_odeiv_step_alloc( gsl_odeiv_step_rkf45, State_::STATE_VEC_SIZE ); + } + else + { + gsl_odeiv_step_reset( __s ); + } + + if ( not __c ) + { + __c = 
gsl_odeiv_control_y_new( P_.__gsl_abs_error_tol, P_.__gsl_rel_error_tol ); + } + else + { + gsl_odeiv_control_init( __c, P_.__gsl_abs_error_tol, P_.__gsl_rel_error_tol, 1.0, 0.0 ); + } + + if ( not __e ) + { + __e = gsl_odeiv_evolve_alloc( State_::STATE_VEC_SIZE ); + } + else + { + gsl_odeiv_evolve_reset( __e ); + } + + __sys.jacobian = nullptr; + __sys.dimension = State_::STATE_VEC_SIZE; + __sys.params = reinterpret_cast< void* >( &P_ ); + __step = nest::Time::get_resolution().get_ms(); + __integration_step = nest::Time::get_resolution().get_ms(); +{%- endif %} + t_lastspike_ = 0.; {%- if vt_ports is defined and vt_ports|length > 0 %} t_last_update_ = 0.; @@ -1358,6 +1384,16 @@ template < typename targetidentifierT > {%- endif %} t_lastspike_ = rhs.t_lastspike_; +{%- if uses_numeric_solver and numeric_solver == "rk45" %} + // Numeric solver variables + __s = rhs.__s; + __c = rhs.__c; + __e = rhs.__e; + __sys = rhs.__sys; + __step = rhs.__step; + __integration_step = rhs.__integration_step; +{%- endif %} + // special treatment of NEST delay set_delay(rhs.get_delay()); {%- if synapse_weight_variable | length > 0 %} diff --git a/pynestml/codegeneration/resources_nest/point_neuron/directives_cpp/GSLDifferentiationFunction.jinja2 b/pynestml/codegeneration/resources_nest/point_neuron/directives_cpp/GSLDifferentiationFunction.jinja2 index b63b3c9c1..a6af1dd78 100644 --- a/pynestml/codegeneration/resources_nest/point_neuron/directives_cpp/GSLDifferentiationFunction.jinja2 +++ b/pynestml/codegeneration/resources_nest/point_neuron/directives_cpp/GSLDifferentiationFunction.jinja2 @@ -11,8 +11,11 @@ extern "C" inline int {{modelName}}_dynamics{% if ast.get_args() | length > 0 %} typedef {{modelName}}::State_ State_; // get access to node so we can almost work as in a member function assert( pnode ); - const {{modelName}}& node = *( reinterpret_cast< {{modelName}}* >( pnode ) ); - +{%- if neuronName is defined %} + const {{neuronName}}& node = *( reinterpret_cast< 
{{neuronName}}* >( pnode ) ); +{%- else %} + const {{synapseName}}& node = *( reinterpret_cast< {{Parameters_}}* >( pnode ) ); +{%- endif %} // ode_state[] here is---and must be---the state vector supplied by the integrator, // not the state vector in the node, node.S_.ode_state[]. diff --git a/pynestml/codegeneration/resources_nest/point_neuron/directives_cpp/GSLIntegrationStep.jinja2 b/pynestml/codegeneration/resources_nest/point_neuron/directives_cpp/GSLIntegrationStep.jinja2 index 22afe0546..c0617ded1 100644 --- a/pynestml/codegeneration/resources_nest/point_neuron/directives_cpp/GSLIntegrationStep.jinja2 +++ b/pynestml/codegeneration/resources_nest/point_neuron/directives_cpp/GSLIntegrationStep.jinja2 @@ -5,10 +5,10 @@ {%- if tracing %}/* generated by {{self._TemplateReference__context.name}} */ {% endif %} {%- if numeric_solver == "rk45" %} double __t = 0; -{%- if neuronName == None %} +{%- if neuronName is defined %} B_.__sys.function = {{neuronName}}_dynamics{% if ast.get_args() | length > 0 %}_{{ utils.integrate_odes_args_str_from_function_call(ast) }}{% endif %}; {%- else %} -B_.__sys.function = {{synapseName}}_dynamics{% if ast.get_args() | length > 0 %}_{{ utils.integrate_odes_args_str_from_function_call(ast) }}{% endif %}; +__sys.function = {{synapseName}}_dynamics{% if ast.get_args() | length > 0 %}_{{ utils.integrate_odes_args_str_from_function_call(ast) }}{% endif %}; {%- endif %} // numerical integration with adaptive step size control: // ------------------------------------------------------ @@ -22,11 +22,12 @@ B_.__sys.function = {{synapseName}}_dynamics{% if ast.get_args() | length > 0 %} // enforce setting IntegrationStep to step-t; this is of advantage // for a consistent and efficient integration across subsequent // simulation intervals +{%- if neuronName is defined %} while ( __t < B_.__step ) { -{%- if use_gap_junctions %} +{%- if use_gap_junctions %} gap_junction_step = B_.__step; -{%- endif %} +{%- endif %} const int status = 
gsl_odeiv_evolve_apply(B_.__e, B_.__c, @@ -42,6 +43,25 @@ while ( __t < B_.__step ) throw nest::GSLSolverFailure( get_name(), status ); } } +{%- else %} +while ( __t < timestep ) +{ + const int status = gsl_odeiv_evolve_apply(__e, + __c, + __s, + &__sys, // system of ODE + &__t, // from t + timestep, // to t <= step + &__integration_step, // integration step size + S_.ode_state); // neuronal state + + if ( status != GSL_SUCCESS ) + { + throw nest::GSLSolverFailure( get_name(), status ); + } + } +{%- endif %} + {%- elif numeric_solver == "forward-Euler" %} double f[State_::STATE_VEC_SIZE]; {{neuronName}}_dynamics{% if ast.get_args() | length > 0 %}_{{ utils.integrate_odes_args_str_from_function_call(ast) }}{% endif %}( get_t(), S_.ode_state, f, reinterpret_cast< void* >( this ) ); From 4c5b06c83cfa84e9d9f14a22daac90d5fae37440 Mon Sep 17 00:00:00 2001 From: Pooja Babu Date: Thu, 8 May 2025 15:58:08 +0200 Subject: [PATCH 06/19] Modify synapse templates --- .../codegeneration/nest_code_generator.py | 16 ++++-- .../printers/gsl_variable_printer.py | 53 ++++++++++--------- .../common/SynapseHeader.h.jinja2 | 15 +++--- .../GSLDifferentiationFunction.jinja2 | 11 ++-- 4 files changed, 54 insertions(+), 41 deletions(-) diff --git a/pynestml/codegeneration/nest_code_generator.py b/pynestml/codegeneration/nest_code_generator.py index 365f27b03..cbbff3019 100644 --- a/pynestml/codegeneration/nest_code_generator.py +++ b/pynestml/codegeneration/nest_code_generator.py @@ -223,14 +223,23 @@ def setup_printers(self): self._gsl_variable_printer = GSLVariablePrinter(None) if self.option_exists("nest_version") and (self.get_option("nest_version").startswith("2") or self.get_option("nest_version").startswith("v2")): self._gsl_function_call_printer = NEST2GSLFunctionCallPrinter(None) + self._gsl_function_call_printer_no_origin = NEST2GSLFunctionCallPrinter(None) else: self._gsl_function_call_printer = NESTGSLFunctionCallPrinter(None) + self._gsl_function_call_printer_no_origin = 
NEST2GSLFunctionCallPrinter(None) self._gsl_printer = CppExpressionPrinter(simple_expression_printer=CppSimpleExpressionPrinter(variable_printer=self._gsl_variable_printer, constant_printer=self._constant_printer, function_call_printer=self._gsl_function_call_printer)) self._gsl_function_call_printer._expression_printer = self._gsl_printer + self._gsl_variable_printer_no_origin = GSLVariablePrinter(None, with_origin=False) + self._gsl_printer_no_origin = CppExpressionPrinter(simple_expression_printer=CppSimpleExpressionPrinter(variable_printer=self._gsl_variable_printer_no_origin, + constant_printer=self._constant_printer, + function_call_printer=self._gsl_function_call_printer)) + self._gsl_variable_printer_no_origin._expression_printer = self._gsl_printer_no_origin + self._gsl_function_call_printer_no_origin._expression_printer = self._gsl_printer_no_origin + # ODE-toolbox printers self._ode_toolbox_variable_printer = ODEToolboxVariablePrinter(None) self._ode_toolbox_function_call_printer = ODEToolboxFunctionCallPrinter(None) @@ -518,6 +527,7 @@ def _get_model_namespace(self, astnode: ASTModel) -> Dict: namespace["printer"] = self._nest_printer namespace["printer_no_origin"] = self._printer_no_origin namespace["gsl_printer"] = self._gsl_printer + namespace["gsl_printer_no_origin"] = self._gsl_printer_no_origin namespace["nestml_printer"] = NESTMLPrinter() namespace["type_symbol_printer"] = self._type_symbol_printer @@ -663,9 +673,9 @@ def _get_synapse_model_namespace(self, synapse: ASTModel) -> Dict: expr_ast.update_scope(synapse.get_equations_blocks()[0].get_scope()) expr_ast.accept(ASTSymbolTableVisitor()) namespace["numeric_update_expressions"][sym] = expr_ast - ASTUtils.assign_numeric_non_numeric_state_variables(synapse, namespace["numeric_state_variables"], namespace[ - "numeric_update_expressions"] if "numeric_update_expressions" in namespace.keys() else None, namespace[ - "update_expressions"] if "update_expressions" in namespace.keys() else None) + + 
ASTUtils.assign_numeric_non_numeric_state_variables(synapse, namespace["numeric_state_variables"], + namespace["numeric_update_expressions"] if "numeric_update_expressions" in namespace.keys() else None, namespace["update_expressions"] if "update_expressions" in namespace.keys() else None) namespace["spike_updates"] = synapse.spike_updates diff --git a/pynestml/codegeneration/printers/gsl_variable_printer.py b/pynestml/codegeneration/printers/gsl_variable_printer.py index 463833a43..aa638843c 100644 --- a/pynestml/codegeneration/printers/gsl_variable_printer.py +++ b/pynestml/codegeneration/printers/gsl_variable_printer.py @@ -18,8 +18,10 @@ # # You should have received a copy of the GNU General Public License # along with NEST. If not, see . +from pynestml.codegeneration.nest_code_generator_utils import NESTCodeGeneratorUtils from pynestml.codegeneration.nest_unit_converter import NESTUnitConverter from pynestml.codegeneration.printers.cpp_variable_printer import CppVariablePrinter +from pynestml.codegeneration.printers.expression_printer import ExpressionPrinter from pynestml.meta_model.ast_variable import ASTVariable from pynestml.symbols.predefined_units import PredefinedUnits from pynestml.symbols.symbol import SymbolKind @@ -33,46 +35,39 @@ class GSLVariablePrinter(CppVariablePrinter): Variable printer for C++ syntax and using the GSL (GNU Scientific Library) API from inside the ``extern "C"`` stepping function. """ - def print_variable(self, node: ASTVariable) -> str: + def __init__(self, expression_printer: ExpressionPrinter, with_origin: bool = True, ): + super().__init__(expression_printer) + self.with_origin = with_origin + + def print_variable(self, variable: ASTVariable) -> str: """ Converts a single name reference to a gsl processable format. 
- :param node: a single variable + :param variable: a single variable :return: a gsl processable format of the variable """ - assert isinstance(node, ASTVariable) - symbol = node.get_scope().resolve_to_symbol(node.get_complete_name(), SymbolKind.VARIABLE) + assert isinstance(variable, ASTVariable) + symbol = variable.get_scope().resolve_to_symbol(variable.get_complete_name(), SymbolKind.VARIABLE) if symbol is None: # test if variable name can be resolved to a type - if PredefinedUnits.is_unit(node.get_complete_name()): - return str(NESTUnitConverter.get_factor(PredefinedUnits.get_unit(node.get_complete_name()).get_unit())) + if PredefinedUnits.is_unit(variable.get_complete_name()): + return str( + NESTUnitConverter.get_factor(PredefinedUnits.get_unit(variable.get_complete_name()).get_unit())) - code, message = Messages.get_could_not_resolve(node.get_name()) + code, message = Messages.get_could_not_resolve(variable.get_name()) Logger.log_message(log_level=LoggingLevel.ERROR, code=code, message=message, - error_position=node.get_source_position()) + error_position=variable.get_source_position()) return "" - if node.is_delay_variable(): - return self._print_delay_variable(node) + if variable.is_delay_variable(): + return self._print_delay_variable(variable) if symbol.is_state() and not symbol.is_inline_expression: - if "_is_numeric" in dir(node) and node._is_numeric: + if "_is_numeric" in dir(variable) and variable._is_numeric: # ode_state[] here is---and must be---the state vector supplied by the integrator, not the state vector in the node, node.S_.ode_state[]. - return "ode_state[State_::" + CppVariablePrinter._print_cpp_name(node.get_complete_name()) + "]" - - # non-ODE state symbol - return "node.S_." + CppVariablePrinter._print_cpp_name(node.get_complete_name()) - - if symbol.is_parameters(): - return "node.P_." + super().print_variable(node) - - if symbol.is_internals(): - return "node.V_." 
+ super().print_variable(node) + return "ode_state[State_::" + CppVariablePrinter._print_cpp_name(variable.get_complete_name()) + "]" - if symbol.is_input(): - return "node.B_." + self._print_buffer_value(node) - - raise Exception("Unknown node type") + return self._print(variable, symbol, with_origin=self.with_origin) def _print_delay_variable(self, variable: ASTVariable) -> str: """ @@ -104,3 +99,11 @@ def _print_buffer_value(self, variable: ASTVariable) -> str: return "spike_inputs_grid_sum_[node." + var_name + " - node.MIN_SPIKE_RECEPTOR]" return variable_symbol.get_symbol_name() + '_grid_sum_' + + def _print(self, variable, symbol, with_origin: bool = True): + variable_name = CppVariablePrinter._print_cpp_name(variable.get_complete_name()) + + if with_origin: + return "node." + NESTCodeGeneratorUtils.print_symbol_origin(symbol, variable) % variable_name + + return "node." + variable_name diff --git a/pynestml/codegeneration/resources_nest/point_neuron/common/SynapseHeader.h.jinja2 b/pynestml/codegeneration/resources_nest/point_neuron/common/SynapseHeader.h.jinja2 index 7f3c5bc04..d216f5acc 100644 --- a/pynestml/codegeneration/resources_nest/point_neuron/common/SynapseHeader.h.jinja2 +++ b/pynestml/codegeneration/resources_nest/point_neuron/common/SynapseHeader.h.jinja2 @@ -280,8 +280,10 @@ public: {# // state variables from state block#} {%- filter indent(4,True) %} {%- for variable_symbol in synapse.get_state_symbols() %} -{%- set variable = utils.get_state_variable_by_name(astnode, variable_symbol.get_symbol_name()) %} -{%- include "directives_cpp/MemberDeclaration.jinja2" %} +{% if variable_symbol.get_symbol_name() not in numeric_state_variables %} +{%- set variable = utils.get_state_variable_by_name(astnode, variable_symbol.get_symbol_name()) %} +{%- include "directives_cpp/MemberDeclaration.jinja2" %} +{%- endif %} {%- endfor %} {%- endfilter %} {%- endif %} @@ -322,6 +324,8 @@ public: {%- endif %} {%- endfor %} {%- endfilter %} + double 
__gsl_abs_error_tol; + double __gsl_rel_error_tol; /** Initialize parameters to their default values. */ Parameters_() {}; @@ -405,7 +409,6 @@ private: {%- for s in utils.create_integrate_odes_combinations(astnode) %} friend int {{synapseName}}_dynamics{% if s | length > 0 %}_{{ s }}{% endif %}( double, const double ode_state[], double f[], void* pnode ); {%- endfor %} -{%- endif %} // ------------------------------------------------------------------------- // Getters/setters for parameters and state variables @@ -1272,13 +1275,7 @@ template < typename targetidentifierT > {{synapseName}}< targetidentifierT >::{{synapseName}}() : ConnectionBase() { const double __timestep = nest::Time::get_resolution().get_ms(); // do not remove, this is necessary for the timestep() function -{%- if uses_numeric_solver %} -{%- if numeric_solver == "rk45" %} - // use a default "good enough" value for the absolute error. It can be adjusted via `node.set()` - P_.__gsl_error_tol = 1e-3; -{%- endif %} -{%- endif %} // initial values for parameters {%- filter indent(2, True) %} {%- for variable_symbol in synapse.get_parameter_symbols() %} diff --git a/pynestml/codegeneration/resources_nest/point_neuron/directives_cpp/GSLDifferentiationFunction.jinja2 b/pynestml/codegeneration/resources_nest/point_neuron/directives_cpp/GSLDifferentiationFunction.jinja2 index a6af1dd78..eb69c9d9e 100644 --- a/pynestml/codegeneration/resources_nest/point_neuron/directives_cpp/GSLDifferentiationFunction.jinja2 +++ b/pynestml/codegeneration/resources_nest/point_neuron/directives_cpp/GSLDifferentiationFunction.jinja2 @@ -8,13 +8,16 @@ {%- endif %} extern "C" inline int {{modelName}}_dynamics{% if ast.get_args() | length > 0 %}_{{ utils.integrate_odes_args_str_from_function_call(ast) }}{% endif %}(double __time, const double ode_state[], double f[], void* pnode) { +{%- if neuronName is defined %} typedef {{modelName}}::State_ State_; - // get access to node so we can almost work as in a member function + // get 
access to node so we can almost work as in a member function assert( pnode ); -{%- if neuronName is defined %} const {{neuronName}}& node = *( reinterpret_cast< {{neuronName}}* >( pnode ) ); {%- else %} - const {{synapseName}}& node = *( reinterpret_cast< {{Parameters_}}* >( pnode ) ); + typedef nest::State_ State_; + // get access to node so we can almost work as in a member function + assert( pnode ); + const Parameters_& node = *( reinterpret_cast< Parameters_* >( pnode ) ); {%- endif %} // ode_state[] here is---and must be---the state vector supplied by the integrator, // not the state vector in the node, node.S_.ode_state[]. @@ -92,7 +95,7 @@ extern "C" inline int {{modelName}}_dynamics{% if ast.get_args() | length > 0 %} {%- for variable_name in numeric_state_variables %} {%- set update_expr = numeric_update_expressions[variable_name] %} {%- set variable_symbol = variable_symbols[variable_name] %} - f[State_::{{ variable_symbol.get_symbol_name() }}] = {% if ast.get_args() | length > 0 %}{% if variable_name in utils.integrate_odes_args_strs_from_function_call(ast) + utils.all_convolution_variable_names(astnode) %}{{ gsl_printer.print(update_expr) }}{% else %}0{% endif %}{% else %}{{ gsl_printer.print(update_expr) }}{% endif %}; + f[State_::{{ variable_symbol.get_symbol_name() }}] = {% if ast.get_args() | length > 0 %}{% if variable_name in utils.integrate_odes_args_strs_from_function_call(ast) + utils.all_convolution_variable_names(astnode) %}{{ gsl_printer_no_origin.print(update_expr) }}{% else %}0{% endif %}{% else %}{{ gsl_printer_no_origin.print(update_expr) }}{% endif %}; {%- endfor %} {%- endif %} From ee597620a73697abed3e4203cb3468e3866570fb Mon Sep 17 00:00:00 2001 From: Pooja Babu Date: Thu, 8 May 2025 16:16:21 +0200 Subject: [PATCH 07/19] Modify synapse templates --- .../point_neuron/common/SynapseHeader.h.jinja2 | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git 
a/pynestml/codegeneration/resources_nest/point_neuron/common/SynapseHeader.h.jinja2 b/pynestml/codegeneration/resources_nest/point_neuron/common/SynapseHeader.h.jinja2 index d216f5acc..2a120bf35 100644 --- a/pynestml/codegeneration/resources_nest/point_neuron/common/SynapseHeader.h.jinja2 +++ b/pynestml/codegeneration/resources_nest/point_neuron/common/SynapseHeader.h.jinja2 @@ -388,9 +388,9 @@ private: {%- if uses_numeric_solver %} {%- if numeric_solver == "rk45" %} - gsl_odeiv_step* __s; //!< stepping function - gsl_odeiv_control* __c; //!< adaptive stepsize control function - gsl_odeiv_evolve* __e; //!< evolution function + gsl_odeiv_step* __s = nullptr; //!< stepping function + gsl_odeiv_control* __c = nullptr; //!< adaptive stepsize control function + gsl_odeiv_evolve* __e = nullptr; //!< evolution function gsl_odeiv_system __sys; //!< struct describing system - __integration_step should be reset with the neuron on ResetNetwork, @@ -1275,7 +1275,6 @@ template < typename targetidentifierT > {{synapseName}}< targetidentifierT >::{{synapseName}}() : ConnectionBase() { const double __timestep = nest::Time::get_resolution().get_ms(); // do not remove, this is necessary for the timestep() function - // initial values for parameters {%- filter indent(2, True) %} {%- for variable_symbol in synapse.get_parameter_symbols() %} @@ -1287,6 +1286,10 @@ template < typename targetidentifierT > {%- endif %} {%- endif %} {%- endfor %} +{%- if uses_numeric_solver and numeric_solver == "rk45" %} +P_.__gsl_abs_error_tol = 1e-6; +P_.__gsl_rel_error_tol = 1e-6; +{%- endif %} {%- endfilter %} V_.__h = nest::Time::get_resolution().get_ms(); From 07a45ec0bffee719f48adaa4c87d536f359c7056 Mon Sep 17 00:00:00 2001 From: Pooja Babu Date: Mon, 7 Jul 2025 13:01:49 +0200 Subject: [PATCH 08/19] Add test for non-linear synapse --- tests/nest_tests/resources/stp_synapse.nestml | 38 +++++ .../nest_tests/test_synapse_numeric_solver.py | 131 ++++++++++++++++++ 2 files changed, 169 insertions(+)
create mode 100644 tests/nest_tests/resources/stp_synapse.nestml create mode 100644 tests/nest_tests/test_synapse_numeric_solver.py diff --git a/tests/nest_tests/resources/stp_synapse.nestml b/tests/nest_tests/resources/stp_synapse.nestml new file mode 100644 index 000000000..70c50eb74 --- /dev/null +++ b/tests/nest_tests/resources/stp_synapse.nestml @@ -0,0 +1,38 @@ +model stp_synapse: + input: + pre_spikes <- spike + + output: + spike(weight real, delay ms) + + state: + w real = 1 / U_0 # synaptic (baseline) weight + x real = 1. # fraction of available resources after neurotransmitter depletion + u real = U_0 # utilization parameter: fraction of available resources ready for use (release probability) + U real = U_0 # increment of u produced by a spike + + parameters: + d ms = 1 ms # synaptic transmission delay + U_0 real = 0.25 # basal release probability + K_A real = 0.0375 # controls how fast the baseline release probability increases with the activity + tau_D ms = 300 ms # depression time constant + tau_F ms = 1500 ms # facilitation time constant + tau_A ms = 20000 ms # augmentation time constant + tau_filter ms = 50 ms # filtered spike train time constant + + equations: + x' = (1. - x) / tau_D + u' = (U - u) / tau_F + U' = (U_0 - U) / tau_A + + onReceive(pre_spikes): + x -= u * x + u += U * (1. - u) + U += K_A * (1. - U) + + w_effective real = w * x * u + + emit_spike(w_effective, d) + + update: + integrate_odes() \ No newline at end of file diff --git a/tests/nest_tests/test_synapse_numeric_solver.py b/tests/nest_tests/test_synapse_numeric_solver.py new file mode 100644 index 000000000..9c337787b --- /dev/null +++ b/tests/nest_tests/test_synapse_numeric_solver.py @@ -0,0 +1,131 @@ +# -*- coding: utf-8 -*- +# +# test_synapse_numeric_solver.py +# +# This file is part of NEST. 
+# +# Copyright (C) 2004 The NEST Initiative +# +# NEST is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 2 of the License, or +# (at your option) any later version. +# +# NEST is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with NEST. If not, see . +import os + +import matplotlib.pyplot as plt +import nest + +from pynestml.frontend.pynestml_frontend import generate_target, generate_nest_target +import numpy as np + + +class TestSynapseNumericSolver: + """ + Tests a synapse with non-linear dynamics requiring a numeric solver for ODEs. + """ + + def test_synapse_with_numeric_solver(self): + nest.ResetKernel() + nest.set_verbosity("M_WARNING") + dt = 0.1 + nest.resolution = dt + + files = ["models/neurons/iaf_psc_exp_neuron.nestml", "tests/nest_tests/resources/stp_synapse.nestml"] + input_paths = [os.path.realpath(os.path.join(os.path.dirname(__file__), os.path.join( + os.pardir, os.pardir, s))) for s in files] + target_path = "target_stp" + modulename = "stp_module" + + generate_nest_target(input_path=input_paths, + target_path=target_path, + logging_level="INFO", + suffix="_nestml", + module_name=modulename, + codegen_opts={"neuron_synapse_pairs": [{"neuron": "iaf_psc_exp_neuron", + "synapse": "stp_synapse"}], + "delay_variable": {"stp_synapse": "d"}, + "weight_variable": {"stp_synapse": "w"}}) + nest.Install(modulename) + + # properties of the generated spike train + frequency = 50 # in Hz + spike_count = 10 + step = 1000. 
/ frequency # in ms + duration = spike_count * step + sim_time = duration + 11_000 + + spike_times = (([i * step for i in range(1, spike_count + 1)] # 10 spikes at 50Hz + + [duration + 500]) # then 500ms after + + [duration + 10_000]) # then 10s after + + # parameters for the spike generator (spike train injector) + params_sg = { + "spike_times": spike_times + } + print(spike_times) + neuron_model = "iaf_psc_exp_neuron_nestml__with_stp_synapse_nestml" + synapse_model = "stp_synapse_nestml__with_iaf_psc_exp_neuron_nestml" + + print("Creating the neuron model") + neuron = nest.Create(neuron_model) + + print("Creating spike generator") + spike_train_injector = nest.Create("spike_train_injector", params=params_sg) + + voltmeter = nest.Create("voltmeter", params={'interval': 0.1}) + spike_recorder = nest.Create("spike_recorder") + + print("Connecting the synapse") + nest.Connect(spike_train_injector, neuron, syn_spec={"synapse_model": synapse_model}) + nest.Connect(voltmeter, neuron) + nest.Connect(spike_train_injector, spike_recorder) + connections = nest.GetConnections(source=spike_train_injector, synapse_model=synapse_model) + x = [] + u = [] + U = [] + sim_step_size = 1. 
+ for i in np.arange(0., sim_time + 0.01, sim_step_size): + nest.Simulate(sim_step_size) + syn_stats = connections.get() # nest.GetConnections()[2].get() + x += [syn_stats["x"]] + u += [syn_stats["u"]] + U += [syn_stats["U"]] + + data_vm = voltmeter.events + data_sr = spike_recorder.events + + fig, ax = plt.subplots(3, 1, sharex=True, figsize=(10, 15)) + + ax[0].vlines(data_sr["times"], 0, 1) + ax[0].set_xlim([0, sim_time]) + ax[0].set_xlabel('Time (s)') + + ax[1].set_xlim([0, sim_time]) + ax[1].set_ylim([0, 1]) + ax[1].set_xlabel('Time (s)') + + ax[1].plot(x, label='x') + ax[1].plot(u, label='u') + ax[1].plot(U, label='U') + ax[1].legend(loc='best') + + ax[2].set_xlim([0, sim_time]) + ax[2].set_xlabel('Time (ms)') + + for ax_ in ax: + ax_.set_xlim([1., sim_time]) + ax_.set_xscale('log') + + ax[2].plot(data_vm["times"], data_vm["V_m"]) + + fig.tight_layout() + fig.savefig('synaug_numsim.pdf') + plt.show() From 18be17facbed5d8ebc92ba9e520ac119164027a0 Mon Sep 17 00:00:00 2001 From: Pooja Babu Date: Mon, 7 Jul 2025 13:04:50 +0200 Subject: [PATCH 09/19] Fix pycodestyle error --- tests/nest_tests/test_synapse_numeric_solver.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/nest_tests/test_synapse_numeric_solver.py b/tests/nest_tests/test_synapse_numeric_solver.py index 9c337787b..bd92d1d90 100644 --- a/tests/nest_tests/test_synapse_numeric_solver.py +++ b/tests/nest_tests/test_synapse_numeric_solver.py @@ -62,8 +62,8 @@ def test_synapse_with_numeric_solver(self): duration = spike_count * step sim_time = duration + 11_000 - spike_times = (([i * step for i in range(1, spike_count + 1)] # 10 spikes at 50Hz - + [duration + 500]) # then 500ms after + spike_times = (([i * step for i in range(1, spike_count + 1)] # 10 spikes at 50Hz + + [duration + 500]) # then 500ms after + [duration + 10_000]) # then 10s after # parameters for the spike generator (spike train injector) From d3d3a42a1690b08ce234fdd82b472c1a59680f95 Mon Sep 17 00:00:00 2001 From: 
Pooja Babu Date: Mon, 7 Jul 2025 15:03:50 +0200 Subject: [PATCH 10/19] Fix tests --- pynestml/codegeneration/nest_code_generator_utils.py | 3 --- pynestml/codegeneration/printers/gsl_variable_printer.py | 3 +++ tests/nest_tests/test_synapse_numeric_solver.py | 4 ++++ 3 files changed, 7 insertions(+), 3 deletions(-) diff --git a/pynestml/codegeneration/nest_code_generator_utils.py b/pynestml/codegeneration/nest_code_generator_utils.py index 342c2321e..4ff5c7e9a 100644 --- a/pynestml/codegeneration/nest_code_generator_utils.py +++ b/pynestml/codegeneration/nest_code_generator_utils.py @@ -58,9 +58,6 @@ def print_symbol_origin(cls, variable_symbol: VariableSymbol, variable: ASTVaria if variable_symbol.block_type == BlockType.INTERNALS: return "V_.%s" - if variable_symbol.block_type == BlockType.INPUT: - return "B_.%s" - return "" @classmethod diff --git a/pynestml/codegeneration/printers/gsl_variable_printer.py b/pynestml/codegeneration/printers/gsl_variable_printer.py index aa638843c..ff5c93c0f 100644 --- a/pynestml/codegeneration/printers/gsl_variable_printer.py +++ b/pynestml/codegeneration/printers/gsl_variable_printer.py @@ -67,6 +67,9 @@ def print_variable(self, variable: ASTVariable) -> str: # ode_state[] here is---and must be---the state vector supplied by the integrator, not the state vector in the node, node.S_.ode_state[]. return "ode_state[State_::" + CppVariablePrinter._print_cpp_name(variable.get_complete_name()) + "]" + if symbol.is_input(): + return "node.B_." 
+ self._print_buffer_value(variable) + return self._print(variable, symbol, with_origin=self.with_origin) def _print_delay_variable(self, variable: ASTVariable) -> str: diff --git a/tests/nest_tests/test_synapse_numeric_solver.py b/tests/nest_tests/test_synapse_numeric_solver.py index bd92d1d90..7b36d1f28 100644 --- a/tests/nest_tests/test_synapse_numeric_solver.py +++ b/tests/nest_tests/test_synapse_numeric_solver.py @@ -22,11 +22,15 @@ import matplotlib.pyplot as plt import nest +import pytest +from pynestml.codegeneration.nest_tools import NESTTools from pynestml.frontend.pynestml_frontend import generate_target, generate_nest_target import numpy as np +@pytest.mark.skipif(NESTTools.detect_nest_version().startswith("v2"), + reason="This test does not support NEST 2") class TestSynapseNumericSolver: """ Tests a synapse with non-linear dynamics requiring a numeric solver for ODEs. From 940c9a441b6a0cd3eba709cb0378e139d62d197f Mon Sep 17 00:00:00 2001 From: Pooja Babu Date: Mon, 7 Jul 2025 17:03:56 +0200 Subject: [PATCH 11/19] Add synapse model inside a namespace --- .../common/SynapseHeader.h.jinja2 | 137 +++++++++--------- .../GSLDifferentiationFunction.jinja2 | 1 - .../setup/common/ModuleClass.jinja2 | 2 +- .../setup/common/ModuleClassMaster.jinja2 | 2 +- pynestml/utils/ast_utils.py | 4 +- 5 files changed, 74 insertions(+), 72 deletions(-) diff --git a/pynestml/codegeneration/resources_nest/point_neuron/common/SynapseHeader.h.jinja2 b/pynestml/codegeneration/resources_nest/point_neuron/common/SynapseHeader.h.jinja2 index 2a120bf35..638d3f2f1 100644 --- a/pynestml/codegeneration/resources_nest/point_neuron/common/SynapseHeader.h.jinja2 +++ b/pynestml/codegeneration/resources_nest/point_neuron/common/SynapseHeader.h.jinja2 @@ -115,19 +115,22 @@ namespace {{names_namespace}} {%- endfor %} {%- endif %} } // end namespace; +} // end namespace nest; +namespace {{ synapseName }} +{ {%- if uses_numeric_solver %} {%- for s in 
utils.create_integrate_odes_combinations(astnode) %} extern "C" inline int {{synapseName}}_dynamics{% if s | length > 0 %}_{{ s }}{% endif %}( double, const double ode_state[], double f[], void* pnode ); {%- endfor %} {%- endif %} -class {{synapseName}}CommonSynapseProperties : public CommonSynapseProperties +class {{synapseName}}CommonSynapseProperties : public nest::CommonSynapseProperties { public: {{synapseName}}CommonSynapseProperties() - : CommonSynapseProperties() + : nest::CommonSynapseProperties() { {%- filter indent(width=8) %} {%- for parameter in synapse.get_parameter_symbols() %} @@ -147,7 +150,7 @@ public: */ void get_status( DictionaryDatum& d ) const { - CommonSynapseProperties::get_status( d ); + nest::CommonSynapseProperties::get_status( d ); {%- filter indent(width=8) %} {%- for parameter in synapse.get_parameter_symbols() %} @@ -165,9 +168,9 @@ public: /** * Set properties from the values given in dictionary. */ - void set_status( const DictionaryDatum& d, ConnectorModel& cm ) + void set_status( const DictionaryDatum& d, nest::ConnectorModel& cm ) { - CommonSynapseProperties::set_status( d, cm ); + nest::CommonSynapseProperties::set_status( d, cm ); {%- filter indent(width=8) %} {%- for parameter in synapse.get_parameter_symbols() %} @@ -185,19 +188,19 @@ public: if ( updateValue< NodeCollectionDatum >( d, names::volume_transmitter, vt_datum ) ) { {%- if not (nest_version.startswith("v2") or nest_version.startswith("v3.0") or nest_version.startswith("v3.1") or nest_version.startswith("v3.2") or nest_version.startswith("v3.3") or nest_version.startswith("v3.4")) %} - const size_t tid = kernel().vp_manager.get_thread_id(); + const size_t tid = nest::kernel().vp_manager.get_thread_id(); {%- else %} - const thread tid = kernel().vp_manager.get_thread_id(); + const thread tid = nest::kernel().vp_manager.get_thread_id(); {%- endif %} {%- if nest_version.startswith("v2") %} - Node* vt = kernel().node_manager.get_node( ( *vt_datum )[ 0 ], tid ); + 
nest::Node* vt = nest::kernel().node_manager.get_node( ( *vt_datum )[ 0 ], tid ); {%- else %} - Node* vt = kernel().node_manager.get_node_or_proxy( ( *vt_datum )[ 0 ], tid ); + nest::Node* vt = nest::kernel().node_manager.get_node_or_proxy( ( *vt_datum )[ 0 ], tid ); {%- endif %} vt_ = dynamic_cast< volume_transmitter* >( vt ); if ( vt_ == nullptr ) { - throw BadProperty( "Neuromodulatory source must be volume transmitter" ); + throw nest::BadProperty( "Neuromodulatory source must be volume transmitter" ); } } {%- endif %} @@ -333,7 +336,7 @@ public: template < typename targetidentifierT > -class {{synapseName}} : public Connection< targetidentifierT > +class {{synapseName}} : public nest::Connection< targetidentifierT > { {%- if paired_neuron_name | length > 0 %} typedef {{ paired_neuron_name }} post_neuron_t; @@ -474,12 +477,12 @@ public: // this line determines which common properties to use typedef {{synapseName}}CommonSynapseProperties CommonPropertiesType; - typedef Connection< targetidentifierT > ConnectionBase; + typedef nest::Connection< targetidentifierT > ConnectionBase; {%- if not (nest_version.startswith("v2") or nest_version.startswith("v3.0") or nest_version.startswith("v3.1") or nest_version.startswith("v3.2") or nest_version.startswith("v3.3") or nest_version.startswith("v3.4")) %} - static constexpr ConnectionModelProperties properties = ConnectionModelProperties::HAS_DELAY - | ConnectionModelProperties::IS_PRIMARY | ConnectionModelProperties::SUPPORTS_HPC - | ConnectionModelProperties::SUPPORTS_LBL; + static constexpr nest::ConnectionModelProperties properties = nest::ConnectionModelProperties::HAS_DELAY + | nest::ConnectionModelProperties::IS_PRIMARY | nest::ConnectionModelProperties::SUPPORTS_HPC + | nest::ConnectionModelProperties::SUPPORTS_LBL; {%- endif %} /** @@ -520,116 +523,116 @@ public: using ConnectionBase::get_target; - class ConnTestDummyNode : public ConnTestDummyNodeBase + class ConnTestDummyNode : public 
nest::ConnTestDummyNodeBase { public: // Ensure proper overriding of overloaded virtual functions. // Return values from functions are ignored. - using ConnTestDummyNodeBase::handles_test_event; + using nest::ConnTestDummyNodeBase::handles_test_event; {%- if not (nest_version.startswith("v2") or nest_version.startswith("v3.0") or nest_version.startswith("v3.1") or nest_version.startswith("v3.2") or nest_version.startswith("v3.3") or nest_version.startswith("v3.4")) %} size_t - handles_test_event( SpikeEvent&, size_t ) override + handles_test_event( nest::SpikeEvent&, size_t ) override {%- else %} port - handles_test_event( SpikeEvent&, rport ) override + handles_test_event( nest::SpikeEvent&, rport ) override {%- endif %} { {%- if nest_version.startswith("v2") or nest_version.startswith("v3.0") or nest_version.startswith("v3.1") or nest_version.startswith("v3.2") or nest_version.startswith("v3.3") %} - return invalid_port_; + return nest::invalid_port_; {%- else %} - return invalid_port; + return nest::invalid_port; {%- endif %} } {%- if not (nest_version.startswith("v2") or nest_version.startswith("v3.0") or nest_version.startswith("v3.1") or nest_version.startswith("v3.2") or nest_version.startswith("v3.3") or nest_version.startswith("v3.4")) %} size_t - handles_test_event( RateEvent&, size_t ) override + handles_test_event( nest::RateEvent&, size_t ) override {%- else %} port - handles_test_event( RateEvent&, rport ) override + handles_test_event( nest::RateEvent&, rport ) override {%- endif %} { {%- if nest_version.startswith("v2") or nest_version.startswith("v3.0") or nest_version.startswith("v3.1") or nest_version.startswith("v3.2") or nest_version.startswith("v3.3") %} - return invalid_port_; + return nest::invalid_port_; {%- else %} - return invalid_port; + return nest::invalid_port; {%- endif %} } {%- if not (nest_version.startswith("v2") or nest_version.startswith("v3.0") or nest_version.startswith("v3.1") or nest_version.startswith("v3.2") or 
nest_version.startswith("v3.3") or nest_version.startswith("v3.4")) %} size_t - handles_test_event( DataLoggingRequest&, size_t ) override + handles_test_event( nest::DataLoggingRequest&, size_t ) override {%- else %} port - handles_test_event( DataLoggingRequest&, rport ) override + handles_test_event( nest::DataLoggingRequest&, rport ) override {%- endif %} { {%- if nest_version.startswith("v2") or nest_version.startswith("v3.0") or nest_version.startswith("v3.1") or nest_version.startswith("v3.2") or nest_version.startswith("v3.3") %} - return invalid_port_; + return nest::invalid_port_; {%- else %} - return invalid_port; + return nest::invalid_port; {%- endif %} } {%- if not (nest_version.startswith("v2") or nest_version.startswith("v3.0") or nest_version.startswith("v3.1") or nest_version.startswith("v3.2") or nest_version.startswith("v3.3") or nest_version.startswith("v3.4")) %} size_t - handles_test_event( CurrentEvent&, size_t ) override + handles_test_event( nest::CurrentEvent&, size_t ) override {%- else %} port - handles_test_event( CurrentEvent&, rport ) override + handles_test_event( nest::CurrentEvent&, rport ) override {%- endif %} { {%- if nest_version.startswith("v2") or nest_version.startswith("v3.0") or nest_version.startswith("v3.1") or nest_version.startswith("v3.2") or nest_version.startswith("v3.3") %} - return invalid_port_; + return nest::invalid_port_; {%- else %} - return invalid_port; + return nest::invalid_port; {%- endif %} } {%- if not (nest_version.startswith("v2") or nest_version.startswith("v3.0") or nest_version.startswith("v3.1") or nest_version.startswith("v3.2") or nest_version.startswith("v3.3") or nest_version.startswith("v3.4")) %} size_t - handles_test_event( ConductanceEvent&, size_t ) override + handles_test_event( nest::ConductanceEvent&, size_t ) override {%- else %} port - handles_test_event( ConductanceEvent&, rport ) override + handles_test_event( nest::ConductanceEvent&, rport ) override {%- endif %} { {%- if 
nest_version.startswith("v2") or nest_version.startswith("v3.0") or nest_version.startswith("v3.1") or nest_version.startswith("v3.2") or nest_version.startswith("v3.3") %} - return invalid_port_; + return nest::invalid_port_; {%- else %} - return invalid_port; + return nest::invalid_port; {%- endif %} } {%- if not (nest_version.startswith("v2") or nest_version.startswith("v3.0") or nest_version.startswith("v3.1") or nest_version.startswith("v3.2") or nest_version.startswith("v3.3") or nest_version.startswith("v3.4")) %} size_t - handles_test_event( DoubleDataEvent&, size_t ) override + handles_test_event( nest::DoubleDataEvent&, size_t ) override {%- else %} port - handles_test_event( DoubleDataEvent&, rport ) override + handles_test_event( nest::DoubleDataEvent&, rport ) override {%- endif %} { {%- if nest_version.startswith("v2") or nest_version.startswith("v3.0") or nest_version.startswith("v3.1") or nest_version.startswith("v3.2") or nest_version.startswith("v3.3") %} - return invalid_port_; + return nest::invalid_port_; {%- else %} - return invalid_port; + return nest::invalid_port; {%- endif %} } {%- if not (nest_version.startswith("v2") or nest_version.startswith("v3.0") or nest_version.startswith("v3.1") or nest_version.startswith("v3.2") or nest_version.startswith("v3.3") or nest_version.startswith("v3.4")) %} size_t - handles_test_event( DSSpikeEvent&, size_t ) override + handles_test_event( nest::DSSpikeEvent&, size_t ) override {%- else %} port - handles_test_event( DSSpikeEvent&, rport ) override + handles_test_event( nest::DSSpikeEvent&, rport ) override {%- endif %} { {%- if nest_version.startswith("v2") or nest_version.startswith("v3.0") or nest_version.startswith("v3.1") or nest_version.startswith("v3.2") or nest_version.startswith("v3.3") %} - return invalid_port_; + return nest::invalid_port_; {%- else %} - return invalid_port; + return nest::invalid_port; {%- endif %} } {%- if not (nest_version.startswith("v2") or 
nest_version.startswith("v3.0") or nest_version.startswith("v3.1") or nest_version.startswith("v3.2") or nest_version.startswith("v3.3") or nest_version.startswith("v3.4")) %} size_t - handles_test_event( DSCurrentEvent&, size_t ) override + handles_test_event( nest::DSCurrentEvent&, size_t ) override {%- else %} port - handles_test_event( DSCurrentEvent&, rport ) override + handles_test_event( nest::DSCurrentEvent&, rport ) override {%- endif %} { {%- if nest_version.startswith("v2") or nest_version.startswith("v3.0") or nest_version.startswith("v3.1") or nest_version.startswith("v3.2") or nest_version.startswith("v3.3") %} - return invalid_port_; + return nest::invalid_port_; {%- else %} - return invalid_port; + return nest::invalid_port; {%- endif %} } }; {%- if synapse_weight_variable | length > 0 and synapse_weight_variable != "weight" %} @@ -643,7 +646,7 @@ public: inline void set_weight(double w) { {%- if isHomogeneous %} - throw BadProperty( + throw nest::BadProperty( "Setting of individual weights is not possible! The common weights can " "be changed via " "CopyModel()." ); @@ -665,14 +668,14 @@ public: {%- if not (nest_version.startswith("v2") or nest_version.startswith("v3.0") or nest_version.startswith("v3.1") or nest_version.startswith("v3.2") or nest_version.startswith("v3.3") or nest_version.startswith("v3.4")) %} void - check_connection( Node& s, - Node& t, + check_connection( nest::Node& s, + nest::Node& t, size_t receptor_type, const CommonPropertiesType& cp ) {%- else %} void - check_connection( Node& s, - Node& t, + check_connection( nest::Node& s, + nest::Node& t, rport receptor_type, const CommonPropertiesType& cp ) {%- endif %} @@ -693,7 +696,7 @@ public: if ( cp.vt_ == nullptr ) { - throw BadProperty( "No volume transmitter has been assigned to the dopamine synapse." ); + throw nest::BadProperty( "No volume transmitter has been assigned to the dopamine synapse." 
); } {%- endif %} @@ -740,7 +743,7 @@ void get_entry_from_continuous_variable_history(double t, {%- if not (nest_version.startswith("v2") or nest_version.startswith("v3.0") or nest_version.startswith("v3.1") or nest_version.startswith("v3.2") or nest_version.startswith("v3.3") or nest_version.startswith("v3.4")) %} {%- if not (nest_version.startswith("v3.5") or nest_version.startswith("v3.6")) %} bool - send( Event& e, const size_t tid, const {{synapseName}}CommonSynapseProperties& cp ) + send( nest::Event& e, const size_t tid, const {{synapseName}}CommonSynapseProperties& cp ) {%- else %} void send( Event& e, const size_t tid, const {{synapseName}}CommonSynapseProperties& cp ) @@ -825,7 +828,7 @@ void get_entry_from_continuous_variable_history(double t, const double minus_dt = t_lastspike_ - ( start->t_ + __dendritic_delay ); // get_history() should make sure that ``start->t_ > t_lastspike_ - dendritic_delay``, i.e. minus_dt < 0 - assert( minus_dt < -kernel().connection_manager.get_stdp_eps() ); + assert( minus_dt < -nest::kernel().connection_manager.get_stdp_eps() ); {%- if paired_neuron_name is not none and paired_neuron_name|length > 0 and paired_neuron.state_vars_that_need_continuous_buffering | length > 0 %} /** @@ -1020,7 +1023,7 @@ void get_entry_from_continuous_variable_history(double t, void get_status( DictionaryDatum& d ) const; - void set_status( const DictionaryDatum& d, ConnectorModel& cm ); + void set_status( const DictionaryDatum& d, nest::ConnectorModel& cm ); {%- if norm_rng %} {%- if nest_version.startswith("v2") %} @@ -1042,7 +1045,7 @@ register_{{ synapseName }}( const std::string& name ) {%- if not (nest_version.startswith("v2") or nest_version.startswith("v3.0") or nest_version.startswith("v3.1") or nest_version.startswith("v3.2") or nest_version.startswith("v3.3") or nest_version.startswith("v3.4")) %} template < typename targetidentifierT > -constexpr ConnectionModelProperties {{synapseName}}< targetidentifierT >::properties; +constexpr 
nest::ConnectionModelProperties {{synapseName}}< targetidentifierT >::properties; {%- endif %} {%- if vt_ports is defined and vt_ports|length > 0 %} @@ -1060,7 +1063,7 @@ void // process dopa spikes in (t0, t1] // propagate weight from t0 to t1 if ( ( vt_spikes.size() > vt_spikes_idx_ + 1 ) - and ( t1 - vt_spikes[ vt_spikes_idx_ + 1 ].spike_time_ > -1.0 * kernel().connection_manager.get_stdp_eps() ) ) + and ( t1 - vt_spikes[ vt_spikes_idx_ + 1 ].spike_time_ > -1.0 * nest::kernel().connection_manager.get_stdp_eps() ) ) { // there is at least 1 dopa spike in (t0, t1] // propagate up to first dopa spike @@ -1080,7 +1083,7 @@ void // process remaining dopa spikes in (t0, t1] double cd; while ( ( vt_spikes.size() > vt_spikes_idx_ + 1 ) - and ( t1 - vt_spikes[ vt_spikes_idx_ + 1 ].spike_time_ > -1.0 * kernel().connection_manager.get_stdp_eps() ) ) + and ( t1 - vt_spikes[ vt_spikes_idx_ + 1 ].spike_time_ > -1.0 * nest::kernel().connection_manager.get_stdp_eps() ) ) { // propagate up to next dopa spike update_internal_state_(vt_spikes[ vt_spikes_idx_ ].spike_time_, @@ -1130,7 +1133,7 @@ void {{synapseName}}< targetidentifierT >::get_status( DictionaryDatum& __d ) const { ConnectionBase::get_status( __d ); - def< long >( __d, names::size_of, sizeof( *this ) ); + def< long >( __d, nest::names::size_of, sizeof( *this ) ); // parameters and state variables {%- filter indent(2,True) %} @@ -1140,11 +1143,11 @@ void {%- if not isHomogeneous %} {%- if variable.get_name() == nest_codegen_opt_delay_variable %} {#- special case for NEST special variable delay #} -def< {{ declarations.print_variable_type(variable_symbol) }} >( __d, names::delay, {{ printer.print(variable) }} ); // NEST special case for delay variable +def< {{ declarations.print_variable_type(variable_symbol) }} >( __d, nest::names::delay, {{ printer.print(variable) }} ); // NEST special case for delay variable def(__d, nest::{{ synapseName }}_names::_{{ nest_codegen_opt_delay_variable }}, {{ printer.print(variable) 
}}); {#- special case for NEST special variable weight #} {%- elif variable.get_name() == synapse_weight_variable %} -def< {{ declarations.print_variable_type(variable_symbol) }} >( __d, names::weight, {{ printer.print(variable) }} ); // NEST special case for weight variable +def< {{ declarations.print_variable_type(variable_symbol) }} >( __d, nest::names::weight, {{ printer.print(variable) }} ); // NEST special case for weight variable def< {{ declarations.print_variable_type(variable_symbol) }} >( __d, nest::{{ synapseName }}_names::_{{ synapse_weight_variable }}, {{ printer.print(variable) }} ); // NEST special case for weight variable {%- else %} {%- include "directives_cpp/WriteInDictionary.jinja2" %} @@ -1157,19 +1160,19 @@ def< {{ declarations.print_variable_type(variable_symbol) }} >( __d, nest::{{ sy template < typename targetidentifierT > void {{synapseName}}< targetidentifierT >::set_status( const DictionaryDatum& __d, - ConnectorModel& cm ) + nest::ConnectorModel& cm ) { {%- if synapse_weight_variable|length > 0 and synapse_weight_variable != "weight" %} if (__d->known(nest::{{ synapseName }}_names::_{{ synapse_weight_variable }}) and __d->known(nest::names::weight)) { - throw BadProperty( "To prevent inconsistencies, please set either 'weight' or '{{ synapse_weight_variable }}' variable; not both at the same time." ); + throw nest::BadProperty( "To prevent inconsistencies, please set either 'weight' or '{{ synapse_weight_variable }}' variable; not both at the same time." ); } {%- endif %} {%- if nest_codegen_opt_delay_variable != "delay" %} if (__d->known(nest::{{ synapseName }}_names::_{{ nest_codegen_opt_delay_variable }}) and __d->known(nest::names::delay)) { - throw BadProperty( "To prevent inconsistencies, please set either 'delay' or '{{ nest_codegen_opt_delay_variable }}' variable; not both at the same time." 
); + throw nest::BadProperty( "To prevent inconsistencies, please set either 'delay' or '{{ nest_codegen_opt_delay_variable }}' variable; not both at the same time." ); } {%- endif %} diff --git a/pynestml/codegeneration/resources_nest/point_neuron/directives_cpp/GSLDifferentiationFunction.jinja2 b/pynestml/codegeneration/resources_nest/point_neuron/directives_cpp/GSLDifferentiationFunction.jinja2 index eb69c9d9e..52641ea04 100644 --- a/pynestml/codegeneration/resources_nest/point_neuron/directives_cpp/GSLDifferentiationFunction.jinja2 +++ b/pynestml/codegeneration/resources_nest/point_neuron/directives_cpp/GSLDifferentiationFunction.jinja2 @@ -14,7 +14,6 @@ extern "C" inline int {{modelName}}_dynamics{% if ast.get_args() | length > 0 %} assert( pnode ); const {{neuronName}}& node = *( reinterpret_cast< {{neuronName}}* >( pnode ) ); {%- else %} - typedef nest::State_ State_; // get access to node so we can almost work as in a member function assert( pnode ); const Parameters_& node = *( reinterpret_cast< Parameters_* >( pnode ) ); diff --git a/pynestml/codegeneration/resources_nest/point_neuron/setup/common/ModuleClass.jinja2 b/pynestml/codegeneration/resources_nest/point_neuron/setup/common/ModuleClass.jinja2 index 3f6646d42..0a69b9ad8 100644 --- a/pynestml/codegeneration/resources_nest/point_neuron/setup/common/ModuleClass.jinja2 +++ b/pynestml/codegeneration/resources_nest/point_neuron/setup/common/ModuleClass.jinja2 @@ -132,7 +132,7 @@ void {%- if synapses %} // register synapses {%- for synapse in synapses %} - nest::register_connection_model< nest::{{synapse.get_name()}} >( "{{synapse.get_name()}}" ); + {{synapse.get_name()}}::register_connection_model< nest::{{synapse.get_name()}} >( "{{synapse.get_name()}}" ); {%- endfor %} {%- endif %} } // {{moduleName}}::init() diff --git a/pynestml/codegeneration/resources_nest/point_neuron/setup/common/ModuleClassMaster.jinja2 b/pynestml/codegeneration/resources_nest/point_neuron/setup/common/ModuleClassMaster.jinja2 
index fe2d49582..fb472263f 100644 --- a/pynestml/codegeneration/resources_nest/point_neuron/setup/common/ModuleClassMaster.jinja2 +++ b/pynestml/codegeneration/resources_nest/point_neuron/setup/common/ModuleClassMaster.jinja2 @@ -78,7 +78,7 @@ void {{moduleName}}::initialize() {%- if synapses %} // register synapses {%- for synapse in synapses %} - nest::register_{{synapse.get_name()}}( "{{synapse.get_name()}}" ); + {{synapse.get_name()}}::register_{{synapse.get_name()}}( "{{synapse.get_name()}}" ); {%- endfor %} {%- endif %} } \ No newline at end of file diff --git a/pynestml/utils/ast_utils.py b/pynestml/utils/ast_utils.py index 61a509f84..117386709 100644 --- a/pynestml/utils/ast_utils.py +++ b/pynestml/utils/ast_utils.py @@ -2566,8 +2566,8 @@ def visit_variable(self, node): visitor._numeric_state_variables = numeric_state_variable_names model.accept(visitor) - if "moved_spike_updates" in dir(model): - for expr in model.moved_spike_updates: + if "extra_on_emit_spike_stmts_from_synapse" in dir(model): + for expr in model.extra_on_emit_spike_stmts_from_synapse: expr.accept(visitor) if update_expressions: From b3dc52bf3b5a179bbc0504641e79dd2f9ced66d9 Mon Sep 17 00:00:00 2001 From: Pooja Babu Date: Tue, 8 Jul 2025 14:52:51 +0200 Subject: [PATCH 12/19] Fix test failures --- .../common/SynapseHeader.h.jinja2 | 22 +++++++++---------- .../setup/common/ModuleClass.jinja2 | 2 +- 2 files changed, 12 insertions(+), 12 deletions(-) diff --git a/pynestml/codegeneration/resources_nest/point_neuron/common/SynapseHeader.h.jinja2 b/pynestml/codegeneration/resources_nest/point_neuron/common/SynapseHeader.h.jinja2 index 638d3f2f1..8c8d0accc 100644 --- a/pynestml/codegeneration/resources_nest/point_neuron/common/SynapseHeader.h.jinja2 +++ b/pynestml/codegeneration/resources_nest/point_neuron/common/SynapseHeader.h.jinja2 @@ -185,7 +185,7 @@ public: {%- if vt_ports is defined and vt_ports|length > 0 %} NodeCollectionDatum vt_datum; - if ( updateValue< NodeCollectionDatum >( d, 
names::volume_transmitter, vt_datum ) ) + if ( updateValue< NodeCollectionDatum >( d, nest::names::volume_transmitter, vt_datum ) ) { {%- if not (nest_version.startswith("v2") or nest_version.startswith("v3.0") or nest_version.startswith("v3.1") or nest_version.startswith("v3.2") or nest_version.startswith("v3.3") or nest_version.startswith("v3.4")) %} const size_t tid = nest::kernel().vp_manager.get_thread_id(); @@ -197,7 +197,7 @@ public: {%- else %} nest::Node* vt = nest::kernel().node_manager.get_node_or_proxy( ( *vt_datum )[ 0 ], tid ); {%- endif %} - vt_ = dynamic_cast< volume_transmitter* >( vt ); + vt_ = dynamic_cast< nest::volume_transmitter* >( vt ); if ( vt_ == nullptr ) { throw nest::BadProperty( "Neuromodulatory source must be volume transmitter" ); @@ -218,7 +218,7 @@ public: {%- endfor %} {%- if vt_ports is defined and vt_ports|length > 0 %} - volume_transmitter* vt_ = nullptr; + nest::volume_transmitter* vt_ = nullptr; inline long get_vt_node_id() const { @@ -346,12 +346,12 @@ class {{synapseName}} : public nest::Connection< targetidentifierT > public: {%- if not (nest_version.startswith("v2") or nest_version.startswith("v3.0") or nest_version.startswith("v3.1") or nest_version.startswith("v3.2") or nest_version.startswith("v3.3") or nest_version.startswith("v3.4")) %} void trigger_update_weight( size_t t, - const std::vector< spikecounter >& vt_spikes, + const std::vector< nest::spikecounter >& vt_spikes, double t_trig, const {{synapseName}}CommonSynapseProperties& cp ); {%- else %} void trigger_update_weight( thread t, - const std::vector< spikecounter >& vt_spikes, + const std::vector< nest::spikecounter >& vt_spikes, double t_trig, const {{synapseName}}CommonSynapseProperties& cp ); {%- endif %} @@ -505,7 +505,7 @@ public: {%- if vt_ports is defined and vt_ports|length > 0 %} {%- set vt_port = vt_ports[0] %} - void process_{{ vt_port }}_spikes_( const std::vector< spikecounter >& vt_spikes, + void process_{{ vt_port }}_spikes_( const 
std::vector< nest::spikecounter >& vt_spikes, double t0, double t1, const {{synapseName}}CommonSynapseProperties& cp ); @@ -767,7 +767,7 @@ void get_entry_from_continuous_variable_history(double t, {%- if vt_ports is defined and vt_ports|length > 0 %} // get history of volume transmitter spikes - const std::vector< spikecounter >& vt_spikes = cp.vt_->deliver_spikes(); + const std::vector< nest::spikecounter >& vt_spikes = cp.vt_->deliver_spikes(); {%- endif %} // use accessor functions (inherited from Connection< >) to obtain delay and target @@ -775,7 +775,7 @@ void get_entry_from_continuous_variable_history(double t, {{ paired_neuron_name }}* __target = static_cast< {{ paired_neuron_name }}* >(get_target(tid)); assert(__target); {%- else %} - Node* __target = get_target( tid ); + nest::Node* __target = get_target( tid ); {%- endif %} const double __dendritic_delay = get_delay(); const bool pre_before_post_update = {{pre_before_post_update}}; @@ -1052,7 +1052,7 @@ constexpr nest::ConnectionModelProperties {{synapseName}}< targetidentifierT >:: {%- set vt_port = vt_ports[0] %} template < typename targetidentifierT > void -{{synapseName}}< targetidentifierT >::process_{{vt_port}}_spikes_( const std::vector< spikecounter >& vt_spikes, +{{synapseName}}< targetidentifierT >::process_{{vt_port}}_spikes_( const std::vector< nest::spikecounter >& vt_spikes, double t0, double t1, const {{synapseName}}CommonSynapseProperties& cp ) @@ -1467,12 +1467,12 @@ template < typename targetidentifierT > {%- if not (nest_version.startswith("v2") or nest_version.startswith("v3.0") or nest_version.startswith("v3.1") or nest_version.startswith("v3.2") or nest_version.startswith("v3.3") or nest_version.startswith("v3.4")) %} inline void {{synapseName}}< targetidentifierT >::trigger_update_weight( size_t t, - const std::vector< spikecounter >& vt_spikes, + const std::vector< nest::spikecounter >& vt_spikes, const double t_trig, const CommonPropertiesType& cp ) {%- else %} {{synapseName}}< 
targetidentifierT >::trigger_update_weight( thread t, - const std::vector< spikecounter >& vt_spikes, + const std::vector< nest::spikecounter >& vt_spikes, const double t_trig, const CommonPropertiesType& cp ) {%- endif %} diff --git a/pynestml/codegeneration/resources_nest/point_neuron/setup/common/ModuleClass.jinja2 b/pynestml/codegeneration/resources_nest/point_neuron/setup/common/ModuleClass.jinja2 index 0a69b9ad8..da7b375dc 100644 --- a/pynestml/codegeneration/resources_nest/point_neuron/setup/common/ModuleClass.jinja2 +++ b/pynestml/codegeneration/resources_nest/point_neuron/setup/common/ModuleClass.jinja2 @@ -132,7 +132,7 @@ void {%- if synapses %} // register synapses {%- for synapse in synapses %} - {{synapse.get_name()}}::register_connection_model< nest::{{synapse.get_name()}} >( "{{synapse.get_name()}}" ); + {{synapse.get_name()}}::register_connection_model< {{synapse.get_name()}}::{{synapse.get_name()}} >( "{{synapse.get_name()}}" ); {%- endfor %} {%- endif %} } // {{moduleName}}::init() From bda28b09b7781f605661d0da66bfdbdba3f8e1f6 Mon Sep 17 00:00:00 2001 From: Pooja Babu Date: Wed, 9 Jul 2025 14:21:55 +0200 Subject: [PATCH 13/19] Modify templates to fix namespaces in synapse models --- .../common/SynapseHeader.h.jinja2 | 202 +++++++++--------- .../setup/common/ModuleClass.jinja2 | 2 +- .../setup/common/ModuleClassMaster.jinja2 | 2 +- 3 files changed, 103 insertions(+), 103 deletions(-) diff --git a/pynestml/codegeneration/resources_nest/point_neuron/common/SynapseHeader.h.jinja2 b/pynestml/codegeneration/resources_nest/point_neuron/common/SynapseHeader.h.jinja2 index 8c8d0accc..5cba91889 100644 --- a/pynestml/codegeneration/resources_nest/point_neuron/common/SynapseHeader.h.jinja2 +++ b/pynestml/codegeneration/resources_nest/point_neuron/common/SynapseHeader.h.jinja2 @@ -114,8 +114,7 @@ namespace {{names_namespace}} const Name _{{sym.get_symbol_name()}}( "{{sym.get_symbol_name()}}" ); {%- endfor %} {%- endif %} -} // end namespace; -} // end 
namespace nest; +} // end namespace {{names_namespace}}; namespace {{ synapseName }} { @@ -125,12 +124,12 @@ extern "C" inline int {{synapseName}}_dynamics{% if s | length > 0 %}_{{ s }}{% {%- endfor %} {%- endif %} -class {{synapseName}}CommonSynapseProperties : public nest::CommonSynapseProperties +class {{synapseName}}CommonSynapseProperties : public CommonSynapseProperties { public: {{synapseName}}CommonSynapseProperties() - : nest::CommonSynapseProperties() + : CommonSynapseProperties() { {%- filter indent(width=8) %} {%- for parameter in synapse.get_parameter_symbols() %} @@ -150,7 +149,7 @@ public: */ void get_status( DictionaryDatum& d ) const { - nest::CommonSynapseProperties::get_status( d ); + CommonSynapseProperties::get_status( d ); {%- filter indent(width=8) %} {%- for parameter in synapse.get_parameter_symbols() %} @@ -168,9 +167,9 @@ public: /** * Set properties from the values given in dictionary. */ - void set_status( const DictionaryDatum& d, nest::ConnectorModel& cm ) + void set_status( const DictionaryDatum& d, ConnectorModel& cm ) { - nest::CommonSynapseProperties::set_status( d, cm ); + CommonSynapseProperties::set_status( d, cm ); {%- filter indent(width=8) %} {%- for parameter in synapse.get_parameter_symbols() %} @@ -185,22 +184,22 @@ public: {%- if vt_ports is defined and vt_ports|length > 0 %} NodeCollectionDatum vt_datum; - if ( updateValue< NodeCollectionDatum >( d, nest::names::volume_transmitter, vt_datum ) ) + if ( updateValue< NodeCollectionDatum >( d, names::volume_transmitter, vt_datum ) ) { {%- if not (nest_version.startswith("v2") or nest_version.startswith("v3.0") or nest_version.startswith("v3.1") or nest_version.startswith("v3.2") or nest_version.startswith("v3.3") or nest_version.startswith("v3.4")) %} - const size_t tid = nest::kernel().vp_manager.get_thread_id(); + const size_t tid = kernel().vp_manager.get_thread_id(); {%- else %} - const thread tid = nest::kernel().vp_manager.get_thread_id(); + const thread tid = 
kernel().vp_manager.get_thread_id(); {%- endif %} {%- if nest_version.startswith("v2") %} - nest::Node* vt = nest::kernel().node_manager.get_node( ( *vt_datum )[ 0 ], tid ); + Node* vt = kernel().node_manager.get_node( ( *vt_datum )[ 0 ], tid ); {%- else %} - nest::Node* vt = nest::kernel().node_manager.get_node_or_proxy( ( *vt_datum )[ 0 ], tid ); + Node* vt = kernel().node_manager.get_node_or_proxy( ( *vt_datum )[ 0 ], tid ); {%- endif %} - vt_ = dynamic_cast< nest::volume_transmitter* >( vt ); + vt_ = dynamic_cast< volume_transmitter* >( vt ); if ( vt_ == nullptr ) { - throw nest::BadProperty( "Neuromodulatory source must be volume transmitter" ); + throw BadProperty( "Neuromodulatory source must be volume transmitter" ); } } {%- endif %} @@ -218,7 +217,7 @@ public: {%- endfor %} {%- if vt_ports is defined and vt_ports|length > 0 %} - nest::volume_transmitter* vt_ = nullptr; + volume_transmitter* vt_ = nullptr; inline long get_vt_node_id() const { @@ -336,7 +335,7 @@ public: template < typename targetidentifierT > -class {{synapseName}} : public nest::Connection< targetidentifierT > +class {{synapseName}} : public Connection< targetidentifierT > { {%- if paired_neuron_name | length > 0 %} typedef {{ paired_neuron_name }} post_neuron_t; @@ -346,12 +345,12 @@ class {{synapseName}} : public nest::Connection< targetidentifierT > public: {%- if not (nest_version.startswith("v2") or nest_version.startswith("v3.0") or nest_version.startswith("v3.1") or nest_version.startswith("v3.2") or nest_version.startswith("v3.3") or nest_version.startswith("v3.4")) %} void trigger_update_weight( size_t t, - const std::vector< nest::spikecounter >& vt_spikes, + const std::vector< spikecounter >& vt_spikes, double t_trig, const {{synapseName}}CommonSynapseProperties& cp ); {%- else %} void trigger_update_weight( thread t, - const std::vector< nest::spikecounter >& vt_spikes, + const std::vector< spikecounter >& vt_spikes, double t_trig, const {{synapseName}}CommonSynapseProperties& 
cp ); {%- endif %} @@ -477,12 +476,12 @@ public: // this line determines which common properties to use typedef {{synapseName}}CommonSynapseProperties CommonPropertiesType; - typedef nest::Connection< targetidentifierT > ConnectionBase; + typedef Connection< targetidentifierT > ConnectionBase; {%- if not (nest_version.startswith("v2") or nest_version.startswith("v3.0") or nest_version.startswith("v3.1") or nest_version.startswith("v3.2") or nest_version.startswith("v3.3") or nest_version.startswith("v3.4")) %} - static constexpr nest::ConnectionModelProperties properties = nest::ConnectionModelProperties::HAS_DELAY - | nest::ConnectionModelProperties::IS_PRIMARY | nest::ConnectionModelProperties::SUPPORTS_HPC - | nest::ConnectionModelProperties::SUPPORTS_LBL; + static constexpr ConnectionModelProperties properties = ConnectionModelProperties::HAS_DELAY + | ConnectionModelProperties::IS_PRIMARY | ConnectionModelProperties::SUPPORTS_HPC + | ConnectionModelProperties::SUPPORTS_LBL; {%- endif %} /** @@ -505,7 +504,7 @@ public: {%- if vt_ports is defined and vt_ports|length > 0 %} {%- set vt_port = vt_ports[0] %} - void process_{{ vt_port }}_spikes_( const std::vector< nest::spikecounter >& vt_spikes, + void process_{{ vt_port }}_spikes_( const std::vector< spikecounter >& vt_spikes, double t0, double t1, const {{synapseName}}CommonSynapseProperties& cp ); @@ -523,116 +522,116 @@ public: using ConnectionBase::get_target; - class ConnTestDummyNode : public nest::ConnTestDummyNodeBase + class ConnTestDummyNode : public ConnTestDummyNodeBase { public: // Ensure proper overriding of overloaded virtual functions. // Return values from functions are ignored. 
- using nest::ConnTestDummyNodeBase::handles_test_event; + using ConnTestDummyNodeBase::handles_test_event; {%- if not (nest_version.startswith("v2") or nest_version.startswith("v3.0") or nest_version.startswith("v3.1") or nest_version.startswith("v3.2") or nest_version.startswith("v3.3") or nest_version.startswith("v3.4")) %} size_t - handles_test_event( nest::SpikeEvent&, size_t ) override + handles_test_event( SpikeEvent&, size_t ) override {%- else %} port - handles_test_event( nest::SpikeEvent&, rport ) override + handles_test_event( SpikeEvent&, rport ) override {%- endif %} { {%- if nest_version.startswith("v2") or nest_version.startswith("v3.0") or nest_version.startswith("v3.1") or nest_version.startswith("v3.2") or nest_version.startswith("v3.3") %} - return nest::invalid_port_; + return invalid_port_; {%- else %} - return nest::invalid_port; + return invalid_port; {%- endif %} } {%- if not (nest_version.startswith("v2") or nest_version.startswith("v3.0") or nest_version.startswith("v3.1") or nest_version.startswith("v3.2") or nest_version.startswith("v3.3") or nest_version.startswith("v3.4")) %} size_t - handles_test_event( nest::RateEvent&, size_t ) override + handles_test_event( RateEvent&, size_t ) override {%- else %} port - handles_test_event( nest::RateEvent&, rport ) override + handles_test_event( RateEvent&, rport ) override {%- endif %} { {%- if nest_version.startswith("v2") or nest_version.startswith("v3.0") or nest_version.startswith("v3.1") or nest_version.startswith("v3.2") or nest_version.startswith("v3.3") %} - return nest::invalid_port_; + return invalid_port_; {%- else %} - return nest::invalid_port; + return invalid_port; {%- endif %} } {%- if not (nest_version.startswith("v2") or nest_version.startswith("v3.0") or nest_version.startswith("v3.1") or nest_version.startswith("v3.2") or nest_version.startswith("v3.3") or nest_version.startswith("v3.4")) %} size_t - handles_test_event( nest::DataLoggingRequest&, size_t ) override + 
handles_test_event( DataLoggingRequest&, size_t ) override {%- else %} port - handles_test_event( nest::DataLoggingRequest&, rport ) override + handles_test_event( DataLoggingRequest&, rport ) override {%- endif %} { {%- if nest_version.startswith("v2") or nest_version.startswith("v3.0") or nest_version.startswith("v3.1") or nest_version.startswith("v3.2") or nest_version.startswith("v3.3") %} - return nest::invalid_port_; + return invalid_port_; {%- else %} - return nest::invalid_port; + return invalid_port; {%- endif %} } {%- if not (nest_version.startswith("v2") or nest_version.startswith("v3.0") or nest_version.startswith("v3.1") or nest_version.startswith("v3.2") or nest_version.startswith("v3.3") or nest_version.startswith("v3.4")) %} size_t - handles_test_event( nest::CurrentEvent&, size_t ) override + handles_test_event( CurrentEvent&, size_t ) override {%- else %} port - handles_test_event( nest::CurrentEvent&, rport ) override + handles_test_event( CurrentEvent&, rport ) override {%- endif %} { {%- if nest_version.startswith("v2") or nest_version.startswith("v3.0") or nest_version.startswith("v3.1") or nest_version.startswith("v3.2") or nest_version.startswith("v3.3") %} - return nest::invalid_port_; + return invalid_port_; {%- else %} - return nest::invalid_port; + return invalid_port; {%- endif %} } {%- if not (nest_version.startswith("v2") or nest_version.startswith("v3.0") or nest_version.startswith("v3.1") or nest_version.startswith("v3.2") or nest_version.startswith("v3.3") or nest_version.startswith("v3.4")) %} size_t - handles_test_event( nest::ConductanceEvent&, size_t ) override + handles_test_event( ConductanceEvent&, size_t ) override {%- else %} port - handles_test_event( nest::ConductanceEvent&, rport ) override + handles_test_event( ConductanceEvent&, rport ) override {%- endif %} { {%- if nest_version.startswith("v2") or nest_version.startswith("v3.0") or nest_version.startswith("v3.1") or nest_version.startswith("v3.2") or 
nest_version.startswith("v3.3") %} - return nest::invalid_port_; + return invalid_port_; {%- else %} - return nest::invalid_port; + return invalid_port; {%- endif %} } {%- if not (nest_version.startswith("v2") or nest_version.startswith("v3.0") or nest_version.startswith("v3.1") or nest_version.startswith("v3.2") or nest_version.startswith("v3.3") or nest_version.startswith("v3.4")) %} size_t - handles_test_event( nest::DoubleDataEvent&, size_t ) override + handles_test_event( DoubleDataEvent&, size_t ) override {%- else %} port - handles_test_event( nest::DoubleDataEvent&, rport ) override + handles_test_event( DoubleDataEvent&, rport ) override {%- endif %} { {%- if nest_version.startswith("v2") or nest_version.startswith("v3.0") or nest_version.startswith("v3.1") or nest_version.startswith("v3.2") or nest_version.startswith("v3.3") %} - return nest::invalid_port_; + return invalid_port_; {%- else %} - return nest::invalid_port; + return invalid_port; {%- endif %} } {%- if not (nest_version.startswith("v2") or nest_version.startswith("v3.0") or nest_version.startswith("v3.1") or nest_version.startswith("v3.2") or nest_version.startswith("v3.3") or nest_version.startswith("v3.4")) %} size_t - handles_test_event( nest::DSSpikeEvent&, size_t ) override + handles_test_event( DSSpikeEvent&, size_t ) override {%- else %} port - handles_test_event( nest::DSSpikeEvent&, rport ) override + handles_test_event( DSSpikeEvent&, rport ) override {%- endif %} { {%- if nest_version.startswith("v2") or nest_version.startswith("v3.0") or nest_version.startswith("v3.1") or nest_version.startswith("v3.2") or nest_version.startswith("v3.3") %} - return nest::invalid_port_; + return invalid_port_; {%- else %} - return nest::invalid_port; + return invalid_port; {%- endif %} } {%- if not (nest_version.startswith("v2") or nest_version.startswith("v3.0") or nest_version.startswith("v3.1") or nest_version.startswith("v3.2") or nest_version.startswith("v3.3") or 
nest_version.startswith("v3.4")) %} size_t - handles_test_event( nest::DSCurrentEvent&, size_t ) override + handles_test_event( DSCurrentEvent&, size_t ) override {%- else %} port - handles_test_event( nest::DSCurrentEvent&, rport ) override + handles_test_event( DSCurrentEvent&, rport ) override {%- endif %} { {%- if nest_version.startswith("v2") or nest_version.startswith("v3.0") or nest_version.startswith("v3.1") or nest_version.startswith("v3.2") or nest_version.startswith("v3.3") %} - return nest::invalid_port_; + return invalid_port_; {%- else %} - return nest::invalid_port; + return invalid_port; {%- endif %} } }; {%- if synapse_weight_variable | length > 0 and synapse_weight_variable != "weight" %} @@ -646,7 +645,7 @@ public: inline void set_weight(double w) { {%- if isHomogeneous %} - throw nest::BadProperty( + throw BadProperty( "Setting of individual weights is not possible! The common weights can " "be changed via " "CopyModel()." ); @@ -668,14 +667,14 @@ public: {%- if not (nest_version.startswith("v2") or nest_version.startswith("v3.0") or nest_version.startswith("v3.1") or nest_version.startswith("v3.2") or nest_version.startswith("v3.3") or nest_version.startswith("v3.4")) %} void - check_connection( nest::Node& s, - nest::Node& t, + check_connection( Node& s, + Node& t, size_t receptor_type, const CommonPropertiesType& cp ) {%- else %} void - check_connection( nest::Node& s, - nest::Node& t, + check_connection( Node& s, + Node& t, rport receptor_type, const CommonPropertiesType& cp ) {%- endif %} @@ -696,7 +695,7 @@ public: if ( cp.vt_ == nullptr ) { - throw nest::BadProperty( "No volume transmitter has been assigned to the dopamine synapse." ); + throw BadProperty( "No volume transmitter has been assigned to the dopamine synapse." 
); } {%- endif %} @@ -725,7 +724,7 @@ void get_entry_from_continuous_variable_history(double t, runner = start; while ( runner != finish ) { - if ( fabs( t - runner->t_ ) < nest::kernel().connection_manager.get_stdp_eps() ) + if ( fabs( t - runner->t_ ) < kernel().connection_manager.get_stdp_eps() ) { histentry = *runner; return; @@ -743,7 +742,7 @@ void get_entry_from_continuous_variable_history(double t, {%- if not (nest_version.startswith("v2") or nest_version.startswith("v3.0") or nest_version.startswith("v3.1") or nest_version.startswith("v3.2") or nest_version.startswith("v3.3") or nest_version.startswith("v3.4")) %} {%- if not (nest_version.startswith("v3.5") or nest_version.startswith("v3.6")) %} bool - send( nest::Event& e, const size_t tid, const {{synapseName}}CommonSynapseProperties& cp ) + send( Event& e, const size_t tid, const {{synapseName}}CommonSynapseProperties& cp ) {%- else %} void send( Event& e, const size_t tid, const {{synapseName}}CommonSynapseProperties& cp ) @@ -753,7 +752,7 @@ void get_entry_from_continuous_variable_history(double t, send( Event& e, const thread tid, const {{synapseName}}CommonSynapseProperties& cp ) {%- endif %} { - const double __timestep = nest::Time::get_resolution().get_ms(); // do not remove, this is necessary for the timestep() function + const double __timestep = Time::get_resolution().get_ms(); // do not remove, this is necessary for the timestep() function auto get_thread = [tid]() { @@ -767,7 +766,7 @@ void get_entry_from_continuous_variable_history(double t, {%- if vt_ports is defined and vt_ports|length > 0 %} // get history of volume transmitter spikes - const std::vector< nest::spikecounter >& vt_spikes = cp.vt_->deliver_spikes(); + const std::vector< spikecounter >& vt_spikes = cp.vt_->deliver_spikes(); {%- endif %} // use accessor functions (inherited from Connection< >) to obtain delay and target @@ -775,7 +774,7 @@ void get_entry_from_continuous_variable_history(double t, {{ paired_neuron_name }}* 
__target = static_cast< {{ paired_neuron_name }}* >(get_target(tid)); assert(__target); {%- else %} - nest::Node* __target = get_target( tid ); + Node* __target = get_target( tid ); {%- endif %} const double __dendritic_delay = get_delay(); const bool pre_before_post_update = {{pre_before_post_update}}; @@ -828,7 +827,7 @@ void get_entry_from_continuous_variable_history(double t, const double minus_dt = t_lastspike_ - ( start->t_ + __dendritic_delay ); // get_history() should make sure that ``start->t_ > t_lastspike_ - dendritic_delay``, i.e. minus_dt < 0 - assert( minus_dt < -nest::kernel().connection_manager.get_stdp_eps() ); + assert( minus_dt < -kernel().connection_manager.get_stdp_eps() ); {%- if paired_neuron_name is not none and paired_neuron_name|length > 0 and paired_neuron.state_vars_that_need_continuous_buffering | length > 0 %} /** @@ -1023,13 +1022,13 @@ void get_entry_from_continuous_variable_history(double t, void get_status( DictionaryDatum& d ) const; - void set_status( const DictionaryDatum& d, nest::ConnectorModel& cm ); + void set_status( const DictionaryDatum& d, ConnectorModel& cm ); {%- if norm_rng %} {%- if nest_version.startswith("v2") %} librandom::NormalRandomDev normal_dev_; //!< random deviate generator {%- else %} - nest::normal_distribution normal_dev_; //!< random deviate generator + normal_distribution normal_dev_; //!< random deviate generator {%- endif %} {%- endif %} }; @@ -1039,20 +1038,20 @@ void get_entry_from_continuous_variable_history(double t, void register_{{ synapseName }}( const std::string& name ) { - nest::register_connection_model< {{ synapseName }} >( name ); + register_connection_model< {{ synapseName }} >( name ); } {%- endif %} {%- if not (nest_version.startswith("v2") or nest_version.startswith("v3.0") or nest_version.startswith("v3.1") or nest_version.startswith("v3.2") or nest_version.startswith("v3.3") or nest_version.startswith("v3.4")) %} template < typename targetidentifierT > -constexpr 
nest::ConnectionModelProperties {{synapseName}}< targetidentifierT >::properties; +constexpr ConnectionModelProperties {{synapseName}}< targetidentifierT >::properties; {%- endif %} {%- if vt_ports is defined and vt_ports|length > 0 %} {%- set vt_port = vt_ports[0] %} template < typename targetidentifierT > void -{{synapseName}}< targetidentifierT >::process_{{vt_port}}_spikes_( const std::vector< nest::spikecounter >& vt_spikes, +{{synapseName}}< targetidentifierT >::process_{{vt_port}}_spikes_( const std::vector< spikecounter >& vt_spikes, double t0, double t1, const {{synapseName}}CommonSynapseProperties& cp ) @@ -1063,7 +1062,7 @@ void // process dopa spikes in (t0, t1] // propagate weight from t0 to t1 if ( ( vt_spikes.size() > vt_spikes_idx_ + 1 ) - and ( t1 - vt_spikes[ vt_spikes_idx_ + 1 ].spike_time_ > -1.0 * nest::kernel().connection_manager.get_stdp_eps() ) ) + and ( t1 - vt_spikes[ vt_spikes_idx_ + 1 ].spike_time_ > -1.0 * kernel().connection_manager.get_stdp_eps() ) ) { // there is at least 1 dopa spike in (t0, t1] // propagate up to first dopa spike @@ -1083,7 +1082,7 @@ void // process remaining dopa spikes in (t0, t1] double cd; while ( ( vt_spikes.size() > vt_spikes_idx_ + 1 ) - and ( t1 - vt_spikes[ vt_spikes_idx_ + 1 ].spike_time_ > -1.0 * nest::kernel().connection_manager.get_stdp_eps() ) ) + and ( t1 - vt_spikes[ vt_spikes_idx_ + 1 ].spike_time_ > -1.0 * kernel().connection_manager.get_stdp_eps() ) ) { // propagate up to next dopa spike update_internal_state_(vt_spikes[ vt_spikes_idx_ ].spike_time_, @@ -1133,7 +1132,7 @@ void {{synapseName}}< targetidentifierT >::get_status( DictionaryDatum& __d ) const { ConnectionBase::get_status( __d ); - def< long >( __d, nest::names::size_of, sizeof( *this ) ); + def< long >( __d, names::size_of, sizeof( *this ) ); // parameters and state variables {%- filter indent(2,True) %} @@ -1143,12 +1142,12 @@ void {%- if not isHomogeneous %} {%- if variable.get_name() == nest_codegen_opt_delay_variable %} {#- 
special case for NEST special variable delay #} -def< {{ declarations.print_variable_type(variable_symbol) }} >( __d, nest::names::delay, {{ printer.print(variable) }} ); // NEST special case for delay variable -def(__d, nest::{{ synapseName }}_names::_{{ nest_codegen_opt_delay_variable }}, {{ printer.print(variable) }}); +def< {{ declarations.print_variable_type(variable_symbol) }} >( __d, names::delay, {{ printer.print(variable) }} ); // NEST special case for delay variable +def(__d, {{ synapseName }}_names::_{{ nest_codegen_opt_delay_variable }}, {{ printer.print(variable) }}); {#- special case for NEST special variable weight #} {%- elif variable.get_name() == synapse_weight_variable %} -def< {{ declarations.print_variable_type(variable_symbol) }} >( __d, nest::names::weight, {{ printer.print(variable) }} ); // NEST special case for weight variable -def< {{ declarations.print_variable_type(variable_symbol) }} >( __d, nest::{{ synapseName }}_names::_{{ synapse_weight_variable }}, {{ printer.print(variable) }} ); // NEST special case for weight variable +def< {{ declarations.print_variable_type(variable_symbol) }} >( __d, names::weight, {{ printer.print(variable) }} ); // NEST special case for weight variable +def< {{ declarations.print_variable_type(variable_symbol) }} >( __d, {{ synapseName }}_names::_{{ synapse_weight_variable }}, {{ printer.print(variable) }} ); // NEST special case for weight variable {%- else %} {%- include "directives_cpp/WriteInDictionary.jinja2" %} {%- endif %} @@ -1160,19 +1159,19 @@ def< {{ declarations.print_variable_type(variable_symbol) }} >( __d, nest::{{ sy template < typename targetidentifierT > void {{synapseName}}< targetidentifierT >::set_status( const DictionaryDatum& __d, - nest::ConnectorModel& cm ) + ConnectorModel& cm ) { {%- if synapse_weight_variable|length > 0 and synapse_weight_variable != "weight" %} - if (__d->known(nest::{{ synapseName }}_names::_{{ synapse_weight_variable }}) and __d->known(nest::names::weight)) + 
if (__d->known({{ synapseName }}_names::_{{ synapse_weight_variable }}) and __d->known(names::weight)) { - throw nest::BadProperty( "To prevent inconsistencies, please set either 'weight' or '{{ synapse_weight_variable }}' variable; not both at the same time." ); + throw BadProperty( "To prevent inconsistencies, please set either 'weight' or '{{ synapse_weight_variable }}' variable; not both at the same time." ); } {%- endif %} {%- if nest_codegen_opt_delay_variable != "delay" %} - if (__d->known(nest::{{ synapseName }}_names::_{{ nest_codegen_opt_delay_variable }}) and __d->known(nest::names::delay)) + if (__d->known({{ synapseName }}_names::_{{ nest_codegen_opt_delay_variable }}) and __d->known(names::delay)) { - throw nest::BadProperty( "To prevent inconsistencies, please set either 'delay' or '{{ nest_codegen_opt_delay_variable }}' variable; not both at the same time." ); + throw BadProperty( "To prevent inconsistencies, please set either 'delay' or '{{ nest_codegen_opt_delay_variable }}' variable; not both at the same time." 
); } {%- endif %} @@ -1188,17 +1187,17 @@ void {%- if variable.get_name() == nest_codegen_opt_delay_variable %} // special treatment of NEST delay double tmp_{{ nest_codegen_opt_delay_variable }} = get_delay(); -updateValue(__d, nest::{{ synapseName }}_names::_{{ nest_codegen_opt_delay_variable }}, tmp_{{nest_codegen_opt_delay_variable}}); +updateValue(__d, {{ synapseName }}_names::_{{ nest_codegen_opt_delay_variable }}, tmp_{{nest_codegen_opt_delay_variable}}); {%- elif variable.get_name() == synapse_weight_variable %} // special treatment of NEST weight double tmp_{{ synapse_weight_variable }} = get_weight(); -if (__d->known(nest::{{ synapseName }}_names::_{{ synapse_weight_variable }})) +if (__d->known({{ synapseName }}_names::_{{ synapse_weight_variable }})) { - updateValue(__d, nest::{{ synapseName }}_names::_{{ synapse_weight_variable }}, tmp_{{synapse_weight_variable}}); + updateValue(__d, {{ synapseName }}_names::_{{ synapse_weight_variable }}, tmp_{{synapse_weight_variable}}); } -if (__d->known(nest::names::weight)) +if (__d->known(names::weight)) { - updateValue(__d, nest::names::weight, tmp_{{synapse_weight_variable}}); + updateValue(__d, names::weight, tmp_{{synapse_weight_variable}}); } {%- else %} {%- include "directives_cpp/ReadFromDictionaryToTmp.jinja2" %} @@ -1236,13 +1235,13 @@ set_delay(tmp_{{ nest_codegen_opt_delay_variable }}); {% for invariant in synapse.get_parameter_invariants() %} if ( !({{printer.print(invariant)}}) ) { - throw nest::BadProperty("The constraint '{{nestml_printer.print(invariant)}}' is violated!"); + throw BadProperty("The constraint '{{nestml_printer.print(invariant)}}' is violated!"); } {%- endfor %} {%- endif %} // recompute internal variables in case they are dependent on parameters or state that might have been updated in this call to set_status() - V_.__h = nest::Time::get_resolution().get_ms(); + V_.__h = Time::get_resolution().get_ms(); recompute_internal_variables(); } @@ -1252,7 +1251,7 @@ set_delay(tmp_{{ 
nest_codegen_opt_delay_variable }}); template < typename targetidentifierT > void {{synapseName}}< targetidentifierT >::recompute_internal_variables() { - const double __timestep = nest::Time::get_resolution().get_ms(); // do not remove, this is necessary for the timestep() function + const double __timestep = Time::get_resolution().get_ms(); // do not remove, this is necessary for the timestep() function {% filter indent(2) %} {%- for variable_symbol in synapse.get_internal_symbols() %} @@ -1277,7 +1276,7 @@ std::string {{synapseName}}< targetidentifierT >::get_name() const template < typename targetidentifierT > {{synapseName}}< targetidentifierT >::{{synapseName}}() : ConnectionBase() { - const double __timestep = nest::Time::get_resolution().get_ms(); // do not remove, this is necessary for the timestep() function + const double __timestep = Time::get_resolution().get_ms(); // do not remove, this is necessary for the timestep() function // initial values for parameters {%- filter indent(2, True) %} {%- for variable_symbol in synapse.get_parameter_symbols() %} @@ -1295,7 +1294,7 @@ P_.__gsl_rel_error_tol = 1e-6; {%- endif %} {%- endfilter %} - V_.__h = nest::Time::get_resolution().get_ms(); + V_.__h = Time::get_resolution().get_ms(); recompute_internal_variables(); // initial values for state variables @@ -1348,8 +1347,8 @@ P_.__gsl_rel_error_tol = 1e-6; __sys.jacobian = nullptr; __sys.dimension = State_::STATE_VEC_SIZE; __sys.params = reinterpret_cast< void* >( &P_ ); - __step = nest::Time::get_resolution().get_ms(); - __integration_step = nest::Time::get_resolution().get_ms(); + __step = Time::get_resolution().get_ms(); + __integration_step = Time::get_resolution().get_ms(); {%- endif %} t_lastspike_ = 0.; @@ -1467,12 +1466,12 @@ template < typename targetidentifierT > {%- if not (nest_version.startswith("v2") or nest_version.startswith("v3.0") or nest_version.startswith("v3.1") or nest_version.startswith("v3.2") or nest_version.startswith("v3.3") or 
nest_version.startswith("v3.4")) %} inline void {{synapseName}}< targetidentifierT >::trigger_update_weight( size_t t, - const std::vector< nest::spikecounter >& vt_spikes, + const std::vector< spikecounter >& vt_spikes, const double t_trig, const CommonPropertiesType& cp ) {%- else %} {{synapseName}}< targetidentifierT >::trigger_update_weight( thread t, - const std::vector< nest::spikecounter >& vt_spikes, + const std::vector< spikecounter >& vt_spikes, const double t_trig, const CommonPropertiesType& cp ) {%- endif %} @@ -1565,6 +1564,7 @@ inline void {%- endif %} -} // namespace +} // namespace {{ synapseName }}; +} // end namespace nest; #endif /* #ifndef {{synapseName.upper()}}_H */ diff --git a/pynestml/codegeneration/resources_nest/point_neuron/setup/common/ModuleClass.jinja2 b/pynestml/codegeneration/resources_nest/point_neuron/setup/common/ModuleClass.jinja2 index da7b375dc..c9beeec58 100644 --- a/pynestml/codegeneration/resources_nest/point_neuron/setup/common/ModuleClass.jinja2 +++ b/pynestml/codegeneration/resources_nest/point_neuron/setup/common/ModuleClass.jinja2 @@ -132,7 +132,7 @@ void {%- if synapses %} // register synapses {%- for synapse in synapses %} - {{synapse.get_name()}}::register_connection_model< {{synapse.get_name()}}::{{synapse.get_name()}} >( "{{synapse.get_name()}}" ); + nest::{{synapse.get_name()}}::register_connection_model< nest::{{synapse.get_name()}}::{{synapse.get_name()}} >( "{{synapse.get_name()}}" ); {%- endfor %} {%- endif %} } // {{moduleName}}::init() diff --git a/pynestml/codegeneration/resources_nest/point_neuron/setup/common/ModuleClassMaster.jinja2 b/pynestml/codegeneration/resources_nest/point_neuron/setup/common/ModuleClassMaster.jinja2 index fb472263f..197fbe617 100644 --- a/pynestml/codegeneration/resources_nest/point_neuron/setup/common/ModuleClassMaster.jinja2 +++ b/pynestml/codegeneration/resources_nest/point_neuron/setup/common/ModuleClassMaster.jinja2 @@ -78,7 +78,7 @@ void {{moduleName}}::initialize() {%- if 
synapses %} // register synapses {%- for synapse in synapses %} - {{synapse.get_name()}}::register_{{synapse.get_name()}}( "{{synapse.get_name()}}" ); + nest::{{synapse.get_name()}}::register_{{synapse.get_name()}}( "{{synapse.get_name()}}" ); {%- endfor %} {%- endif %} } \ No newline at end of file From 6372da33e05bf14e45d3af38e9eac625748b0573 Mon Sep 17 00:00:00 2001 From: Pooja Babu Date: Wed, 9 Jul 2025 15:22:03 +0200 Subject: [PATCH 14/19] Fix templates --- .../resources_nest/point_neuron/setup/common/ModuleClass.jinja2 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pynestml/codegeneration/resources_nest/point_neuron/setup/common/ModuleClass.jinja2 b/pynestml/codegeneration/resources_nest/point_neuron/setup/common/ModuleClass.jinja2 index c9beeec58..3f6646d42 100644 --- a/pynestml/codegeneration/resources_nest/point_neuron/setup/common/ModuleClass.jinja2 +++ b/pynestml/codegeneration/resources_nest/point_neuron/setup/common/ModuleClass.jinja2 @@ -132,7 +132,7 @@ void {%- if synapses %} // register synapses {%- for synapse in synapses %} - nest::{{synapse.get_name()}}::register_connection_model< nest::{{synapse.get_name()}}::{{synapse.get_name()}} >( "{{synapse.get_name()}}" ); + nest::register_connection_model< nest::{{synapse.get_name()}} >( "{{synapse.get_name()}}" ); {%- endfor %} {%- endif %} } // {{moduleName}}::init() From 637eb6c8ad30ceba2f7dcecfa094eee8b14a639f Mon Sep 17 00:00:00 2001 From: Pooja Babu Date: Wed, 9 Jul 2025 15:40:19 +0200 Subject: [PATCH 15/19] Add copyright header --- .../setup/common/ModuleClassMaster.jinja2 | 2 +- tests/nest_tests/resources/stp_synapse.nestml | 32 ++++++++++++++++++- 2 files changed, 32 insertions(+), 2 deletions(-) diff --git a/pynestml/codegeneration/resources_nest/point_neuron/setup/common/ModuleClassMaster.jinja2 b/pynestml/codegeneration/resources_nest/point_neuron/setup/common/ModuleClassMaster.jinja2 index 197fbe617..a43c3912c 100644 --- 
a/pynestml/codegeneration/resources_nest/point_neuron/setup/common/ModuleClassMaster.jinja2 +++ b/pynestml/codegeneration/resources_nest/point_neuron/setup/common/ModuleClassMaster.jinja2 @@ -81,4 +81,4 @@ void {{moduleName}}::initialize() nest::{{synapse.get_name()}}::register_{{synapse.get_name()}}( "{{synapse.get_name()}}" ); {%- endfor %} {%- endif %} -} \ No newline at end of file +} diff --git a/tests/nest_tests/resources/stp_synapse.nestml b/tests/nest_tests/resources/stp_synapse.nestml index 70c50eb74..b86f055bb 100644 --- a/tests/nest_tests/resources/stp_synapse.nestml +++ b/tests/nest_tests/resources/stp_synapse.nestml @@ -1,3 +1,33 @@ +# stp_synapse.nestml +# ################## +# +# +# Description +# +++++++++++ +# +# This model is used to test vector operations with NEST. +# +# +# Copyright statement +# +++++++++++++++++++ +# +# This file is part of NEST. +# +# Copyright (C) 2004 The NEST Initiative +# +# NEST is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 2 of the License, or +# (at your option) any later version. +# +# NEST is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with NEST. If not, see . 
+ model stp_synapse: input: pre_spikes <- spike @@ -35,4 +65,4 @@ model stp_synapse: emit_spike(w_effective, d) update: - integrate_odes() \ No newline at end of file + integrate_odes() From 7a85ffdf243dc53ea8bdc013faed8025be7dd2c5 Mon Sep 17 00:00:00 2001 From: Pooja Babu Date: Thu, 10 Jul 2025 10:54:29 +0200 Subject: [PATCH 16/19] Fix test failure --- .../resources_nest/point_neuron/common/SynapseHeader.h.jinja2 | 2 +- .../resources_nest/point_neuron/setup/common/ModuleClass.jinja2 | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/pynestml/codegeneration/resources_nest/point_neuron/common/SynapseHeader.h.jinja2 b/pynestml/codegeneration/resources_nest/point_neuron/common/SynapseHeader.h.jinja2 index 5f9f131cc..8a49d7bd8 100644 --- a/pynestml/codegeneration/resources_nest/point_neuron/common/SynapseHeader.h.jinja2 +++ b/pynestml/codegeneration/resources_nest/point_neuron/common/SynapseHeader.h.jinja2 @@ -1397,7 +1397,7 @@ template < typename targetidentifierT > {%- for variable_symbol in synapse.get_state_symbols() %} {%- set variable = utils.get_state_variable_by_name(astnode, variable_symbol.get_symbol_name()) %} {%- if variable.get_name() != synapse_weight_variable and variable.get_name() != nest_codegen_opt_delay_variable %} - S_.{{ printer_no_origin.print(variable) }} = rhs.S_.{{ printer_no_origin.print(variable) }}; + {{ nest_codegen_utils.print_symbol_origin(variable_symbol, variable) % printer_no_origin.print(variable) }} = rhs.{{ nest_codegen_utils.print_symbol_origin(variable_symbol, variable) % printer_no_origin.print(variable) }}; {%- endif %} {%- endfor %} diff --git a/pynestml/codegeneration/resources_nest/point_neuron/setup/common/ModuleClass.jinja2 b/pynestml/codegeneration/resources_nest/point_neuron/setup/common/ModuleClass.jinja2 index 3f6646d42..f9175e2c9 100644 --- a/pynestml/codegeneration/resources_nest/point_neuron/setup/common/ModuleClass.jinja2 +++ 
b/pynestml/codegeneration/resources_nest/point_neuron/setup/common/ModuleClass.jinja2 @@ -132,7 +132,7 @@ void {%- if synapses %} // register synapses {%- for synapse in synapses %} - nest::register_connection_model< nest::{{synapse.get_name()}} >( "{{synapse.get_name()}}" ); + nest::register_connection_model< nest::{{synapse.get_name()}}::{{ synapse.get_name() }} >( "{{synapse.get_name()}}" ); {%- endfor %} {%- endif %} } // {{moduleName}}::init() From 4f9b327d41f63039102e5ee9148c138ee1f1f9ea Mon Sep 17 00:00:00 2001 From: Pooja Babu Date: Thu, 10 Jul 2025 17:08:04 +0200 Subject: [PATCH 17/19] Add test --- .../resources/non_linear_synapse.nestml | 33 +++++++ .../nest_tests/test_synapse_numeric_solver.py | 99 +++++++++++++++---- 2 files changed, 112 insertions(+), 20 deletions(-) create mode 100644 tests/nest_tests/resources/non_linear_synapse.nestml diff --git a/tests/nest_tests/resources/non_linear_synapse.nestml b/tests/nest_tests/resources/non_linear_synapse.nestml new file mode 100644 index 000000000..5e11ba262 --- /dev/null +++ b/tests/nest_tests/resources/non_linear_synapse.nestml @@ -0,0 +1,33 @@ +model non_linear_synapse: + state: + x real = 1. + y real = 1. + z real = 1. + w real = 0. + d ms = 1.0 ms + + equations: + x' = (sigma * (y - x)) / ms + y' = (x * (rho - z) - y) / ms + z' = (x * y - beta * z) / ms + + parameters: + sigma real = 10. 
+ beta real = 8/3 + rho real = 28 + + input: + pre_spikes <- spike + + output: + spike(weight real, delay ms) + + onReceive(pre_spikes): + x += 1 + y += 1 + z += 1 + w += x * y / z + emit_spike(w, d) + + update: + integrate_odes() diff --git a/tests/nest_tests/test_synapse_numeric_solver.py b/tests/nest_tests/test_synapse_numeric_solver.py index 7b36d1f28..4ffa26899 100644 --- a/tests/nest_tests/test_synapse_numeric_solver.py +++ b/tests/nest_tests/test_synapse_numeric_solver.py @@ -28,6 +28,15 @@ from pynestml.frontend.pynestml_frontend import generate_target, generate_nest_target import numpy as np +try: + import matplotlib + matplotlib.use("Agg") + import matplotlib.ticker + import matplotlib.pyplot as plt + TEST_PLOTS = True +except Exception: + TEST_PLOTS = False + @pytest.mark.skipif(NESTTools.detect_nest_version().startswith("v2"), reason="This test does not support NEST 2") @@ -106,30 +115,80 @@ def test_synapse_with_numeric_solver(self): data_vm = voltmeter.events data_sr = spike_recorder.events - fig, ax = plt.subplots(3, 1, sharex=True, figsize=(10, 15)) + # TODO: add assertions + + if TEST_PLOTS: + fig, ax = plt.subplots(3, 1, sharex=True, figsize=(10, 15)) + + ax[0].vlines(data_sr["times"], 0, 1) + ax[0].set_xlim([0, sim_time]) + ax[0].set_xlabel('Time (s)') + + ax[1].set_xlim([0, sim_time]) + ax[1].set_ylim([0, 1]) + ax[1].set_xlabel('Time (s)') - ax[0].vlines(data_sr["times"], 0, 1) - ax[0].set_xlim([0, sim_time]) - ax[0].set_xlabel('Time (s)') + ax[1].plot(x, label='x') + ax[1].plot(u, label='u') + ax[1].plot(U, label='U') + ax[1].legend(loc='best') - ax[1].set_xlim([0, sim_time]) - ax[1].set_ylim([0, 1]) - ax[1].set_xlabel('Time (s)') + ax[2].set_xlim([0, sim_time]) + ax[2].set_xlabel('Time (ms)') - ax[1].plot(x, label='x') - ax[1].plot(u, label='u') - ax[1].plot(U, label='U') - ax[1].legend(loc='best') + for ax_ in ax: + ax_.set_xlim([1., sim_time]) + ax_.set_xscale('log') - ax[2].set_xlim([0, sim_time]) - ax[2].set_xlabel('Time (ms)') + 
ax[2].plot(data_vm["times"], data_vm["V_m"]) - for ax_ in ax: - ax_.set_xlim([1., sim_time]) - ax_.set_xscale('log') + fig.tight_layout() + fig.savefig('synaug_numsim.pdf') - ax[2].plot(data_vm["times"], data_vm["V_m"]) + def test_non_linear_synapse(self): + nest.ResetKernel() + nest.set_verbosity("M_WARNING") + dt = 0.1 + nest.resolution = dt + sim_time = 8.0 + + files = ["models/neurons/iaf_psc_exp_neuron.nestml", "tests/nest_tests/resources/non_linear_synapse.nestml"] + input_paths = [os.path.realpath(os.path.join(os.path.dirname(__file__), os.path.join( + os.pardir, os.pardir, s))) for s in files] + target_path = "target_nl" + modulename = "nl_syn_module" + + generate_nest_target(input_path=input_paths, + target_path=target_path, + logging_level="INFO", + suffix="_nestml", + module_name=modulename, + codegen_opts={"neuron_synapse_pairs": [{"neuron": "iaf_psc_exp_neuron", + "synapse": "non_linear_synapse"}], + "delay_variable": {"non_linear_synapse": "d"}, + "weight_variable": {"non_linear_synapse": "w"}}) + nest.Install(modulename) + + neuron_model = "iaf_psc_exp_neuron_nestml__with_non_linear_synapse_nestml" + synapse_model = "non_linear_synapse_nestml__with_iaf_psc_exp_neuron_nestml" + + neuron = nest.Create(neuron_model) + sg = nest.Create("spike_generator", params={"spike_times": [3.0, 5.0, 7.0]}) + + nest.Connect(sg, neuron, syn_spec={"synapse_model": synapse_model}) + connections = nest.GetConnections(source=sg, synapse_model=synapse_model) + x = [] + y = [] + z = [] + w = [] + sim_step_size = 1. 
+ for i in np.arange(0., sim_time + 0.01, sim_step_size): + nest.Simulate(sim_step_size) + syn_stats = connections.get() # nest.GetConnections()[2].get() + x += [syn_stats["x"]] + y += [syn_stats["y"]] + z += [syn_stats["z"]] + w += [syn_stats["w"]] - fig.tight_layout() - fig.savefig('synaug_numsim.pdf') - plt.show() + print(x, y, z, w) + # TODO: add assertions From 0739a2e646f64df85224676b405f5af217e75d46 Mon Sep 17 00:00:00 2001 From: Pooja Babu Date: Wed, 16 Jul 2025 15:31:09 +0200 Subject: [PATCH 18/19] Modify test --- .../synapse_post_neuron_transformer.py | 2 +- .../resources/non_linear_synapse.nestml | 3 - .../nest_tests/test_synapse_numeric_solver.py | 75 ++++++++++++++++--- 3 files changed, 67 insertions(+), 13 deletions(-) diff --git a/pynestml/transformers/synapse_post_neuron_transformer.py b/pynestml/transformers/synapse_post_neuron_transformer.py index 95d7e087e..73ed81390 100644 --- a/pynestml/transformers/synapse_post_neuron_transformer.py +++ b/pynestml/transformers/synapse_post_neuron_transformer.py @@ -294,7 +294,7 @@ def transform_neuron_synapse_pair_(self, neuron: ASTModel, synapse: ASTModel): strictly_synaptic_vars = ["t"] # "seed" this with the predefined variable t if self.option_exists("strictly_synaptic_vars") and removesuffix(synapse.get_name(), FrontendConfiguration.suffix) in self.get_option("strictly_synaptic_vars").keys() and self.get_option("strictly_synaptic_vars")[removesuffix(synapse.get_name(), FrontendConfiguration.suffix)]: - strictly_synaptic_vars.append(self.get_option("strictly_synaptic_vars")[removesuffix(synapse.get_name(), FrontendConfiguration.suffix)]) + strictly_synaptic_vars.extend(self.get_option("strictly_synaptic_vars")[removesuffix(synapse.get_name(), FrontendConfiguration.suffix)]) if self.option_exists("delay_variable") and removesuffix(synapse.get_name(), FrontendConfiguration.suffix) in self.get_option("delay_variable").keys() and self.get_option("delay_variable")[removesuffix(synapse.get_name(), 
FrontendConfiguration.suffix)]: strictly_synaptic_vars.append(self.get_option("delay_variable")[removesuffix(synapse.get_name(), FrontendConfiguration.suffix)]) diff --git a/tests/nest_tests/resources/non_linear_synapse.nestml b/tests/nest_tests/resources/non_linear_synapse.nestml index 5e11ba262..76f4985cf 100644 --- a/tests/nest_tests/resources/non_linear_synapse.nestml +++ b/tests/nest_tests/resources/non_linear_synapse.nestml @@ -23,9 +23,6 @@ model non_linear_synapse: spike(weight real, delay ms) onReceive(pre_spikes): - x += 1 - y += 1 - z += 1 w += x * y / z emit_spike(w, d) diff --git a/tests/nest_tests/test_synapse_numeric_solver.py b/tests/nest_tests/test_synapse_numeric_solver.py index 4ffa26899..79bd602c3 100644 --- a/tests/nest_tests/test_synapse_numeric_solver.py +++ b/tests/nest_tests/test_synapse_numeric_solver.py @@ -19,10 +19,9 @@ # You should have received a copy of the GNU General Public License # along with NEST. If not, see . import os - -import matplotlib.pyplot as plt import nest import pytest +from scipy.integrate import solve_ivp from pynestml.codegeneration.nest_tools import NESTTools from pynestml.frontend.pynestml_frontend import generate_target, generate_nest_target @@ -30,9 +29,11 @@ try: import matplotlib + matplotlib.use("Agg") import matplotlib.ticker import matplotlib.pyplot as plt + TEST_PLOTS = True except Exception: TEST_PLOTS = False @@ -145,12 +146,54 @@ def test_synapse_with_numeric_solver(self): fig.tight_layout() fig.savefig('synaug_numsim.pdf') + def lorenz_attractor_system(self, t, state, sigma, rho, beta): + x, y, z = state + dxdt = (sigma * (y - x)) + dydt = (x * (rho - z) - y) + dzdt = (x * y - beta * z) + return [dxdt, dydt, dzdt] + + def evaluate_odes_scipy(self, sigma, rho, beta, initial_state, spike_times, sim_time): + x_arr = [] + y_arr = [] + z_arr = [] + y0 = initial_state + + t_last_spike = 0. 
+ spike_idx = 0 + for i in np.arange(1., sim_time + 0.01, 1.0): + if spike_idx < len(spike_times) and i == spike_times[spike_idx]: + t_spike = spike_times[spike_idx] + t_span = (t_last_spike, t_spike) + print("Integrating over the iterval: ", t_span) + # Solve using RK45 + solution = solve_ivp( + fun=self.lorenz_attractor_system, + t_span=t_span, + y0=y0, # [x_arr[-1], y_arr[-1], z_arr[-1]], + args=(sigma, rho, beta), + method='RK45', + first_step=0.1, + rtol=1e-6, # relative tolerance + atol=1e-6 # absolute tolerance + ) + y0 = solution.y[:, -1] + t_last_spike = t_spike + spike_idx += 1 + + x_arr += [y0[0]] + y_arr += [y0[1]] + z_arr += [y0[2]] + + return x_arr, y_arr, z_arr + def test_non_linear_synapse(self): nest.ResetKernel() nest.set_verbosity("M_WARNING") dt = 0.1 nest.resolution = dt sim_time = 8.0 + spike_times = [3.0, 5.0, 7.0] files = ["models/neurons/iaf_psc_exp_neuron.nestml", "tests/nest_tests/resources/non_linear_synapse.nestml"] input_paths = [os.path.realpath(os.path.join(os.path.dirname(__file__), os.path.join( @@ -166,29 +209,43 @@ def test_non_linear_synapse(self): codegen_opts={"neuron_synapse_pairs": [{"neuron": "iaf_psc_exp_neuron", "synapse": "non_linear_synapse"}], "delay_variable": {"non_linear_synapse": "d"}, - "weight_variable": {"non_linear_synapse": "w"}}) + "weight_variable": {"non_linear_synapse": "w"}, + "strictly_synaptic_vars": {"non_linear_synapse": ["x", "y", "z"]}}) nest.Install(modulename) neuron_model = "iaf_psc_exp_neuron_nestml__with_non_linear_synapse_nestml" synapse_model = "non_linear_synapse_nestml__with_iaf_psc_exp_neuron_nestml" neuron = nest.Create(neuron_model) - sg = nest.Create("spike_generator", params={"spike_times": [3.0, 5.0, 7.0]}) + sg = nest.Create("spike_generator", params={"spike_times": spike_times}) nest.Connect(sg, neuron, syn_spec={"synapse_model": synapse_model}) connections = nest.GetConnections(source=sg, synapse_model=synapse_model) + + # Get the parameter values + sigma = connections.get("sigma") 
+ rho = connections.get("rho") + beta = connections.get("beta") + + # Initial values of state variables + inital_state = [connections.get("x"), connections.get("y"), connections.get("z")] + + # Scipy simulation + x_expected, y_expected, z_expected = self.evaluate_odes_scipy(sigma, rho, beta, inital_state, spike_times, sim_time) + + # NEST simulation x = [] y = [] z = [] - w = [] sim_step_size = 1. - for i in np.arange(0., sim_time + 0.01, sim_step_size): + for i in np.arange(0., sim_time, sim_step_size): nest.Simulate(sim_step_size) syn_stats = connections.get() # nest.GetConnections()[2].get() x += [syn_stats["x"]] y += [syn_stats["y"]] z += [syn_stats["z"]] - w += [syn_stats["w"]] - print(x, y, z, w) - # TODO: add assertions + #TODO: Adjust tolerance + np.testing.assert_allclose(x, x_expected, atol=1e-2, rtol=1e-2) + np.testing.assert_allclose(y, y_expected, atol=1e-2, rtol=1e-2) + np.testing.assert_allclose(z, z_expected, atol=1e-2, rtol=1e-2) From 65715a162cdef738c43396eff6632445f708f952 Mon Sep 17 00:00:00 2001 From: Pooja Babu Date: Wed, 16 Jul 2025 15:34:13 +0200 Subject: [PATCH 19/19] Fix pycodestyle --- tests/nest_tests/test_synapse_numeric_solver.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/nest_tests/test_synapse_numeric_solver.py b/tests/nest_tests/test_synapse_numeric_solver.py index 79bd602c3..e0597d0ea 100644 --- a/tests/nest_tests/test_synapse_numeric_solver.py +++ b/tests/nest_tests/test_synapse_numeric_solver.py @@ -245,7 +245,7 @@ def test_non_linear_synapse(self): y += [syn_stats["y"]] z += [syn_stats["z"]] - #TODO: Adjust tolerance + # TODO: Adjust tolerance np.testing.assert_allclose(x, x_expected, atol=1e-2, rtol=1e-2) np.testing.assert_allclose(y, y_expected, atol=1e-2, rtol=1e-2) np.testing.assert_allclose(z, z_expected, atol=1e-2, rtol=1e-2)