Skip to main content

sc_neurocore_engine/neurons/
simple_spiking.rs

1// SPDX-License-Identifier: AGPL-3.0-or-later
2// Commercial license available
3// © Concepts 1996–2026 Miroslav Šotek. All rights reserved.
4// © Code 2020–2026 Miroslav Šotek. All rights reserved.
5// ORCID: 0009-0009-3560-0851
6// Contact: www.anulum.li | protoscience@anulum.li
7// SC-NeuroCore — Two-dimensional and higher spiking neuron models
8
9//! Two-dimensional and higher spiking neuron models.
10
11use rand::{RngExt, SeedableRng};
12use rand_xoshiro::Xoshiro256PlusPlus;
13
/// FitzHugh-Nagumo 1961 — two-variable qualitative model of spike generation.
///
/// State: fast voltage-like variable `v` and slow recovery variable `w`.
/// Integration is forward Euler with both derivatives evaluated on the
/// pre-step state (simultaneous update).
#[derive(Clone, Debug)]
pub struct FitzHughNagumoNeuron {
    pub v: f64,
    pub w: f64,
    pub a: f64,
    pub b: f64,
    pub epsilon: f64,
    pub dt: f64,
    pub v_threshold: f64,
}

impl FitzHughNagumoNeuron {
    /// Canonical parameter set (a = 0.7, b = 0.8, eps = 0.08) at rest.
    pub fn new() -> Self {
        Self { v: -1.0, w: -0.5, a: 0.7, b: 0.8, epsilon: 0.08, dt: 0.1, v_threshold: 1.0 }
    }

    /// Advance one Euler step under input `current`; returns 1 only on an
    /// upward crossing of `v_threshold`, 0 otherwise.
    pub fn step(&mut self, current: f64) -> i32 {
        let previous_v = self.v;
        // Both increments read the old (v, w) pair — simultaneous Euler.
        let delta_v = (self.v - self.v.powi(3) / 3.0 - self.w + current) * self.dt;
        let delta_w = self.epsilon * (self.v + self.a - self.b * self.w) * self.dt;
        self.v += delta_v;
        self.w += delta_w;
        let crossed_up = previous_v < self.v_threshold && self.v >= self.v_threshold;
        i32::from(crossed_up)
    }

    /// Return the state to its resting values; parameters are untouched.
    pub fn reset(&mut self) {
        self.v = -1.0;
        self.w = -0.5;
    }
}

impl Default for FitzHughNagumoNeuron {
    fn default() -> Self {
        Self::new()
    }
}
61
/// Morris-Lecar 1981 — two-variable conductance model of the barnacle
/// muscle fiber: instantaneous Ca activation plus a dynamic K gate `w`.
#[derive(Clone, Debug)]
pub struct MorrisLecarNeuron {
    pub v: f64,
    pub w: f64,
    pub c_m: f64,
    pub g_ca: f64,
    pub g_k: f64,
    pub g_l: f64,
    pub e_ca: f64,
    pub e_k: f64,
    pub e_l: f64,
    pub v1: f64,
    pub v2: f64,
    pub v3: f64,
    pub v4: f64,
    pub phi: f64,
    pub dt: f64,
    pub v_threshold: f64,
}

impl MorrisLecarNeuron {
    /// Standard parameter set, starting at rest (-60 mV).
    pub fn new() -> Self {
        Self {
            v: -60.0,
            w: 0.0,
            c_m: 20.0,
            g_ca: 4.0,
            g_k: 8.0,
            g_l: 2.0,
            e_ca: 120.0,
            e_k: -84.0,
            e_l: -60.0,
            v1: -1.2,
            v2: 18.0,
            v3: 12.0,
            v4: 17.4,
            phi: 1.0 / 15.0,
            dt: 0.1,
            v_threshold: 0.0,
        }
    }

    /// One Euler step; gating curves and all ionic currents are evaluated
    /// on the pre-step state. Returns 1 on an upward threshold crossing.
    pub fn step(&mut self, current: f64) -> i32 {
        let old_v = self.v;
        // Steady-state activation curves (tanh sigmoids).
        let ca_gate = 0.5 * (1.0 + ((self.v - self.v1) / self.v2).tanh());
        let k_gate_inf = 0.5 * (1.0 + ((self.v - self.v3) / self.v4).tanh());
        // Voltage-dependent relaxation rate of the K gate.
        let k_rate = self.phi * ((self.v - self.v3) / (2.0 * self.v4)).cosh();
        let i_calcium = self.g_ca * ca_gate * (self.v - self.e_ca);
        let i_potassium = self.g_k * self.w * (self.v - self.e_k);
        let i_leak = self.g_l * (self.v - self.e_l);
        self.v += (-i_calcium - i_potassium - i_leak + current) / self.c_m * self.dt;
        self.w += k_rate * (k_gate_inf - self.w) * self.dt;
        i32::from(old_v < self.v_threshold && self.v >= self.v_threshold)
    }

    /// Restore the resting state.
    pub fn reset(&mut self) {
        self.v = -60.0;
        self.w = 0.0;
    }
}

impl Default for MorrisLecarNeuron {
    fn default() -> Self {
        Self::new()
    }
}
130
/// Hindmarsh-Rose 1984 — 3D chaotic bursting model.
///
/// `x` is the membrane variable, `y` a fast recovery variable, and `z`
/// a slow adaptation variable that paces burst onset and offset.
#[derive(Clone, Debug)]
pub struct HindmarshRoseNeuron {
    pub x: f64,
    pub y: f64,
    pub z: f64,
    pub b: f64,
    pub r: f64,
    pub s: f64,
    pub x_rest: f64,
    pub dt: f64,
    pub x_threshold: f64,
}

impl HindmarshRoseNeuron {
    /// Canonical bursting parameter set (b = 3, r = 0.001, s = 4).
    pub fn new() -> Self {
        Self {
            x: -1.6,
            y: -10.0,
            z: 2.0,
            b: 3.0,
            r: 0.001,
            s: 4.0,
            x_rest: -1.6,
            dt: 0.1,
            x_threshold: 1.0,
        }
    }

    /// One simultaneous-Euler step; 1 is returned only on an upward
    /// crossing of `x_threshold`.
    pub fn step(&mut self, current: f64) -> i32 {
        let old_x = self.x;
        // All three increments are evaluated on the pre-step (x, y, z).
        let inc_x = (self.y - self.x.powi(3) + self.b * self.x.powi(2) - self.z + current) * self.dt;
        let inc_y = (1.0 - 5.0 * self.x.powi(2) - self.y) * self.dt;
        let inc_z = self.r * (self.s * (self.x - self.x_rest) - self.z) * self.dt;
        self.x += inc_x;
        self.y += inc_y;
        self.z += inc_z;
        i32::from(old_x < self.x_threshold && self.x >= self.x_threshold)
    }

    /// Restore the resting state.
    pub fn reset(&mut self) {
        self.x = -1.6;
        self.y = -10.0;
        self.z = 2.0;
    }
}

impl Default for HindmarshRoseNeuron {
    fn default() -> Self {
        Self::new()
    }
}
184
/// Resonate-and-Fire (Izhikevich 2001) — a damped linear oscillator
/// (x, y) that emits a spike and re-centres at the origin whenever its
/// radius reaches `threshold`.
#[derive(Clone, Debug)]
pub struct ResonateAndFireNeuron {
    pub x: f64,
    pub y: f64,
    pub b: f64,
    pub omega: f64,
    pub threshold: f64,
    pub dt: f64,
}

impl ResonateAndFireNeuron {
    /// Weakly damped (b = -0.1) unit-frequency oscillator at the origin.
    pub fn new() -> Self {
        Self { x: 0.0, y: 0.0, b: -0.1, omega: 1.0, threshold: 1.0, dt: 0.05 }
    }

    /// One simultaneous-Euler step; fires (and hard-resets to the origin)
    /// once the oscillation amplitude reaches `threshold`.
    pub fn step(&mut self, current: f64) -> i32 {
        // Both increments read the pre-step (x, y) pair.
        let inc_x = (self.b * self.x - self.omega * self.y + current) * self.dt;
        let inc_y = (self.omega * self.x + self.b * self.y) * self.dt;
        self.x += inc_x;
        self.y += inc_y;
        let radius = (self.x * self.x + self.y * self.y).sqrt();
        if radius < self.threshold {
            return 0;
        }
        self.x = 0.0;
        self.y = 0.0;
        1
    }

    /// Re-centre the oscillator.
    pub fn reset(&mut self) {
        self.x = 0.0;
        self.y = 0.0;
    }
}

impl Default for ResonateAndFireNeuron {
    fn default() -> Self {
        Self::new()
    }
}
232
/// Balanced Resonate-and-Fire (Higuchi, Kairat, Bohte & Otte 2024, Alg. 1):
/// an RF oscillator whose damping is anchored to the divergence boundary
/// and whose threshold is raised by a smoothly decaying refractory
/// variable `q`.
#[derive(Clone, Debug)]
pub struct BalancedResonateAndFireNeuron {
    pub x: f64,
    pub y: f64,
    pub q: f64,
    pub omega: f64,
    pub b_offset: f64,
    pub threshold: f64,
    pub gamma: f64,
    pub dt: f64,
}

/// Divergence boundary p(omega): the damping value at which the
/// Euler-discretised oscillator of angular frequency `omega` neither grows
/// nor decays; the radicand is floored at 0 so large `dt * omega` degrades
/// gracefully instead of producing NaN.
pub fn brf_sustain_oscillation_boundary(omega: f64, dt: f64) -> f64 {
    let product = dt * omega;
    let radicand = (1.0 - product * product).max(0.0);
    (-1.0 + radicand.sqrt()) / dt
}

impl BalancedResonateAndFireNeuron {
    /// Default oscillator: omega = 10 at dt = 0.01, unit base threshold.
    pub fn new() -> Self {
        Self {
            x: 0.0,
            y: 0.0,
            q: 0.0,
            omega: 10.0,
            b_offset: 1.0,
            threshold: 1.0,
            gamma: 0.9,
            dt: 0.01,
        }
    }

    /// Divergence boundary for the current (omega, dt) pair.
    pub fn p_omega(&self) -> f64 {
        brf_sustain_oscillation_boundary(self.omega, self.dt)
    }

    /// Effective damping: boundary value pushed down by the fixed offset
    /// and the refractory variable.
    pub fn damping(&self) -> f64 {
        self.p_omega() - self.b_offset - self.q
    }

    /// Threshold raised by the refractory variable.
    pub fn dynamic_threshold(&self) -> f64 {
        self.threshold + self.q
    }

    /// One step of Algorithm 1; returns 1 when `x` reaches the dynamic
    /// threshold. Invalid parameterisations produce no spike.
    pub fn step(&mut self, current: f64) -> i32 {
        // Reject parameterisations the discretisation cannot handle.
        let params_ok = self.dt.is_finite()
            && self.omega.is_finite()
            && self.dt > 0.0
            && self.omega > 0.0
            && self.dt * self.omega <= 1.0;
        if !params_ok {
            return 0;
        }
        let damping_now = self.damping();
        let threshold_now = self.dynamic_threshold();
        let (old_x, old_y) = (self.x, self.y);
        // Simultaneous Euler update of the oscillator pair.
        self.x = old_x + self.dt * (damping_now * old_x - self.omega * old_y + current);
        self.y = old_y + self.dt * (self.omega * old_x + damping_now * old_y);
        let spike = i32::from(self.x >= threshold_now);
        // Refractory variable: geometric decay plus one unit per spike.
        self.q = self.gamma * self.q + spike as f64;
        spike
    }

    /// Clear the oscillator and refractory state.
    pub fn reset(&mut self) {
        self.x = 0.0;
        self.y = 0.0;
        self.q = 0.0;
    }
}

impl Default for BalancedResonateAndFireNeuron {
    fn default() -> Self {
        Self::new()
    }
}
309
/// FitzHugh-Rinzel — FitzHugh-Nagumo extended with a slow third variable
/// `y` that switches the system between spiking and quiescent phases
/// (elliptic bursting).
#[derive(Clone, Debug)]
pub struct FitzHughRinzelNeuron {
    pub v: f64,
    pub w: f64,
    pub y: f64,
    pub a: f64,
    pub b: f64,
    pub c: f64,
    pub d: f64,
    pub delta: f64,
    pub mu: f64,
    pub dt: f64,
    pub v_threshold: f64,
}

impl FitzHughRinzelNeuron {
    /// Canonical bursting parameter set (mu = 1e-4 makes `y` very slow).
    pub fn new() -> Self {
        Self {
            v: -1.0,
            w: -0.5,
            y: 0.0,
            a: 0.7,
            b: 0.8,
            c: -0.775,
            d: 1.0,
            delta: 0.08,
            mu: 0.0001,
            dt: 0.1,
            v_threshold: 1.0,
        }
    }

    /// One simultaneous-Euler step; returns 1 only on an upward crossing
    /// of `v_threshold`.
    pub fn step(&mut self, current: f64) -> i32 {
        let old_v = self.v;
        // Every increment reads the pre-step (v, w, y) state.
        let inc_v = (self.v - self.v.powi(3) / 3.0 - self.w + self.y + current) * self.dt;
        let inc_w = self.delta * (self.a + self.v - self.b * self.w) * self.dt;
        let inc_y = self.mu * (self.c - self.v - self.d * self.y) * self.dt;
        self.v += inc_v;
        self.w += inc_w;
        self.y += inc_y;
        i32::from(old_v < self.v_threshold && self.v >= self.v_threshold)
    }

    /// Restore the resting state.
    pub fn reset(&mut self) {
        self.v = -1.0;
        self.w = -0.5;
        self.y = 0.0;
    }
}

impl Default for FitzHughRinzelNeuron {
    fn default() -> Self {
        Self::new()
    }
}
368
/// McKean 1970 — FitzHugh-Nagumo variant whose cubic nullcline is replaced
/// by a three-segment piecewise-linear function.
#[derive(Clone, Debug)]
pub struct McKeanNeuron {
    pub v: f64,
    pub w: f64,
    pub a: f64,
    pub epsilon: f64,
    pub gamma: f64,
    pub dt: f64,
    pub v_peak: f64,
}

impl McKeanNeuron {
    /// Default parameter set, starting from the origin.
    pub fn new() -> Self {
        Self { v: 0.0, w: 0.0, a: 0.25, epsilon: 0.01, gamma: 0.5, dt: 0.1, v_peak: 0.8 }
    }

    /// Piecewise-linear surrogate of the cubic: -v below a/2, v - a up to
    /// (1 + a)/2, then 1 - v.
    fn f_v(&self, v: f64) -> f64 {
        let lower_knee = self.a / 2.0;
        let upper_knee = (1.0 + self.a) / 2.0;
        if v < lower_knee {
            return -v;
        }
        if v < upper_knee {
            return v - self.a;
        }
        1.0 - v
    }

    /// One simultaneous-Euler step; returns 1 only on an upward crossing
    /// of `v_peak`.
    pub fn step(&mut self, current: f64) -> i32 {
        let old_v = self.v;
        // Both increments read the pre-step (v, w) pair.
        let inc_v = (self.f_v(self.v) - self.w + current) * self.dt;
        let inc_w = self.epsilon * (self.v - self.gamma * self.w) * self.dt;
        self.v += inc_v;
        self.w += inc_w;
        i32::from(old_v < self.v_peak && self.v >= self.v_peak)
    }

    /// Return to the origin.
    pub fn reset(&mut self) {
        self.v = 0.0;
        self.w = 0.0;
    }
}

impl Default for McKeanNeuron {
    fn default() -> Self {
        Self::new()
    }
}
427
/// Terman-Wang relaxation oscillator — building block for segmentation by
/// oscillatory correlation; `rho` is an extra constant bias input.
#[derive(Clone, Debug)]
pub struct TermanWangOscillator {
    pub v: f64,
    pub w: f64,
    pub alpha: f64,
    pub beta: f64,
    pub epsilon: f64,
    pub rho: f64,
    pub dt: f64,
    pub v_peak: f64,
}

impl TermanWangOscillator {
    /// Default parameter set, starting on the silent branch.
    pub fn new() -> Self {
        Self {
            v: -1.5,
            w: -0.5,
            alpha: 3.0,
            beta: 0.2,
            epsilon: 0.02,
            rho: 0.0,
            dt: 0.05,
            v_peak: 1.5,
        }
    }

    /// One simultaneous-Euler step; returns 1 only on an upward crossing
    /// of `v_peak`.
    pub fn step(&mut self, current: f64) -> i32 {
        let old_v = self.v;
        // Cubic fast nullcline and sigmoidal slow nullcline.
        let fast = 3.0 * self.v - self.v.powi(3) + 2.0;
        let slow = self.alpha * (1.0 + (self.v / self.beta).tanh());
        // Both increments read the pre-step (v, w) pair.
        let inc_v = (fast - self.w + current + self.rho) * self.dt;
        let inc_w = self.epsilon * (slow - self.w) * self.dt;
        self.v += inc_v;
        self.w += inc_w;
        i32::from(old_v < self.v_peak && self.v >= self.v_peak)
    }

    /// Restore the initial silent-branch state.
    pub fn reset(&mut self) {
        self.v = -1.5;
        self.w = -0.5;
    }
}

impl Default for TermanWangOscillator {
    fn default() -> Self {
        Self::new()
    }
}
479
480/// Benda-Herz 2003 — firing-rate adaptation via subtractive feedback.
481#[derive(Clone, Debug)]
482pub struct BendaHerzNeuron {
483    pub a: f64,
484    pub f_max: f64,
485    pub beta: f64,
486    pub i_half: f64,
487    pub tau_a: f64,
488    pub delta_a: f64,
489    pub dt: f64,
490    rng: Xoshiro256PlusPlus,
491}
492
493impl BendaHerzNeuron {
494    pub fn new(seed: u64) -> Self {
495        Self {
496            a: 0.0,
497            f_max: 200.0,
498            beta: 0.1,
499            i_half: 5.0,
500            tau_a: 100.0,
501            delta_a: 0.5,
502            dt: 1.0,
503            rng: Xoshiro256PlusPlus::seed_from_u64(seed),
504        }
505    }
506    pub fn step(&mut self, current: f64) -> i32 {
507        let x = current - self.a;
508        let rate = self.f_max / (1.0 + (-self.beta * (x - self.i_half)).exp());
509        self.a += (-self.a / self.tau_a + self.delta_a * rate) * self.dt;
510        let p = rate * self.dt / 1000.0;
511        if self.rng.random::<f64>() < p {
512            1
513        } else {
514            0
515        }
516    }
517    pub fn reset(&mut self) {
518        self.a = 0.0;
519    }
520}
521
/// LIF with separate excitatory and inhibitory exponential synaptic traces
/// that are integrated into a leaky membrane.
#[derive(Clone, Debug)]
pub struct AlphaNeuron {
    pub v: f64,
    pub i_exc: f64,
    pub i_inh: f64,
    pub v_rest: f64,
    pub v_threshold: f64,
    pub tau_v: f64,
    pub tau_exc: f64,
    pub tau_inh: f64,
    pub dt: f64,
}

impl AlphaNeuron {
    /// Normalised-unit defaults: rest at 0, threshold at 1.
    pub fn new() -> Self {
        Self {
            v: 0.0,
            i_exc: 0.0,
            i_inh: 0.0,
            v_rest: 0.0,
            v_threshold: 1.0,
            tau_v: 20.0,
            tau_exc: 5.0,
            tau_inh: 10.0,
            dt: 1.0,
        }
    }

    /// One Euler step driven by the two synaptic inputs; hard reset to
    /// rest and return 1 when the threshold is reached.
    pub fn step(&mut self, exc_current: f64, inh_current: f64) -> i32 {
        // Each synaptic trace decays exponentially and integrates its drive.
        self.i_exc += (-self.i_exc / self.tau_exc + exc_current) * self.dt;
        self.i_inh += (-self.i_inh / self.tau_inh + inh_current) * self.dt;
        // Leaky membrane driven by the difference of the two traces.
        self.v += (-(self.v - self.v_rest) + self.i_exc - self.i_inh) / self.tau_v * self.dt;
        if self.v < self.v_threshold {
            return 0;
        }
        self.v = self.v_rest;
        1
    }

    /// Clear membrane and both synaptic traces.
    pub fn reset(&mut self) {
        self.v = self.v_rest;
        self.i_exc = 0.0;
        self.i_inh = 0.0;
    }
}

impl Default for AlphaNeuron {
    fn default() -> Self {
        Self::new()
    }
}
572
/// Conductance-based LIF (COBA), after Brette et al. 2007: incoming events
/// add conductance, conductances decay exponentially, and the membrane
/// integrates leak plus synaptic currents.
#[derive(Clone, Debug)]
pub struct COBALIFNeuron {
    pub v: f64,
    pub g_e: f64,
    pub g_i: f64,
    pub c_m: f64,
    pub g_l: f64,
    pub e_l: f64,
    pub e_e: f64,
    pub e_i: f64,
    pub tau_e: f64,
    pub tau_i: f64,
    pub v_threshold: f64,
    pub v_reset: f64,
    pub dt: f64,
}

impl COBALIFNeuron {
    /// Standard COBA benchmark parameters, at rest.
    pub fn new() -> Self {
        Self {
            v: -65.0,
            g_e: 0.0,
            g_i: 0.0,
            c_m: 200.0,
            g_l: 10.0,
            e_l: -65.0,
            e_e: 0.0,
            e_i: -80.0,
            tau_e: 5.0,
            tau_i: 10.0,
            v_threshold: -50.0,
            v_reset: -65.0,
            dt: 0.1,
        }
    }

    /// One Euler step: apply conductance jumps, integrate the membrane,
    /// decay the conductances, then test the hard threshold.
    pub fn step(&mut self, current: f64, delta_ge: f64, delta_gi: f64) -> i32 {
        // Incoming spike events add conductance instantaneously.
        self.g_e += delta_ge;
        self.g_i += delta_gi;
        let i_synaptic = self.g_e * (self.v - self.e_e) + self.g_i * (self.v - self.e_i);
        self.v += (-self.g_l * (self.v - self.e_l) - i_synaptic + current) / self.c_m * self.dt;
        // Exact exponential decay of both conductances over dt.
        self.g_e *= (-self.dt / self.tau_e).exp();
        self.g_i *= (-self.dt / self.tau_i).exp();
        if self.v < self.v_threshold {
            return 0;
        }
        self.v = self.v_reset;
        1
    }

    /// Return to the leak reversal with no open synaptic conductance.
    pub fn reset(&mut self) {
        self.v = self.e_l;
        self.g_e = 0.0;
        self.g_i = 0.0;
    }
}

impl Default for COBALIFNeuron {
    fn default() -> Self {
        Self::new()
    }
}
634
/// Gutkin-Ermentrout — reduced cortical neuron with Type-I excitability.
#[derive(Clone, Debug)]
pub struct GutkinErmentroutNeuron {
    /// Membrane potential.
    pub v: f64,
    /// K activation gate.
    pub n: f64,
    /// Maximal conductances.
    pub g_na: f64,
    pub g_k: f64,
    pub g_l: f64,
    /// Reversal potentials.
    pub e_na: f64,
    pub e_k: f64,
    pub e_l: f64,
    /// Euler step size.
    pub dt: f64,
    /// Upward-crossing spike detection level.
    pub v_threshold: f64,
}

impl GutkinErmentroutNeuron {
    /// Default Type-I parameter set, starting at rest.
    pub fn new() -> Self {
        Self {
            v: -65.0,
            n: 0.1,
            g_na: 20.0,
            g_k: 10.0,
            g_l: 8.0,
            e_na: 60.0,
            e_k: -90.0,
            e_l: -80.0,
            dt: 0.05,
            v_threshold: -20.0,
        }
    }
    /// One Euler step; returns 1 only on an upward crossing of
    /// `v_threshold`.
    ///
    /// Fix: the K current is now evaluated with the pre-step gate value
    /// `n` (simultaneous Euler), consistent with every other model in this
    /// file; previously `n` was advanced before `i_k` was computed, mixing
    /// post-step gate state with pre-step voltage inside a single step.
    pub fn step(&mut self, current: f64) -> i32 {
        let v_prev = self.v;
        // Instantaneous Na activation and steady-state K activation.
        let m_inf = 1.0 / (1.0 + (-(self.v + 20.0) / 15.0).exp());
        let n_inf = 1.0 / (1.0 + (-(self.v + 25.0) / 5.0).exp());
        // All currents are evaluated on the pre-step (v, n) state.
        let i_na = self.g_na * m_inf * (self.v - self.e_na);
        let i_k = self.g_k * self.n * (self.v - self.e_k);
        let i_l = self.g_l * (self.v - self.e_l);
        self.v += (-i_na - i_k - i_l + current) * self.dt;
        self.n += (n_inf - self.n) * self.dt;
        if self.v >= self.v_threshold && v_prev < self.v_threshold {
            1
        } else {
            0
        }
    }
    /// Restore the resting state.
    pub fn reset(&mut self) {
        self.v = -65.0;
        self.n = 0.1;
    }
}
impl Default for GutkinErmentroutNeuron {
    fn default() -> Self {
        Self::new()
    }
}
690
/// Wilson 1999 — simplified cortical neuron: a quadratic-polynomial fast
/// current, one recovery variable `r`, and a hard voltage reset at the
/// spike peak.
#[derive(Clone, Debug)]
pub struct WilsonHRNeuron {
    pub v: f64,
    pub r: f64,
    pub tau_r: f64,
    pub v_peak: f64,
    pub dt: f64,
}

impl WilsonHRNeuron {
    /// Default parameter set, at rest (-0.7).
    pub fn new() -> Self {
        Self { v: -0.7, r: 0.1, tau_r: 1.9, v_peak: 0.4, dt: 0.05 }
    }

    /// One simultaneous-Euler step; returns 1 (and hard-resets v) whenever
    /// the voltage sits at or above `v_peak` — a level test, not a
    /// crossing test.
    pub fn step(&mut self, current: f64) -> i32 {
        // Both increments are evaluated on the pre-step state.
        let poly_current = -(17.81 + 47.71 * self.v + 32.63 * self.v * self.v) * (self.v - 0.55);
        let recovery_current = -26.0 * self.r * (self.v + 0.92);
        let inc_v = (poly_current + recovery_current + current) * self.dt;
        let inc_r = (-self.r + 1.35 * self.v + 1.03) / self.tau_r * self.dt;
        self.v += inc_v;
        self.r += inc_r;
        if self.v < self.v_peak {
            return 0;
        }
        // Hard reset at the spike peak (Wilson 1999).
        self.v = -0.7;
        1
    }

    /// Restore the resting state.
    pub fn reset(&mut self) {
        self.v = -0.7;
        self.r = 0.1;
    }
}

impl Default for WilsonHRNeuron {
    fn default() -> Self {
        Self::new()
    }
}
737
/// Chay 1985 — pancreatic beta cell with Ca-dependent K.
///
/// Three state variables: membrane voltage `v`, a K activation gate `n`,
/// and an intracellular Ca proxy `ca` that activates the `g_kca` current
/// (the slow negative feedback in this model).
#[derive(Clone, Debug)]
pub struct ChayNeuron {
    /// Membrane potential (clamped to [-200, 200] each step).
    pub v: f64,
    /// K activation gate, clamped to [0, 1].
    pub n: f64,
    /// Intracellular Ca proxy, kept non-negative.
    pub ca: f64,
    /// Maximal conductances.
    pub g_ca: f64,
    pub g_k: f64,
    pub g_kca: f64,
    pub g_l: f64,
    /// Reversal potentials.
    pub e_ca: f64,
    pub e_k: f64,
    pub e_l: f64,
    /// Overall Ca kinetics rate.
    pub rho: f64,
    /// Scaling of Ca influx from the Ca current.
    pub alpha_ca: f64,
    /// First-order Ca removal rate.
    pub k_ca: f64,
    /// Euler step size.
    pub dt: f64,
    /// Upward-crossing spike detection level.
    pub v_threshold: f64,
}

impl ChayNeuron {
    /// Default parameterisation, starting near rest.
    pub fn new() -> Self {
        Self {
            v: -50.0,
            n: 0.1,
            ca: 0.1,
            g_ca: 25.0,
            g_k: 1400.0,
            g_kca: 12.0,
            g_l: 7.0,
            e_ca: 100.0,
            e_k: -75.0,
            e_l: -40.0,
            rho: 0.00015,
            alpha_ca: 0.002,
            k_ca: 0.04,
            dt: 0.02,
            v_threshold: -20.0,
        }
    }
    /// One Euler step under injected `current`; returns 1 only on an
    /// upward crossing of `v_threshold`, 0 otherwise. All gating curves and
    /// currents are evaluated on the pre-step state before any update.
    pub fn step(&mut self, current: f64) -> i32 {
        let v_prev = self.v;
        // Instantaneous Ca activation and steady-state K activation
        // (Boltzmann sigmoids).
        let m_inf = 1.0 / (1.0 + (-(self.v + 25.0) / 8.0).exp());
        let n_inf = 1.0 / (1.0 + (-(self.v + 18.0) / 14.0).exp());
        // |v + 18| is floored so tau_n below cannot divide by zero at the
        // half-activation voltage.
        let d = (self.v + 18.0).abs().max(0.01);
        let tau_n = 1.0 / (0.01 * d);
        // Ca-activated K gate: saturating with unit half-saturation.
        let kca_act = self.ca / (self.ca + 1.0);
        let i_ca = self.g_ca * m_inf * (self.v - self.e_ca);
        let i_k = self.g_k * self.n * (self.v - self.e_k);
        let i_kca = self.g_kca * kca_act * (self.v - self.e_k);
        let i_l = self.g_l * (self.v - self.e_l);
        self.v += (-i_ca - i_k - i_kca - i_l + current) * self.dt;
        // Hard clamps keep the explicit Euler scheme from diverging.
        self.v = self.v.clamp(-200.0, 200.0);
        self.n += (n_inf - self.n) / tau_n.max(0.01) * self.dt;
        self.n = self.n.clamp(0.0, 1.0);
        // Ca influx proportional to -i_ca, first-order removal, floored at 0.
        self.ca =
            (self.ca + self.rho * (-self.alpha_ca * i_ca - self.k_ca * self.ca) * self.dt).max(0.0);
        if self.v >= self.v_threshold && v_prev < self.v_threshold {
            1
        } else {
            0
        }
    }
    /// Restore the default initial state; parameters are unchanged.
    pub fn reset(&mut self) {
        self.v = -50.0;
        self.n = 0.1;
        self.ca = 0.1;
    }
}
impl Default for ChayNeuron {
    fn default() -> Self {
        Self::new()
    }
}
812
/// Chay-Keizer — modified beta cell with inactivating Ca current.
///
/// NOTE(review): the summary above predates this implementation; the Ca
/// current below uses only instantaneous activation `m_inf` with no
/// visible inactivation gate — confirm the description against the model
/// source.
#[derive(Clone, Debug)]
pub struct ChayKeizerNeuron {
    /// Membrane potential (clamped to [-200, 200] each step).
    pub v: f64,
    /// K activation gate.
    pub n: f64,
    /// Intracellular Ca proxy, kept non-negative.
    pub ca: f64,
    /// Maximal conductances.
    pub g_ca: f64,
    pub g_k: f64,
    pub g_kca: f64,
    pub g_l: f64,
    /// Reversal potentials.
    pub e_ca: f64,
    pub e_k: f64,
    pub e_l: f64,
    /// Half-saturation of the Ca-activated K gate.
    pub k_d: f64,
    /// Scaling of Ca influx from the Ca current.
    pub f_ca: f64,
    /// First-order Ca removal rate.
    pub k_ca: f64,
    /// Euler step size.
    pub dt: f64,
    /// Upward-crossing spike detection level.
    pub v_threshold: f64,
}

impl ChayKeizerNeuron {
    /// Default parameterisation, starting near rest.
    pub fn new() -> Self {
        Self {
            v: -50.0,
            n: 0.01,
            ca: 0.1,
            g_ca: 20.0,
            g_k: 25.0,
            g_kca: 12.0,
            g_l: 0.1,
            e_ca: 100.0,
            e_k: -75.0,
            e_l: -40.0,
            k_d: 1.0,
            f_ca: 0.004,
            k_ca: 0.03,
            dt: 0.02,
            v_threshold: -20.0,
        }
    }
    /// One Euler step; all gating curves and currents are evaluated on the
    /// pre-step state. Returns 1 only on an upward crossing of
    /// `v_threshold`.
    pub fn step(&mut self, current: f64) -> i32 {
        let v_prev = self.v;
        // Instantaneous Ca activation and steady-state K activation.
        let m_inf = 1.0 / (1.0 + (-(self.v + 25.0) / 8.0).exp());
        let n_inf = 1.0 / (1.0 + (-(self.v + 18.0) / 14.0).exp());
        // Voltage-dependent K time constant, floored at 0.1 so the Euler
        // update of `n` stays bounded.
        let tau_n = (20.0 / (1.0 + ((self.v + 18.0) / 14.0).exp())).max(0.1);
        // Saturating Ca-activated K gate with half-saturation `k_d`.
        let q_kca = self.ca / (self.ca + self.k_d);
        let i_ca = self.g_ca * m_inf * (self.v - self.e_ca);
        let i_k = self.g_k * self.n * (self.v - self.e_k);
        let i_kca = self.g_kca * q_kca * (self.v - self.e_k);
        let i_l = self.g_l * (self.v - self.e_l);
        self.v += (-i_ca - i_k - i_kca - i_l + current) * self.dt;
        // Hard clamp keeps the explicit Euler scheme from diverging.
        self.v = self.v.clamp(-200.0, 200.0);
        self.n += (n_inf - self.n) / tau_n * self.dt;
        // Ca influx proportional to -i_ca, first-order removal, floored at 0.
        self.ca = (self.ca + (-self.f_ca * i_ca - self.k_ca * self.ca) * self.dt).max(0.0);
        if self.v >= self.v_threshold && v_prev < self.v_threshold {
            1
        } else {
            0
        }
    }
    /// Restore the default initial state; parameters are unchanged.
    pub fn reset(&mut self) {
        self.v = -50.0;
        self.n = 0.01;
        self.ca = 0.1;
    }
}
impl Default for ChayKeizerNeuron {
    fn default() -> Self {
        Self::new()
    }
}
884
/// Sherman-Rinzel-Keizer 1988 — reduced pancreatic beta-cell burster:
/// instantaneous Ca activation, a fast K gate `n`, and a very slow K-like
/// gate `s` (tau_s = 5000) that paces bursting.
#[derive(Clone, Debug)]
pub struct ShermanRinzelKeizerNeuron {
    pub v: f64,
    pub n: f64,
    pub s: f64,
    pub g_ca: f64,
    pub g_k: f64,
    pub g_s: f64,
    pub e_ca: f64,
    pub e_k: f64,
    pub tau_s: f64,
    pub dt: f64,
    pub v_threshold: f64,
}

impl ShermanRinzelKeizerNeuron {
    /// Default parameter set, starting near rest.
    pub fn new() -> Self {
        Self {
            v: -50.0,
            n: 0.1,
            s: 0.1,
            g_ca: 3.6,
            g_k: 10.0,
            g_s: 4.0,
            e_ca: 25.0,
            e_k: -75.0,
            tau_s: 5000.0,
            dt: 0.5,
            v_threshold: -20.0,
        }
    }

    /// One Euler step with all currents evaluated on the pre-step state;
    /// returns 1 only on an upward crossing of `v_threshold`.
    pub fn step(&mut self, current: f64) -> i32 {
        let old_v = self.v;
        // Steady-state activation curves (Boltzmann sigmoids).
        let ca_gate = 1.0 / (1.0 + (-(self.v + 20.0) / 12.0).exp());
        let n_target = 1.0 / (1.0 + (-(self.v + 16.0) / 5.0).exp());
        let s_target = 1.0 / (1.0 + (-(self.v + 35.0) / 10.0).exp());
        // Fixed relaxation time of the fast K gate.
        let tau_n = 9.09;
        let i_calcium = self.g_ca * ca_gate * (self.v - self.e_ca);
        let i_potassium = self.g_k * self.n * (self.v - self.e_k);
        let i_slow = self.g_s * self.s * (self.v - self.e_k);
        self.v += (-i_calcium - i_potassium - i_slow + current) * self.dt;
        self.n += (n_target - self.n) / tau_n * self.dt;
        self.s += (s_target - self.s) / self.tau_s * self.dt;
        i32::from(old_v < self.v_threshold && self.v >= self.v_threshold)
    }

    /// Restore the default initial state.
    pub fn reset(&mut self) {
        self.v = -50.0;
        self.n = 0.1;
        self.s = 0.1;
    }
}

impl Default for ShermanRinzelKeizerNeuron {
    fn default() -> Self {
        Self::new()
    }
}
946
/// Butera pre-Bötzinger respiratory neuron. Butera et al. 1999.
///
/// Model I: fast Na / delayed-rectifier K spiking plus a persistent Na
/// current (`g_nap`) whose slow inactivation gate `h_nap` paces the
/// respiratory-like bursting rhythm.
#[derive(Clone, Debug)]
pub struct ButeraRespiratoryNeuron {
    /// Membrane potential (clamped to [-200, 100] each step).
    pub v: f64,
    /// K activation gate; also gates fast-Na inactivation via the
    /// (1 - n) factor in `step`.
    pub n: f64,
    /// Slow inactivation gate of the persistent Na current.
    pub h_nap: f64,
    /// Maximal conductances.
    pub g_na: f64,
    pub g_nap: f64,
    pub g_k: f64,
    pub g_l: f64,
    /// Reversal potentials.
    pub e_na: f64,
    pub e_k: f64,
    pub e_l: f64,
    /// Base time constant of `h_nap` (divided by a cosh term in `step`).
    pub tau_h: f64,
    /// Euler step size.
    pub dt: f64,
    /// Upward-crossing spike detection level.
    pub v_threshold: f64,
}

impl ButeraRespiratoryNeuron {
    /// Default Model-I parameter set, starting near rest.
    pub fn new() -> Self {
        Self {
            v: -50.0,
            n: 0.01,
            h_nap: 0.5,
            g_na: 28.0,
            g_nap: 2.8,
            g_k: 11.2,
            g_l: 2.8,
            e_na: 50.0,
            e_k: -85.0,
            e_l: -65.0,
            tau_h: 10000.0,
            dt: 0.1,
            v_threshold: -20.0,
        }
    }
    /// One Euler step: all gating curves, time constants, and currents are
    /// evaluated on the pre-step state, then v, n, and h_nap are advanced
    /// and clamped. Returns 1 only on an upward crossing of `v_threshold`.
    pub fn step(&mut self, current: f64) -> i32 {
        let v_prev = self.v;
        // Steady-state activation/inactivation curves (Boltzmann sigmoids).
        let m_na = 1.0 / (1.0 + (-(self.v + 34.0) / 5.0).exp());
        let n_inf = 1.0 / (1.0 + (-(self.v + 29.0) / 4.0).exp());
        let m_nap = 1.0 / (1.0 + (-(self.v + 40.0) / 6.0).exp());
        let h_nap_inf = 1.0 / (1.0 + ((self.v + 48.0) / 6.0).exp());
        // Butera et al. 1999 Model I: tau_n = 10/cosh((V+29)/8)
        // (cosh floored to avoid division by zero, result floored at 0.01).
        let tau_n = (10.0 / ((self.v + 29.0) / 8.0).cosh().max(1e-12)).max(0.01);
        // Model I uses (1-n) for fast Na inactivation, not a separate h gate
        let i_na = self.g_na * m_na.powi(3) * (1.0 - self.n) * (self.v - self.e_na);
        let i_nap = self.g_nap * m_nap * self.h_nap * (self.v - self.e_na);
        let i_k = self.g_k * self.n.powi(4) * (self.v - self.e_k);
        let i_l = self.g_l * (self.v - self.e_l);
        // tau_h_nap = tau_h / cosh((V+48)/12) (Butera 1999)
        let tau_h_eff = (self.tau_h / ((self.v + 48.0) / 12.0).cosh().max(1e-12)).max(0.1);
        // Clamps keep the explicit Euler scheme bounded; gates stay in [0, 1].
        self.v = (self.v + (-i_na - i_nap - i_k - i_l + current) * self.dt).clamp(-200.0, 100.0);
        self.n = (self.n + (n_inf - self.n) / tau_n * self.dt).clamp(0.0, 1.0);
        self.h_nap = (self.h_nap + (h_nap_inf - self.h_nap) / tau_h_eff * self.dt).clamp(0.0, 1.0);
        if self.v >= self.v_threshold && v_prev < self.v_threshold {
            1
        } else {
            0
        }
    }
    /// Restore the default initial state; parameters are unchanged.
    pub fn reset(&mut self) {
        self.v = -50.0;
        self.n = 0.01;
        self.h_nap = 0.5;
    }
}
impl Default for ButeraRespiratoryNeuron {
    fn default() -> Self {
        Self::new()
    }
}
1018
/// Adaptive LIF for eligibility propagation (Bellec et al. 2020): leaky
/// integration with a spike-driven adaptive threshold and a surrogate
/// eligibility trace accumulated every step.
#[derive(Clone, Debug)]
pub struct EPropALIFNeuron {
    pub v: f64,
    pub a: f64,
    pub e_trace: f64,
    pub alpha_m: f64,
    pub alpha_a: f64,
    pub v_threshold_base: f64,
    pub beta: f64,
}

impl EPropALIFNeuron {
    /// Precompute per-step decay factors from the membrane (`tau_m`) and
    /// adaptation (`tau_a`) time constants at resolution `dt`.
    pub fn new(tau_m: f64, tau_a: f64, dt: f64) -> Self {
        Self {
            v: 0.0,
            a: 0.0,
            e_trace: 0.0,
            alpha_m: (-dt / tau_m).exp(),
            alpha_a: (-dt / tau_a).exp(),
            v_threshold_base: 1.0,
            beta: 0.07,
        }
    }

    /// One discrete step: leak + integrate, accumulate the surrogate
    /// trace, then spike against the adaptive threshold (hard reset to 0,
    /// adaptation bumped by 1 on spike, pure decay otherwise).
    pub fn step(&mut self, current: f64) -> i32 {
        self.v = self.alpha_m * self.v + current;
        let adaptive_threshold = self.v_threshold_base + self.beta * self.a;
        // Triangular pseudo-derivative feeding the eligibility trace.
        let pseudo_grad = ((1.0 - (self.v - adaptive_threshold).abs()) * 0.3).max(0.0);
        self.e_trace = self.alpha_a * self.e_trace + pseudo_grad;
        if self.v < adaptive_threshold {
            self.a *= self.alpha_a;
            return 0;
        }
        self.v = 0.0;
        self.a = self.alpha_a * self.a + 1.0;
        1
    }

    /// Clear all state; the decay factors are kept.
    pub fn reset(&mut self) {
        self.v = 0.0;
        self.a = 0.0;
        self.e_trace = 0.0;
    }
}

impl Default for EPropALIFNeuron {
    fn default() -> Self {
        Self::new(20.0, 200.0, 1.0)
    }
}
1069
/// SuperSpike (Zenke & Ganguli 2018) — LIF that additionally accumulates a
/// low-pass-filtered fast-sigmoid surrogate gradient in `trace`.
#[derive(Clone, Debug)]
pub struct SuperSpikeNeuron {
    pub v: f64,
    pub trace: f64,
    pub alpha_m: f64,
    pub alpha_e: f64,
    pub v_threshold: f64,
    pub v_reset: f64,
    pub beta_sg: f64,
}

impl SuperSpikeNeuron {
    /// Precompute membrane (`tau_m`) and trace (`tau_e`) decay factors at
    /// resolution `dt`.
    pub fn new(tau_m: f64, tau_e: f64, dt: f64) -> Self {
        Self {
            v: 0.0,
            trace: 0.0,
            alpha_m: (-dt / tau_m).exp(),
            alpha_e: (-dt / tau_e).exp(),
            v_threshold: 1.0,
            v_reset: 0.0,
            beta_sg: 10.0,
        }
    }

    /// One discrete step: leak + integrate, update the surrogate trace,
    /// then hard-reset and return 1 when the threshold is reached.
    pub fn step(&mut self, current: f64) -> i32 {
        self.v = self.alpha_m * self.v + current;
        // Fast-sigmoid surrogate gradient evaluated at the new voltage.
        let surrogate = 1.0 / (self.beta_sg * (self.v - self.v_threshold).abs() + 1.0).powi(2);
        self.trace = self.alpha_e * self.trace + surrogate;
        if self.v < self.v_threshold {
            return 0;
        }
        self.v = self.v_reset;
        1
    }

    /// Clear membrane and trace; decay factors are kept.
    pub fn reset(&mut self) {
        self.v = 0.0;
        self.trace = 0.0;
    }
}

impl Default for SuperSpikeNeuron {
    fn default() -> Self {
        Self::new(10.0, 10.0, 1.0)
    }
}
1116
/// Learnable Neuron Model (LNM) — parameterised activation + decay.
///
/// Discrete-time unit: `v <- alpha*v + beta*I + gamma*sigmoid(v)`, with a
/// hard reset to zero once `v` reaches `v_threshold`.
#[derive(Clone, Debug)]
pub struct LearnableNeuronModel {
    /// State / membrane variable.
    pub v: f64,
    /// State-retention (leak) coefficient.
    pub alpha: f64,
    /// Input coupling coefficient.
    pub beta: f64,
    /// Weight of the self-activation term.
    pub gamma: f64,
    /// Spike threshold.
    pub v_threshold: f64,
    /// Sigmoid steepness.
    pub f_slope: f64,
    /// Sigmoid midpoint.
    pub f_shift: f64,
}

impl LearnableNeuronModel {
    /// Default parameterisation (mild leak, weak self-excitation).
    pub fn new() -> Self {
        Self {
            v: 0.0,
            alpha: 0.9,
            beta: 0.1,
            gamma: 0.05,
            v_threshold: 1.0,
            f_slope: 5.0,
            f_shift: 0.5,
        }
    }

    /// Logistic activation of the current state variable.
    fn activation(&self) -> f64 {
        1.0 / (1.0 + (-(self.f_slope * (self.v - self.f_shift))).exp())
    }

    /// One update; returns 1 and hard-resets `v` to zero on spike.
    pub fn step(&mut self, current: f64) -> i32 {
        // Activation is evaluated on the pre-update state.
        let f_v = self.activation();
        self.v = self.alpha * self.v + self.beta * current + self.gamma * f_v;
        if self.v < self.v_threshold {
            0
        } else {
            self.v = 0.0;
            1
        }
    }

    /// Clear the state variable.
    pub fn reset(&mut self) {
        self.v = 0.0;
    }
}
impl Default for LearnableNeuronModel {
    fn default() -> Self {
        LearnableNeuronModel::new()
    }
}
1160
/// Pernarowski 1994 — simplified beta cell burster (3 ODE).
///
/// Fast (v, w) subsystem with a cubic nullcline plus a slow variable z
/// (eps2 << eps1 in the defaults) that paces the burst envelope.
#[derive(Clone, Debug)]
pub struct PernarowskiNeuron {
    /// Fast membrane-like variable.
    pub v: f64,
    /// Fast recovery variable.
    pub w: f64,
    /// Slow variable.
    pub z: f64,
    /// Offset in the w equation.
    pub alpha: f64,
    /// Gain of the slow z drive.
    pub beta: f64,
    /// Fast recovery rate.
    pub eps1: f64,
    /// Slow-variable rate (defaults much smaller than eps1).
    pub eps2: f64,
    /// Damping of w.
    pub gamma: f64,
    /// Forward-Euler integration step.
    pub dt: f64,
    /// Level whose upward crossing is reported as a spike.
    pub v_threshold: f64,
}

impl PernarowskiNeuron {
    pub fn new() -> Self {
        Self {
            v: -1.0,
            w: 0.0,
            z: 0.0,
            alpha: 0.1,
            beta: 0.5,
            eps1: 0.1,
            eps2: 0.001,
            gamma: 0.5,
            dt: 0.1,
            v_threshold: 0.5,
        }
    }

    /// One forward-Euler step; returns 1 exactly on an upward threshold
    /// crossing of `v`, else 0.
    pub fn step(&mut self, current: f64) -> i32 {
        let was_below = self.v < self.v_threshold;
        // Cubic fast nullcline f(v) = v - v³/3.
        let f_v = self.v - self.v.powi(3) / 3.0;
        // Simultaneous Euler: all three derivatives use the old state.
        let v_next = self.v + (f_v - self.w - self.z + current) * self.dt;
        let w_next = self.w + self.eps1 * (self.v - self.gamma * self.w + self.alpha) * self.dt;
        let z_next = self.z + self.eps2 * (self.beta * (self.v + 0.7) - self.z) * self.dt;
        self.v = v_next;
        self.w = w_next;
        self.z = z_next;
        if was_below && self.v >= self.v_threshold {
            1
        } else {
            0
        }
    }

    /// Restore the initial state used by `new`.
    pub fn reset(&mut self) {
        self.z = 0.0;
        self.w = 0.0;
        self.v = -1.0;
    }
}
impl Default for PernarowskiNeuron {
    fn default() -> Self {
        PernarowskiNeuron::new()
    }
}
1218
1219// ═══════════════════════════════════════════════════════════════════
1220// Brunel-Wang LIF with NMDA/AMPA/GABA
1221// ═══════════════════════════════════════════════════════════════════
1222
1223/// Brunel-Wang 2001 — LIF with NMDA (Mg²⁺ block), AMPA, and GABA synaptic
1224/// conductances for decision-making and working memory circuits.
1225///
1226/// Key feature: voltage-dependent NMDA conductance via Mg²⁺ block factor
1227/// `1 / (1 + [Mg²⁺]/3.57 · exp(-0.062·V))`. This creates positive feedback
1228/// that sustains persistent activity in recurrent circuits.
1229///
1230/// The single-current interface routes external input to i_ampa_ext; recurrent
1231/// AMPA/NMDA/GABA inputs are zero (use the multi-arg `step_full()` for
1232/// network simulations).
1233///
1234/// Brunel, N. & Wang, X.J., J Comput Neurosci 11:63, 2001.
#[derive(Clone, Debug)]
pub struct BrunelWangNeuron {
    /// Membrane potential (mV).
    pub v: f64,
    /// Resting potential (mV).
    pub v_rest: f64,
    /// Post-spike reset potential (mV).
    pub v_reset: f64,
    /// Firing threshold (mV).
    pub v_threshold: f64,
    /// Membrane time constant.
    pub tau_m: f64,
    /// Absolute refractory period.
    pub tau_ref: f64,
    /// External AMPA conductance.
    pub g_ampa_ext: f64,
    /// Recurrent AMPA conductance.
    pub g_ampa_rec: f64,
    /// NMDA conductance.
    pub g_nmda: f64,
    /// GABA conductance.
    pub g_gaba: f64,
    /// AMPA reversal potential (mV).
    pub v_ampa: f64,
    /// NMDA reversal potential (mV).
    pub v_nmda: f64,
    /// GABA reversal potential (mV).
    pub v_gaba: f64,
    /// Membrane capacitance.
    pub c_m: f64,
    /// Mg²⁺ concentration entering the NMDA block factor.
    pub mg_conc: f64,
    /// Integration time step.
    pub dt: f64,
    /// Time remaining in the refractory period.
    pub ref_remaining: f64,
    /// Multiplier applied to the single-current `step()` input.
    pub gain: f64,
}

impl BrunelWangNeuron {
    /// Single-cell parameters following Brunel & Wang (2001).
    pub fn new() -> Self {
        Self {
            v: -70.0,
            v_rest: -70.0,
            v_reset: -55.0,
            v_threshold: -50.0,
            tau_m: 20.0,
            tau_ref: 2.0,
            g_ampa_ext: 2.1,
            g_ampa_rec: 0.05,
            g_nmda: 0.165,
            g_gaba: 1.3,
            v_ampa: 0.0,
            v_nmda: 0.0,
            v_gaba: -70.0,
            c_m: 0.5,
            mg_conc: 1.0,
            dt: 0.1,
            ref_remaining: 0.0,
            gain: 1.0,
        }
    }

    /// Mg²⁺ block factor (Jahr & Stevens 1990).
    #[inline]
    fn nmda_mg_block(&self, v: f64) -> f64 {
        1.0 / (1.0 + self.mg_conc / 3.57 * (-0.062 * v).exp())
    }

    /// Full step with all 4 synaptic inputs (for network simulations).
    /// Returns 1 on a spike, 0 otherwise.
    pub fn step_full(
        &mut self,
        i_ampa_ext: f64,
        s_ampa_rec: f64,
        s_nmda_rec: f64,
        s_gaba: f64,
    ) -> i32 {
        // During the absolute refractory period the membrane is clamped:
        // count down and ignore all synaptic input.
        if self.ref_remaining > 0.0 {
            self.ref_remaining -= self.dt;
            return 0;
        }

        let v = self.v;
        let ampa_drive = v - self.v_ampa;

        // Conductance-based synaptic currents; the leading minus sign makes
        // a current depolarising whenever v lies below its reversal potential.
        let i_ampa = -self.g_ampa_ext * ampa_drive * i_ampa_ext
            - self.g_ampa_rec * ampa_drive * s_ampa_rec;
        // NMDA is additionally gated by the voltage-dependent Mg²⁺ block.
        let i_nmda = -self.g_nmda * self.nmda_mg_block(v) * (v - self.v_nmda) * s_nmda_rec;
        let i_gaba = -self.g_gaba * (v - self.v_gaba) * s_gaba;

        // Leaky-integrate-and-fire membrane update (forward Euler).
        let i_leak = -(v - self.v_rest) / self.tau_m;
        self.v += (i_leak + (i_ampa + i_nmda + i_gaba) / self.c_m) * self.dt;

        if self.v < self.v_threshold {
            return 0;
        }
        // Threshold crossing: emit a spike, reset, enter refractoriness.
        self.v = self.v_reset;
        self.ref_remaining = self.tau_ref;
        1
    }

    /// Single-current interface: routes `gain * current` to the external
    /// AMPA drive; all recurrent inputs are zero.
    pub fn step(&mut self, current: f64) -> i32 {
        self.step_full(self.gain * current, 0.0, 0.0, 0.0)
    }

    /// Return to rest and clear any pending refractory time.
    pub fn reset(&mut self) {
        self.ref_remaining = 0.0;
        self.v = self.v_rest;
    }
}

impl Default for BrunelWangNeuron {
    fn default() -> Self {
        Self::new()
    }
}
1338
1339#[cfg(test)]
1340mod tests {
1341    use super::*;
1342
1343    #[test]
1344    fn fhn_fires() {
1345        let mut n = FitzHughNagumoNeuron::new();
1346        let t: i32 = (0..2000).map(|_| n.step(1.0)).sum();
1347        assert!(t > 0);
1348    }
1349    #[test]
1350    fn morris_lecar_fires() {
1351        let mut n = MorrisLecarNeuron::new();
1352        let t: i32 = (0..2000).map(|_| n.step(100.0)).sum();
1353        assert!(t > 0);
1354    }
1355    #[test]
1356    fn hr_fires() {
1357        let mut n = HindmarshRoseNeuron::new();
1358        let t: i32 = (0..2000).map(|_| n.step(3.0)).sum();
1359        assert!(t > 0);
1360    }
1361    #[test]
1362    fn rnf_fires() {
1363        let mut n = ResonateAndFireNeuron::new();
1364        let t: i32 = (0..5000).map(|_| n.step(3.0)).sum();
1365        assert!(t > 0);
1366    }
1367    #[test]
1368    fn fhr_fires() {
1369        let mut n = FitzHughRinzelNeuron::new();
1370        let t: i32 = (0..2000).map(|_| n.step(1.0)).sum();
1371        assert!(t > 0);
1372    }
1373    #[test]
1374    fn mckean_fires() {
1375        let mut n = McKeanNeuron::new();
1376        let t: i32 = (0..2000).map(|_| n.step(0.5)).sum();
1377        assert!(t > 0);
1378    }
1379    #[test]
1380    fn tw_fires() {
1381        let mut n = TermanWangOscillator::new();
1382        let t: i32 = (0..2000).map(|_| n.step(0.5)).sum();
1383        assert!(t > 0);
1384    }
1385    #[test]
1386    fn benda_herz_fires() {
1387        let mut n = BendaHerzNeuron::new(42);
1388        let t: i32 = (0..10000).map(|_| n.step(20.0)).sum();
1389        assert!(t > 0);
1390    }
1391    #[test]
1392    fn alpha_fires() {
1393        let mut n = AlphaNeuron::new();
1394        let t: i32 = (0..100).map(|_| n.step(0.5, 0.0)).sum();
1395        assert!(t > 0);
1396    }
1397    #[test]
1398    fn coba_fires() {
1399        let mut n = COBALIFNeuron::new();
1400        let t: i32 = (0..2000).map(|_| n.step(500.0, 0.0, 0.0)).sum();
1401        assert!(t > 0);
1402    }
1403    #[test]
1404    fn gutkin_fires() {
1405        let mut n = GutkinErmentroutNeuron::new();
1406        let t: i32 = (0..2000).map(|_| n.step(15.0)).sum();
1407        assert!(t > 0);
1408    }
1409    #[test]
1410    fn wilson_hr_fires() {
1411        let mut n = WilsonHRNeuron::new();
1412        let t: i32 = (0..2000).map(|_| n.step(0.5)).sum();
1413        assert!(t > 0);
1414    }
1415    #[test]
1416    fn chay_fires() {
1417        let mut n = ChayNeuron::new();
1418        let t: i32 = (0..5000).map(|_| n.step(20.0)).sum();
1419        assert!(t > 0);
1420    }
1421    #[test]
1422    fn chay_keizer_fires() {
1423        let mut n = ChayKeizerNeuron::new();
1424        let t: i32 = (0..5000).map(|_| n.step(10.0)).sum();
1425        assert!(t > 0);
1426    }
1427    #[test]
1428    fn srk_fires() {
1429        let mut n = ShermanRinzelKeizerNeuron::new();
1430        let t: i32 = (0..5000).map(|_| n.step(5.0)).sum();
1431        assert!(t > 0);
1432    }
1433    #[test]
1434    fn butera_fires() {
1435        let mut n = ButeraRespiratoryNeuron::new();
1436        let t: i32 = (0..20000).map(|_| n.step(50.0)).sum();
1437        assert!(t > 0);
1438    }
1439    #[test]
1440    fn eprop_fires() {
1441        let mut n = EPropALIFNeuron::default();
1442        let t: i32 = (0..50).map(|_| n.step(0.5)).sum();
1443        assert!(t > 0);
1444    }
1445    #[test]
1446    fn superspike_fires() {
1447        let mut n = SuperSpikeNeuron::default();
1448        let t: i32 = (0..50).map(|_| n.step(0.5)).sum();
1449        assert!(t > 0);
1450    }
1451    #[test]
1452    fn lnm_fires() {
1453        let mut n = LearnableNeuronModel::new();
1454        let t: i32 = (0..50).map(|_| n.step(2.0)).sum();
1455        assert!(t > 0);
1456    }
1457    #[test]
1458    fn pernarowski_fires() {
1459        let mut n = PernarowskiNeuron::new();
1460        let t: i32 = (0..2000).map(|_| n.step(1.0)).sum();
1461        assert!(t > 0);
1462    }
1463
1464    // ── Multi-angle tests for simple_spiking models ──
1465
1466    // -- FitzHughNagumo --
1467    #[test]
1468    fn fhn_silent_without_input() {
1469        let mut n = FitzHughNagumoNeuron::new();
1470        let t: i32 = (0..2000).map(|_| n.step(0.0)).sum();
1471        assert_eq!(t, 0);
1472    }
1473    #[test]
1474    fn fhn_reset_clears_state() {
1475        let mut n = FitzHughNagumoNeuron::new();
1476        for _ in 0..500 {
1477            n.step(1.0);
1478        }
1479        n.reset();
1480        assert!((n.v - (-1.0)).abs() < 1e-10);
1481        assert!((n.w - (-0.5)).abs() < 1e-10);
1482    }
1483    #[test]
1484    fn fhn_moderate_input_stable() {
1485        let mut n = FitzHughNagumoNeuron::new();
1486        for _ in 0..2000 {
1487            n.step(2.0);
1488        }
1489        assert!(n.v.is_finite());
1490    }
1491    #[test]
1492    fn fhn_recovery_variable() {
1493        let mut n = FitzHughNagumoNeuron::new();
1494        for _ in 0..2000 {
1495            n.step(1.0);
1496        }
1497        // w should have evolved from initial
1498        assert!((n.w - (-0.5)).abs() > 0.01, "recovery w should change");
1499    }
1500    #[test]
1501    fn fhn_nan_no_panic() {
1502        FitzHughNagumoNeuron::new().step(f64::NAN);
1503    }
1504    #[test]
1505    fn fhn_negative_no_crash() {
1506        let mut n = FitzHughNagumoNeuron::new();
1507        for _ in 0..500 {
1508            n.step(-5.0);
1509        }
1510        assert!(n.v.is_finite());
1511    }
1512
1513    // -- MorrisLecar --
1514    #[test]
1515    fn ml_silent_without_input() {
1516        let mut n = MorrisLecarNeuron::new();
1517        let t: i32 = (0..500).map(|_| n.step(0.0)).sum();
1518        assert_eq!(t, 0);
1519    }
1520    #[test]
1521    fn ml_reset_clears_state() {
1522        let mut n = MorrisLecarNeuron::new();
1523        for _ in 0..500 {
1524            n.step(100.0);
1525        }
1526        n.reset();
1527        assert!((n.v - (-60.0)).abs() < 1e-10);
1528    }
1529    #[test]
1530    fn ml_moderate_input_stable() {
1531        let mut n = MorrisLecarNeuron::new();
1532        for _ in 0..2000 {
1533            n.step(200.0);
1534        }
1535        assert!(n.v.is_finite());
1536    }
1537    #[test]
1538    fn ml_nan_no_panic() {
1539        MorrisLecarNeuron::new().step(f64::NAN);
1540    }
1541    #[test]
1542    fn ml_negative_no_crash() {
1543        let mut n = MorrisLecarNeuron::new();
1544        for _ in 0..500 {
1545            n.step(-50.0);
1546        }
1547        assert!(n.v.is_finite());
1548    }
1549    #[test]
1550    fn ml_k_gating_bounded() {
1551        let mut n = MorrisLecarNeuron::new();
1552        for _ in 0..2000 {
1553            n.step(100.0);
1554        }
1555        assert!(n.w >= 0.0 && n.w <= 1.0, "w={}", n.w);
1556    }
1557
1558    // -- HindmarshRose --
1559    #[test]
1560    fn hr_reset_clears_state() {
1561        let mut n = HindmarshRoseNeuron::new();
1562        for _ in 0..500 {
1563            n.step(3.0);
1564        }
1565        n.reset();
1566        assert!((n.x - (-1.6)).abs() < 1e-10);
1567    }
1568    #[test]
1569    fn hr_moderate_input_stable() {
1570        let mut n = HindmarshRoseNeuron::new();
1571        for _ in 0..2000 {
1572            n.step(5.0);
1573        }
1574        assert!(n.x.is_finite());
1575    }
1576    #[test]
1577    fn hr_slow_z_evolves() {
1578        let mut n = HindmarshRoseNeuron::new();
1579        let z0 = n.z;
1580        for _ in 0..2000 {
1581            n.step(3.0);
1582        }
1583        assert!((n.z - z0).abs() > 0.001, "slow variable z should evolve");
1584    }
1585    #[test]
1586    fn hr_nan_no_panic() {
1587        HindmarshRoseNeuron::new().step(f64::NAN);
1588    }
1589    #[test]
1590    fn hr_negative_no_crash() {
1591        let mut n = HindmarshRoseNeuron::new();
1592        for _ in 0..500 {
1593            n.step(-1.0);
1594        }
1595        assert!(n.x.is_finite());
1596    }
1597
1598    // -- ResonateAndFire --
1599    #[test]
1600    fn rnf_reset_clears_state() {
1601        let mut n = ResonateAndFireNeuron::new();
1602        for _ in 0..500 {
1603            n.step(3.0);
1604        }
1605        n.reset();
1606        assert!((n.x - 0.0).abs() < 1e-10);
1607    }
1608    #[test]
1609    fn rnf_bounded() {
1610        let mut n = ResonateAndFireNeuron::new();
1611        for _ in 0..5000 {
1612            n.step(100.0);
1613        }
1614        assert!(n.x.is_finite());
1615    }
1616    #[test]
1617    fn rnf_nan_no_panic() {
1618        ResonateAndFireNeuron::new().step(f64::NAN);
1619    }
1620    #[test]
1621    fn rnf_negative_no_crash() {
1622        let mut n = ResonateAndFireNeuron::new();
1623        for _ in 0..500 {
1624            n.step(-5.0);
1625        }
1626        assert!(n.x.is_finite());
1627    }
1628    #[test]
1629    fn rnf_subthreshold_oscillation() {
1630        let mut n = ResonateAndFireNeuron::new();
1631        for _ in 0..100 {
1632            n.step(0.5);
1633        }
1634        assert!(n.x.abs() > 0.0 || n.y.abs() > 0.0);
1635    }
1636
1637    #[test]
1638    fn brf_boundary_matches_algorithm() {
1639        let p = brf_sustain_oscillation_boundary(10.0, 0.01);
1640        let expected = (-1.0 + (1.0_f64 - 0.1_f64 * 0.1_f64).sqrt()) / 0.01;
1641        assert!((p - expected).abs() < 1e-12);
1642    }
1643
1644    #[test]
1645    fn brf_step_matches_algorithm_one_step() {
1646        let mut n = BalancedResonateAndFireNeuron {
1647            x: 0.2,
1648            y: -0.1,
1649            q: 0.3,
1650            omega: 12.0,
1651            b_offset: 0.75,
1652            threshold: 1.0,
1653            gamma: 0.9,
1654            dt: 0.01,
1655        };
1656        let p_omega = brf_sustain_oscillation_boundary(12.0, 0.01);
1657        let b_t = p_omega - 0.75 - 0.3;
1658        let expected_x = 0.2 + 0.01 * (b_t * 0.2 - 12.0 * -0.1 + 2.0);
1659        let expected_y = -0.1 + 0.01 * (12.0 * 0.2 + b_t * -0.1);
1660        let expected_spike = if expected_x >= 1.3 { 1 } else { 0 };
1661        let spike = n.step(2.0);
1662        assert_eq!(spike, expected_spike);
1663        assert!((n.x - expected_x).abs() < 1e-12);
1664        assert!((n.y - expected_y).abs() < 1e-12);
1665        assert!((n.q - (0.9 * 0.3 + expected_spike as f64)).abs() < 1e-12);
1666    }
1667
1668    #[test]
1669    fn brf_reset_clears_membrane_and_refractory_state() {
1670        let mut n = BalancedResonateAndFireNeuron::new();
1671        assert_eq!(n.step(200.0), 1);
1672        n.reset();
1673        assert_eq!(n.x, 0.0);
1674        assert_eq!(n.y, 0.0);
1675        assert_eq!(n.q, 0.0);
1676    }
1677
1678    // -- FitzHughRinzel --
1679    #[test]
1680    fn fhr_reset_clears_state() {
1681        let mut n = FitzHughRinzelNeuron::new();
1682        for _ in 0..500 {
1683            n.step(1.0);
1684        }
1685        n.reset();
1686        assert!((n.v - (-1.0)).abs() < 1e-10);
1687    }
1688    #[test]
1689    fn fhr_bounded() {
1690        let mut n = FitzHughRinzelNeuron::new();
1691        for _ in 0..2000 {
1692            n.step(50.0);
1693        }
1694        assert!(n.v.is_finite());
1695    }
1696    #[test]
1697    fn fhr_nan_no_panic() {
1698        FitzHughRinzelNeuron::new().step(f64::NAN);
1699    }
1700    #[test]
1701    fn fhr_negative_no_crash() {
1702        let mut n = FitzHughRinzelNeuron::new();
1703        for _ in 0..500 {
1704            n.step(-5.0);
1705        }
1706        assert!(n.v.is_finite());
1707    }
1708    #[test]
1709    fn fhr_slow_y_variable() {
1710        let mut n = FitzHughRinzelNeuron::new();
1711        let y0 = n.y;
1712        for _ in 0..2000 {
1713            n.step(1.0);
1714        }
1715        assert!((n.y - y0).abs() > 1e-6, "slow y should evolve in 3D model");
1716    }
1717
1718    // -- McKean --
1719    #[test]
1720    fn mckean_reset_clears_state() {
1721        let mut n = McKeanNeuron::new();
1722        for _ in 0..500 {
1723            n.step(0.5);
1724        }
1725        n.reset();
1726        assert!((n.v - 0.0).abs() < 1e-10);
1727    }
1728    #[test]
1729    fn mckean_bounded() {
1730        let mut n = McKeanNeuron::new();
1731        for _ in 0..2000 {
1732            n.step(50.0);
1733        }
1734        assert!(n.v.is_finite());
1735    }
1736    #[test]
1737    fn mckean_nan_no_panic() {
1738        McKeanNeuron::new().step(f64::NAN);
1739    }
1740    #[test]
1741    fn mckean_negative_no_crash() {
1742        let mut n = McKeanNeuron::new();
1743        for _ in 0..500 {
1744            n.step(-5.0);
1745        }
1746        assert!(n.v.is_finite());
1747    }
1748
1749    // -- TermanWang --
1750    #[test]
1751    fn tw_reset_clears_state() {
1752        let mut n = TermanWangOscillator::new();
1753        for _ in 0..500 {
1754            n.step(0.5);
1755        }
1756        n.reset();
1757        assert!((n.v - (-1.5)).abs() < 1e-10);
1758    }
1759    #[test]
1760    fn tw_bounded() {
1761        let mut n = TermanWangOscillator::new();
1762        for _ in 0..2000 {
1763            n.step(50.0);
1764        }
1765        assert!(n.v.is_finite());
1766    }
1767    #[test]
1768    fn tw_nan_no_panic() {
1769        TermanWangOscillator::new().step(f64::NAN);
1770    }
1771    #[test]
1772    fn tw_negative_no_crash() {
1773        let mut n = TermanWangOscillator::new();
1774        for _ in 0..500 {
1775            n.step(-5.0);
1776        }
1777        assert!(n.v.is_finite());
1778    }
1779
1780    // -- BendaHerz --
1781    #[test]
1782    fn benda_herz_reset_clears_state() {
1783        let mut n = BendaHerzNeuron::new(42);
1784        for _ in 0..100 {
1785            n.step(20.0);
1786        }
1787        n.reset();
1788        assert!((n.a - 0.0).abs() < 1e-10);
1789    }
1790    #[test]
1791    fn benda_herz_bounded() {
1792        let mut n = BendaHerzNeuron::new(42);
1793        for _ in 0..10000 {
1794            n.step(1e4);
1795        }
1796        assert!(n.a.is_finite());
1797    }
1798    #[test]
1799    fn benda_herz_adaptation() {
1800        let mut n = BendaHerzNeuron::new(42);
1801        for _ in 0..10000 {
1802            n.step(20.0);
1803        }
1804        assert!(n.a > 0.0, "adaptation variable a should increase: {}", n.a);
1805    }
1806    #[test]
1807    fn benda_herz_nan_no_panic() {
1808        BendaHerzNeuron::new(42).step(f64::NAN);
1809    }
1810    #[test]
1811    fn benda_herz_negative_no_crash() {
1812        let mut n = BendaHerzNeuron::new(42);
1813        for _ in 0..500 {
1814            n.step(-10.0);
1815        }
1816        assert!(n.a.is_finite());
1817    }
1818
1819    // -- Alpha --
1820    #[test]
1821    fn alpha_reset_clears_state() {
1822        let mut n = AlphaNeuron::new();
1823        for _ in 0..50 {
1824            n.step(0.5, 0.0);
1825        }
1826        n.reset();
1827        assert!((n.v - 0.0).abs() < 1e-10);
1828    }
1829    #[test]
1830    fn alpha_bounded() {
1831        let mut n = AlphaNeuron::new();
1832        for _ in 0..1000 {
1833            n.step(100.0, 0.0);
1834        }
1835        assert!(n.v.is_finite());
1836    }
1837    #[test]
1838    fn alpha_spike_input_drives() {
1839        let mut n = AlphaNeuron::new();
1840        for _ in 0..100 {
1841            n.step(0.0, 1.0);
1842        }
1843        // Spike input should contribute to synaptic current
1844        assert!(n.v.is_finite());
1845    }
1846    #[test]
1847    fn alpha_nan_no_panic() {
1848        AlphaNeuron::new().step(f64::NAN, 0.0);
1849    }
1850    #[test]
1851    fn alpha_negative_no_crash() {
1852        let mut n = AlphaNeuron::new();
1853        for _ in 0..100 {
1854            n.step(-5.0, 0.0);
1855        }
1856        assert!(n.v.is_finite());
1857    }
1858
1859    // -- COBALIF --
1860    #[test]
1861    fn coba_reset_clears_state() {
1862        let mut n = COBALIFNeuron::new();
1863        for _ in 0..100 {
1864            n.step(500.0, 0.0, 0.0);
1865        }
1866        n.reset();
1867        assert!((n.v - n.e_l).abs() < 1e-10);
1868    }
1869    #[test]
1870    fn coba_bounded() {
1871        let mut n = COBALIFNeuron::new();
1872        for _ in 0..2000 {
1873            n.step(1e5, 0.0, 0.0);
1874        }
1875        assert!(n.v.is_finite());
1876    }
1877    #[test]
1878    fn coba_inhibition_suppresses() {
1879        let mut n_exc = COBALIFNeuron::new();
1880        let mut n_inh = COBALIFNeuron::new();
1881        let t_exc: i32 = (0..2000).map(|_| n_exc.step(500.0, 0.0, 0.0)).sum();
1882        let t_inh: i32 = (0..2000).map(|_| n_inh.step(500.0, 0.0, 5.0)).sum();
1883        assert!(t_inh <= t_exc, "inhibition should reduce spiking");
1884    }
1885    #[test]
1886    fn coba_nan_no_panic() {
1887        COBALIFNeuron::new().step(f64::NAN, 0.0, 0.0);
1888    }
1889    #[test]
1890    fn coba_negative_no_crash() {
1891        let mut n = COBALIFNeuron::new();
1892        for _ in 0..500 {
1893            n.step(-100.0, 0.0, 0.0);
1894        }
1895        assert!(n.v.is_finite());
1896    }
1897
1898    // -- GutkinErmentrout --
1899    #[test]
1900    fn gutkin_reset_clears_state() {
1901        let mut n = GutkinErmentroutNeuron::new();
1902        for _ in 0..500 {
1903            n.step(15.0);
1904        }
1905        n.reset();
1906        assert!((n.v - (-65.0)).abs() < 1e-10);
1907    }
1908    #[test]
1909    fn gutkin_bounded() {
1910        let mut n = GutkinErmentroutNeuron::new();
1911        for _ in 0..2000 {
1912            n.step(1e4);
1913        }
1914        assert!(n.v.is_finite());
1915    }
1916    #[test]
1917    fn gutkin_nan_no_panic() {
1918        GutkinErmentroutNeuron::new().step(f64::NAN);
1919    }
1920    #[test]
1921    fn gutkin_negative_no_crash() {
1922        let mut n = GutkinErmentroutNeuron::new();
1923        for _ in 0..500 {
1924            n.step(-10.0);
1925        }
1926        assert!(n.v.is_finite());
1927    }
1928
1929    // -- WilsonHR --
1930    #[test]
1931    fn wilson_hr_reset_clears_state() {
1932        let mut n = WilsonHRNeuron::new();
1933        for _ in 0..500 {
1934            n.step(0.5);
1935        }
1936        n.reset();
1937        assert!((n.v - (-0.7)).abs() < 1e-10);
1938    }
1939    #[test]
1940    fn wilson_hr_moderate_stable() {
1941        let mut n = WilsonHRNeuron::new();
1942        for _ in 0..2000 {
1943            n.step(1.0);
1944        }
1945        assert!(n.v.is_finite());
1946    }
1947    #[test]
1948    fn wilson_hr_nan_no_panic() {
1949        WilsonHRNeuron::new().step(f64::NAN);
1950    }
1951    #[test]
1952    fn wilson_hr_negative_no_crash() {
1953        let mut n = WilsonHRNeuron::new();
1954        for _ in 0..500 {
1955            n.step(-5.0);
1956        }
1957        assert!(n.v.is_finite());
1958    }
1959
1960    // -- Chay --
1961    #[test]
1962    fn chay_reset_clears_state() {
1963        let mut n = ChayNeuron::new();
1964        for _ in 0..1000 {
1965            n.step(20.0);
1966        }
1967        n.reset();
1968        assert!((n.v - (-50.0)).abs() < 1e-10);
1969    }
1970    #[test]
1971    fn chay_bounded() {
1972        let mut n = ChayNeuron::new();
1973        for _ in 0..5000 {
1974            n.step(200.0);
1975        }
1976        assert!(n.v.is_finite());
1977    }
1978    #[test]
1979    fn chay_ca_nonneg() {
1980        let mut n = ChayNeuron::new();
1981        for _ in 0..5000 {
1982            n.step(20.0);
1983        }
1984        assert!(n.ca >= 0.0, "Ca²⁺ must be non-negative");
1985    }
1986    #[test]
1987    fn chay_nan_no_panic() {
1988        ChayNeuron::new().step(f64::NAN);
1989    }
1990    #[test]
1991    fn chay_negative_no_crash() {
1992        let mut n = ChayNeuron::new();
1993        for _ in 0..500 {
1994            n.step(-10.0);
1995        }
1996        assert!(n.v.is_finite());
1997    }
1998
1999    // -- ChayKeizer --
2000    #[test]
2001    fn chay_keizer_reset_clears_state() {
2002        let mut n = ChayKeizerNeuron::new();
2003        for _ in 0..1000 {
2004            n.step(10.0);
2005        }
2006        n.reset();
2007        assert!((n.v - (-50.0)).abs() < 1e-10);
2008    }
2009    #[test]
2010    fn chay_keizer_bounded() {
2011        let mut n = ChayKeizerNeuron::new();
2012        for _ in 0..5000 {
2013            n.step(100.0);
2014        }
2015        assert!(n.v.is_finite());
2016    }
2017    #[test]
2018    fn chay_keizer_nan_no_panic() {
2019        ChayKeizerNeuron::new().step(f64::NAN);
2020    }
2021    #[test]
2022    fn chay_keizer_negative_no_crash() {
2023        let mut n = ChayKeizerNeuron::new();
2024        for _ in 0..500 {
2025            n.step(-10.0);
2026        }
2027        assert!(n.v.is_finite());
2028    }
2029
2030    // -- ShermanRinzelKeizer --
2031    #[test]
2032    fn srk_reset_clears_state() {
2033        let mut n = ShermanRinzelKeizerNeuron::new();
2034        for _ in 0..1000 {
2035            n.step(5.0);
2036        }
2037        n.reset();
2038        assert!((n.v - (-50.0)).abs() < 1e-10);
2039    }
2040    #[test]
2041    fn srk_bounded() {
2042        let mut n = ShermanRinzelKeizerNeuron::new();
2043        for _ in 0..5000 {
2044            n.step(100.0);
2045        }
2046        assert!(n.v.is_finite());
2047    }
2048    #[test]
2049    fn srk_nan_no_panic() {
2050        ShermanRinzelKeizerNeuron::new().step(f64::NAN);
2051    }
2052    #[test]
2053    fn srk_negative_no_crash() {
2054        let mut n = ShermanRinzelKeizerNeuron::new();
2055        for _ in 0..500 {
2056            n.step(-5.0);
2057        }
2058        assert!(n.v.is_finite());
2059    }
2060
2061    // -- ButeraRespiratory --
2062    #[test]
2063    fn butera_reset_clears_state() {
2064        let mut n = ButeraRespiratoryNeuron::new();
2065        for _ in 0..1000 {
2066            n.step(50.0);
2067        }
2068        n.reset();
2069        assert!((n.v - (-50.0)).abs() < 1e-10);
2070    }
2071    #[test]
2072    fn butera_bounded() {
2073        let mut n = ButeraRespiratoryNeuron::new();
2074        for _ in 0..5000 {
2075            n.step(500.0);
2076        }
2077        assert!(n.v.is_finite());
2078    }
2079    #[test]
2080    fn butera_nan_no_panic() {
2081        ButeraRespiratoryNeuron::new().step(f64::NAN);
2082    }
2083    #[test]
2084    fn butera_negative_no_crash() {
2085        let mut n = ButeraRespiratoryNeuron::new();
2086        for _ in 0..500 {
2087            n.step(-20.0);
2088        }
2089        assert!(n.v.is_finite());
2090    }
2091
2092    // -- EPropALIF --
2093    #[test]
2094    fn eprop_reset_clears_state() {
2095        let mut n = EPropALIFNeuron::default();
2096        for _ in 0..50 {
2097            n.step(0.5);
2098        }
2099        n.reset();
2100        assert!((n.v - 0.0).abs() < 1e-10);
2101    }
2102    #[test]
2103    fn eprop_bounded() {
2104        let mut n = EPropALIFNeuron::default();
2105        for _ in 0..1000 {
2106            n.step(100.0);
2107        }
2108        assert!(n.v.is_finite());
2109    }
2110    #[test]
2111    fn eprop_adaptation() {
2112        let mut n = EPropALIFNeuron::default();
2113        for _ in 0..50 {
2114            n.step(0.5);
2115        }
2116        // a (adaptation) should have increased after spikes
2117        assert!(n.a.is_finite());
2118    }
2119    #[test]
2120    fn eprop_nan_no_panic() {
2121        EPropALIFNeuron::default().step(f64::NAN);
2122    }
2123
2124    // -- SuperSpike --
2125    #[test]
2126    fn superspike_reset_clears_state() {
2127        let mut n = SuperSpikeNeuron::default();
2128        for _ in 0..50 {
2129            n.step(0.5);
2130        }
2131        n.reset();
2132        assert!((n.v - 0.0).abs() < 1e-10);
2133    }
2134    #[test]
2135    fn superspike_bounded() {
2136        let mut n = SuperSpikeNeuron::default();
2137        for _ in 0..1000 {
2138            n.step(100.0);
2139        }
2140        assert!(n.v.is_finite());
2141    }
2142    #[test]
2143    fn superspike_trace_evolves() {
2144        let mut n = SuperSpikeNeuron::default();
2145        for _ in 0..50 {
2146            n.step(0.5);
2147        }
2148        assert!(n.trace.is_finite());
2149    }
2150    #[test]
2151    fn superspike_nan_no_panic() {
2152        SuperSpikeNeuron::default().step(f64::NAN);
2153    }
2154
2155    // -- LearnableNeuron --
2156    #[test]
2157    fn lnm_reset_clears_state() {
2158        let mut n = LearnableNeuronModel::new();
2159        for _ in 0..50 {
2160            n.step(2.0);
2161        }
2162        n.reset();
2163        assert!((n.v - 0.0).abs() < 1e-10);
2164    }
2165    #[test]
2166    fn lnm_bounded() {
2167        let mut n = LearnableNeuronModel::new();
2168        for _ in 0..1000 {
2169            n.step(100.0);
2170        }
2171        assert!(n.v.is_finite());
2172    }
2173    #[test]
2174    fn lnm_nan_no_panic() {
2175        LearnableNeuronModel::new().step(f64::NAN);
2176    }
2177
2178    // -- Pernarowski --
2179    #[test]
2180    fn pernarowski_reset_clears_state() {
2181        let mut n = PernarowskiNeuron::new();
2182        for _ in 0..500 {
2183            n.step(1.0);
2184        }
2185        n.reset();
2186        assert!((n.v - (-1.0)).abs() < 1e-10);
2187    }
2188    #[test]
2189    fn pernarowski_bounded() {
2190        let mut n = PernarowskiNeuron::new();
2191        for _ in 0..2000 {
2192            n.step(50.0);
2193        }
2194        assert!(n.v.is_finite());
2195    }
2196    #[test]
2197    fn pernarowski_slow_z() {
2198        let mut n = PernarowskiNeuron::new();
2199        let z0 = n.z;
2200        for _ in 0..2000 {
2201            n.step(1.0);
2202        }
2203        assert!((n.z - z0).abs() > 1e-6, "slow z should evolve");
2204    }
2205    #[test]
2206    fn pernarowski_nan_no_panic() {
2207        PernarowskiNeuron::new().step(f64::NAN);
2208    }
2209    #[test]
2210    fn pernarowski_negative_no_crash() {
2211        let mut n = PernarowskiNeuron::new();
2212        for _ in 0..500 {
2213            n.step(-5.0);
2214        }
2215        assert!(n.v.is_finite());
2216    }
2217
2218    // -- BrunelWang tests --
2219
2220    #[test]
2221    fn brunel_wang_fires_with_ampa_ext() {
2222        let mut n = BrunelWangNeuron::new();
2223        let mut spikes = 0;
2224        for _ in 0..5000 {
2225            spikes += n.step(5.0);
2226        }
2227        assert!(
2228            spikes > 0,
2229            "Must fire with external AMPA drive, got {spikes}"
2230        );
2231    }
2232
2233    #[test]
2234    fn brunel_wang_silent_without_input() {
2235        let mut n = BrunelWangNeuron::new();
2236        let spikes: i32 = (0..10_000).map(|_| n.step(0.0)).sum();
2237        assert_eq!(spikes, 0, "Must be silent without input");
2238    }
2239
2240    #[test]
2241    fn brunel_wang_nmda_mg_block() {
2242        let n = BrunelWangNeuron::new();
2243        // At resting potential (-70 mV), Mg²⁺ block should be strong
2244        let block_rest = n.nmda_mg_block(-70.0);
2245        // At depolarised potential (0 mV), block should be weak
2246        let block_depol = n.nmda_mg_block(0.0);
2247        assert!(
2248            block_depol > block_rest,
2249            "Mg²⁺ block should weaken with depolarisation: rest={block_rest:.3} depol={block_depol:.3}"
2250        );
2251        // At -70 mV, block factor should be small (< 0.1)
2252        assert!(
2253            block_rest < 0.1,
2254            "Block at -70 mV should be < 0.1, got {block_rest:.3}"
2255        );
2256        // At 0 mV, block factor should be close to 1
2257        assert!(
2258            block_depol > 0.5,
2259            "Block at 0 mV should be > 0.5, got {block_depol:.3}"
2260        );
2261    }
2262
2263    #[test]
2264    fn brunel_wang_full_step_nmda_drive() {
2265        // NMDA recurrent input should drive firing via positive feedback
2266        let mut n = BrunelWangNeuron::new();
2267        n.v = -55.0; // near threshold, Mg²⁺ block partially relieved
2268        let mut spikes = 0;
2269        for _ in 0..1000 {
2270            spikes += n.step_full(0.0, 0.0, 1.0, 0.0); // pure NMDA drive
2271        }
2272        assert!(
2273            spikes > 0,
2274            "NMDA drive at depolarised V should cause spikes"
2275        );
2276    }
2277
2278    #[test]
2279    fn brunel_wang_gaba_suppresses() {
2280        let mut with_gaba = BrunelWangNeuron::new();
2281        let mut no_gaba = BrunelWangNeuron::new();
2282        let mut spikes_gaba = 0;
2283        let mut spikes_no = 0;
2284        for _ in 0..5000 {
2285            spikes_gaba += with_gaba.step_full(3.0, 0.0, 0.0, 1.0); // GABA on
2286            spikes_no += no_gaba.step_full(3.0, 0.0, 0.0, 0.0);
2287        }
2288        assert!(
2289            spikes_no >= spikes_gaba,
2290            "GABA should suppress: no_gaba={spikes_no}, with_gaba={spikes_gaba}"
2291        );
2292    }
2293
2294    #[test]
2295    fn brunel_wang_refractory() {
2296        let mut n = BrunelWangNeuron::new();
2297        // Drive to spike
2298        while n.step(10.0) == 0 {}
2299        // Immediately after spike, should be in refractory
2300        assert!(n.ref_remaining > 0.0, "Should be refractory after spike");
2301        // Should not spike during refractory
2302        assert_eq!(
2303            n.step(100.0),
2304            0,
2305            "Should not spike during refractory period"
2306        );
2307    }
2308
2309    #[test]
2310    fn brunel_wang_reset() {
2311        let mut n = BrunelWangNeuron::new();
2312        for _ in 0..1000 {
2313            n.step(5.0);
2314        }
2315        n.reset();
2316        assert_eq!(n.v, n.v_rest);
2317        assert_eq!(n.ref_remaining, 0.0);
2318    }
2319
2320    #[test]
2321    fn brunel_wang_voltage_bounded() {
2322        let mut n = BrunelWangNeuron::new();
2323        for _ in 0..10_000 {
2324            n.step(100.0);
2325        }
2326        assert!(n.v.is_finite(), "V must stay finite");
2327        // V should stay near v_reset during sustained spiking (refractory resets)
2328        assert!(
2329            n.v <= n.v_threshold,
2330            "V should be at or below threshold (reset clamp)"
2331        );
2332    }
2333
2334    #[test]
2335    fn brunel_wang_nan_input() {
2336        let mut n = BrunelWangNeuron::new();
2337        n.step(f64::NAN);
2338        // NaN input → V becomes NaN. Check we don't panic.
2339    }
2340
2341    #[test]
2342    fn brunel_wang_performance() {
2343        let start = std::time::Instant::now();
2344        let mut n = BrunelWangNeuron::new();
2345        for _ in 0..10_000 {
2346            std::hint::black_box(n.step(3.0));
2347        }
2348        let elapsed = start.elapsed();
2349        assert!(
2350            elapsed.as_millis() < 50,
2351            "10k steps in <50ms, took {}ms",
2352            elapsed.as_millis()
2353        );
2354    }
2355}