Skip to content

Spike Augmentation

Spike-aware data augmentation: temporal jitter, spike dropout, firing-rate scaling, polarity flips, and noise injection (background spikes, hot pixels). Unlike image-style augmentation, these transforms operate directly on spike trains and preserve their binary structure.

from sc_neurocore.augmentation import SpikeAugment

aug = SpikeAugment(jitter_steps=1, dropout_rate=0.1)
augmented = aug(spike_train)

See Tutorial 57: Spike Augmentation.

sc_neurocore.augmentation

Spike-aware augmentation and curriculum scheduling for SNN training.

SpikeAugment dataclass

Composable spike-domain augmentation.

Parameters

jitter_steps : int
    Max temporal jitter in timesteps (spikes shift +/- jitter).
dropout_rate : float
    Probability of dropping each spike (0.0 = none, 1.0 = all).
rate_scale : tuple of float
    (min_scale, max_scale) for random firing rate scaling.
polarity_flip_prob : float
    Probability of flipping spike polarity (for DVS ON/OFF channels).
bg_noise_rate : float
    Background noise spike probability per neuron per step.
hot_pixel_prob : float
    Probability of a neuron becoming a hot pixel (fires every step).
seed : int
    Random seed for reproducibility.

Source code in src/sc_neurocore/augmentation/spike_augment.py
 25
 26
 27
 28
 29
 30
 31
 32
 33
 34
 35
 36
 37
 38
 39
 40
 41
 42
 43
 44
 45
 46
 47
 48
 49
 50
 51
 52
 53
 54
 55
 56
 57
 58
 59
 60
 61
 62
 63
 64
 65
 66
 67
 68
 69
 70
 71
 72
 73
 74
 75
 76
 77
 78
 79
 80
 81
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
@dataclass
class SpikeAugment:
    """Composable spike-domain augmentation.

    Stages run in a fixed order — temporal jitter, spike dropout, rate
    scaling, polarity flip, background noise, hot pixels — and each
    stage runs only when its parameter enables it.

    Parameters
    ----------
    jitter_steps : int
        Max temporal jitter in timesteps (spikes shift +/- jitter).
    dropout_rate : float
        Probability of dropping each spike (0.0 = none, 1.0 = all).
    rate_scale : tuple of float
        (min_scale, max_scale) for random firing rate scaling.  A drawn
        scale below 1.0 thins spikes out; a scale above 1.0 injects
        extra spikes (rate amplification).
    polarity_flip_prob : float
        Probability of flipping spike polarity (for DVS ON/OFF channels).
    bg_noise_rate : float
        Background noise spike probability per neuron per step.
    hot_pixel_prob : float
        Probability of a neuron becoming a hot pixel (fires every step).
    seed : int
        Random seed for reproducibility.  NOTE(review): the RNG is
        re-seeded on every call, so one instance applies the identical
        augmentation to every sample; vary ``seed`` per sample if
        independent draws are required.
    """

    jitter_steps: int = 0
    dropout_rate: float = 0.0
    rate_scale: tuple[float, float] = (1.0, 1.0)
    polarity_flip_prob: float = 0.0
    bg_noise_rate: float = 0.0
    hot_pixel_prob: float = 0.0
    seed: int = 42

    def __call__(self, spikes: np.ndarray) -> np.ndarray:
        """Apply all enabled augmentations to a spike tensor.

        Parameters
        ----------
        spikes : ndarray of shape (T, n_neurons)
            Binary spike matrix.

        Returns
        -------
        ndarray of same shape
            Augmented spike matrix, re-binarized and cast back to the
            input dtype.
        """
        rng = np.random.RandomState(self.seed)
        out = spikes.copy().astype(np.float64)

        if self.jitter_steps > 0:
            out = self._temporal_jitter(out, rng)

        if self.dropout_rate > 0:
            out = self._spike_dropout(out, rng)

        if self.rate_scale != (1.0, 1.0):
            out = self._rate_scaling(out, rng)

        if self.polarity_flip_prob > 0:
            out = self._polarity_flip(out, rng)

        if self.bg_noise_rate > 0:
            out = self._background_noise(out, rng)

        if self.hot_pixel_prob > 0:
            out = self._hot_pixel(out, rng)

        return np.clip(out, 0, 1).astype(spikes.dtype)

    def _temporal_jitter(self, spikes: np.ndarray, rng: np.random.RandomState) -> np.ndarray:
        """Shift each spike by a random offset in [-jitter, +jitter] steps.

        Shifted times are clipped to the sequence bounds; spikes that
        land on the same (t, neuron) cell merge into one (binary output).
        Vectorized: one randint draw per spike instead of a Python loop
        over the full (T, N) grid.
        """
        T, _ = spikes.shape
        result = np.zeros_like(spikes)
        ts, ns = np.nonzero(spikes > 0)
        if ts.size:
            shifts = rng.randint(-self.jitter_steps, self.jitter_steps + 1, size=ts.size)
            result[np.clip(ts + shifts, 0, T - 1), ns] = 1.0
        return result

    def _spike_dropout(self, spikes: np.ndarray, rng: np.random.RandomState) -> np.ndarray:
        """Drop each spike independently with probability ``dropout_rate``."""
        keep = rng.random(spikes.shape) > self.dropout_rate
        return spikes * keep

    def _rate_scaling(self, spikes: np.ndarray, rng: np.random.RandomState) -> np.ndarray:
        """Rescale the firing rate by a factor drawn from ``rate_scale``.

        scale < 1: keep each spike with probability ``scale``.
        scale > 1: add spikes at silent cells with probability
        (scale - 1), mirroring SpikeCurriculum.apply_to_spikes.
        (Previously scale > 1 was a silent no-op, contradicting the
        documented (min_scale, max_scale) contract.)
        scale == 1: no-op.
        """
        lo, hi = self.rate_scale
        scale = rng.uniform(lo, hi)
        if scale < 1.0:
            keep = rng.random(spikes.shape) < scale
            return spikes * keep
        if scale > 1.0:
            extra = (rng.random(spikes.shape) < (scale - 1.0)).astype(np.float64)
            return np.clip(spikes + extra, 0, 1)
        return spikes

    def _polarity_flip(self, spikes: np.ndarray, rng: np.random.RandomState) -> np.ndarray:
        """Swap the two neuron-axis halves with probability ``polarity_flip_prob``.

        Presumably the first half holds ON channels and the second half
        OFF channels (per the DVS note in the class docstring) — confirm
        against the encoder.  No-op when N is odd (no clean half split).
        """
        T, N = spikes.shape
        if N % 2 != 0:
            return spikes
        result = spikes.copy()
        if rng.random() < self.polarity_flip_prob:
            half = N // 2
            result[:, :half], result[:, half:] = spikes[:, half:].copy(), spikes[:, :half].copy()
        return result

    def _background_noise(self, spikes: np.ndarray, rng: np.random.RandomState) -> np.ndarray:
        """Inject uniform background spikes with rate ``bg_noise_rate``."""
        noise = (rng.random(spikes.shape) < self.bg_noise_rate).astype(np.float64)
        return np.clip(spikes + noise, 0, 1)

    def _hot_pixel(self, spikes: np.ndarray, rng: np.random.RandomState) -> np.ndarray:
        """Force randomly chosen neurons to fire at every timestep."""
        _, N = spikes.shape
        hot = rng.random(N) < self.hot_pixel_prob
        result = spikes.copy()
        result[:, hot] = 1.0
        return result

__call__(spikes)

Apply all augmentations to a spike tensor.

Parameters

spikes : ndarray of shape (T, n_neurons)
    Binary spike matrix.

Returns

ndarray of same shape
    Augmented spike matrix.

Source code in src/sc_neurocore/augmentation/spike_augment.py
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
def __call__(self, spikes: np.ndarray) -> np.ndarray:
    """Apply every enabled augmentation stage to a spike tensor.

    Parameters
    ----------
    spikes : ndarray of shape (T, n_neurons)
        Binary spike matrix.

    Returns
    -------
    ndarray of same shape
        Augmented spike matrix.
    """
    rng = np.random.RandomState(self.seed)
    out = spikes.copy().astype(np.float64)

    # Each stage runs only when its knob is enabled; order is fixed.
    stages = (
        (self.jitter_steps > 0, self._temporal_jitter),
        (self.dropout_rate > 0, self._spike_dropout),
        (self.rate_scale != (1.0, 1.0), self._rate_scaling),
        (self.polarity_flip_prob > 0, self._polarity_flip),
        (self.bg_noise_rate > 0, self._background_noise),
        (self.hot_pixel_prob > 0, self._hot_pixel),
    )
    for enabled, stage in stages:
        if enabled:
            out = stage(out, rng)

    # Re-binarize and restore the caller's dtype.
    return np.clip(out, 0, 1).astype(spikes.dtype)

SpikeCurriculum dataclass

Schedule training difficulty across epochs.

Parameters

total_epochs : int
    Total training epochs.
start_timesteps : int
    Initial sequence length.
end_timesteps : int
    Final sequence length.
start_rate_scale : float
    Initial firing rate multiplier (>1 = amplified = easier).
end_rate_scale : float
    Final firing rate multiplier (1.0 = natural).
start_noise : float
    Initial background noise rate.
end_noise : float
    Final background noise rate.
warmup_fraction : float
    Fraction of epochs for linear warmup (0.0-1.0).

Source code in src/sc_neurocore/augmentation/curriculum.py
 27
 28
 29
 30
 31
 32
 33
 34
 35
 36
 37
 38
 39
 40
 41
 42
 43
 44
 45
 46
 47
 48
 49
 50
 51
 52
 53
 54
 55
 56
 57
 58
 59
 60
 61
 62
 63
 64
 65
 66
 67
 68
 69
 70
 71
 72
 73
 74
 75
 76
 77
 78
 79
 80
 81
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
@dataclass
class SpikeCurriculum:
    """Schedule training difficulty across epochs.

    Every scheduled quantity ramps linearly from its ``start_*`` value
    to its ``end_*`` value over the warmup phase, then holds the
    ``end_*`` value for the remaining epochs.

    Parameters
    ----------
    total_epochs : int
        Total training epochs.
    start_timesteps : int
        Initial sequence length.
    end_timesteps : int
        Final sequence length.
    start_rate_scale : float
        Initial firing rate multiplier (>1 = amplified = easier).
    end_rate_scale : float
        Final firing rate multiplier (1.0 = natural).
    start_noise : float
        Initial background noise rate.
    end_noise : float
        Final background noise rate.
    warmup_fraction : float
        Fraction of epochs for linear warmup (0.0-1.0).
    """

    total_epochs: int
    start_timesteps: int = 10
    end_timesteps: int = 100
    start_rate_scale: float = 2.0
    end_rate_scale: float = 1.0
    start_noise: float = 0.0
    end_noise: float = 0.05
    warmup_fraction: float = 0.3

    def _progress(self, epoch: int) -> float:
        """Warmup progress in [0, 1]; 1.0 once warmup has elapsed."""
        warmup_epochs = int(self.total_epochs * self.warmup_fraction)
        if warmup_epochs <= 0:
            return 1.0
        return min(1.0, epoch / warmup_epochs)

    def _lerp(self, start: float, end: float, epoch: int) -> float:
        """Interpolate start -> end according to warmup progress."""
        return start + self._progress(epoch) * (end - start)

    def timesteps(self, epoch: int) -> int:
        """Sequence length for this epoch."""
        return int(self._lerp(self.start_timesteps, self.end_timesteps, epoch))

    def rate_scale(self, epoch: int) -> float:
        """Firing rate multiplier for this epoch."""
        return self._lerp(self.start_rate_scale, self.end_rate_scale, epoch)

    def noise_rate(self, epoch: int) -> float:
        """Background noise rate for this epoch."""
        return self._lerp(self.start_noise, self.end_noise, epoch)

    def apply_to_spikes(self, spikes: np.ndarray, epoch: int, seed: int = 0) -> np.ndarray:
        """Apply curriculum-scheduled transforms to a spike tensor.

        Parameters
        ----------
        spikes : ndarray of shape (T, n_neurons)
        epoch : int
        seed : int

        Returns
        -------
        ndarray
            Transformed spikes (possibly truncated/padded to scheduled T).
        """
        rng = np.random.RandomState(seed)
        target_len = self.timesteps(epoch)
        n_neurons = spikes.shape[1]

        # Fit the sequence to the scheduled length, working in float64.
        if spikes.shape[0] >= target_len:
            out = spikes[:target_len].astype(np.float64)
        else:
            out = np.zeros((target_len, n_neurons), dtype=np.float64)
            out[: spikes.shape[0]] = spikes

        # Rate scaling: thin spikes when scale < 1, inject extras when > 1.
        scale = self.rate_scale(epoch)
        if scale < 1.0:  # pragma: no cover
            out = out * (rng.random(out.shape) < scale)
        elif scale > 1.0:
            boost = (rng.random(out.shape) < (scale - 1.0)).astype(np.float64)
            out = np.clip(out + boost * (1 - out), 0, 1)

        # Background noise injection.
        noise_p = self.noise_rate(epoch)
        if noise_p > 0:  # pragma: no cover
            out = np.clip(out + (rng.random(out.shape) < noise_p), 0, 1)

        return out.astype(spikes.dtype)

    def schedule_summary(self) -> str:
        """Return a text table sampling the schedule at ~10 epochs plus the final epoch."""

        def row(e: int) -> str:
            return (
                f"{e:5d} | {self.timesteps(e):4d} | "
                f"{self.rate_scale(e):10.2f} | {self.noise_rate(e):.4f}"
            )

        stride = max(1, self.total_epochs // 10)
        table = ["Epoch | T    | Rate Scale | Noise", "-" * 40]
        table.extend(row(e) for e in range(0, self.total_epochs, stride))
        table.append(row(self.total_epochs))
        return "\n".join(table)

timesteps(epoch)

Sequence length for this epoch.

Source code in src/sc_neurocore/augmentation/curriculum.py
67
68
69
70
def timesteps(self, epoch: int) -> int:
    """Sequence length scheduled for ``epoch``."""
    span = self.end_timesteps - self.start_timesteps
    return int(self.start_timesteps + self._progress(epoch) * span)

rate_scale(epoch)

Firing rate multiplier for this epoch.

Source code in src/sc_neurocore/augmentation/curriculum.py
72
73
74
75
def rate_scale(self, epoch: int) -> float:
    """Firing-rate multiplier scheduled for ``epoch``."""
    delta = self.end_rate_scale - self.start_rate_scale
    return self.start_rate_scale + self._progress(epoch) * delta

noise_rate(epoch)

Background noise rate for this epoch.

Source code in src/sc_neurocore/augmentation/curriculum.py
77
78
79
80
def noise_rate(self, epoch: int) -> float:
    """Background-noise rate scheduled for ``epoch``."""
    delta = self.end_noise - self.start_noise
    return self.start_noise + self._progress(epoch) * delta

apply_to_spikes(spikes, epoch, seed=0)

Apply curriculum-scheduled transforms to a spike tensor.

Parameters

spikes : ndarray of shape (T, n_neurons)
epoch : int
seed : int

Returns

ndarray Transformed spikes (possibly truncated/padded to scheduled T).

Source code in src/sc_neurocore/augmentation/curriculum.py
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
def apply_to_spikes(self, spikes: np.ndarray, epoch: int, seed: int = 0) -> np.ndarray:
    """Apply curriculum-scheduled transforms to a spike tensor.

    Parameters
    ----------
    spikes : ndarray of shape (T, n_neurons)
    epoch : int
    seed : int

    Returns
    -------
    ndarray
        Transformed spikes (possibly truncated/padded to scheduled T).
    """
    rng = np.random.RandomState(seed)
    target_len = self.timesteps(epoch)
    n_neurons = spikes.shape[1]

    # Fit the sequence to the scheduled length, working in float64.
    if spikes.shape[0] >= target_len:
        out = spikes[:target_len].astype(np.float64)
    else:
        out = np.zeros((target_len, n_neurons), dtype=np.float64)
        out[: spikes.shape[0]] = spikes

    # Rate scaling: thin spikes when scale < 1, inject extras when > 1.
    scale = self.rate_scale(epoch)
    if scale < 1.0:  # pragma: no cover
        out = out * (rng.random(out.shape) < scale)
    elif scale > 1.0:
        boost = (rng.random(out.shape) < (scale - 1.0)).astype(np.float64)
        out = np.clip(out + boost * (1 - out), 0, 1)

    # Background noise injection.
    noise_p = self.noise_rate(epoch)
    if noise_p > 0:  # pragma: no cover
        out = np.clip(out + (rng.random(out.shape) < noise_p), 0, 1)

    return out.astype(spikes.dtype)

schedule_summary()

Return a formatted summary of the curriculum schedule as a string.

Source code in src/sc_neurocore/augmentation/curriculum.py
128
129
130
131
132
133
134
135
136
137
138
139
140
def schedule_summary(self) -> str:
    """Return a text table sampling the schedule at ~10 epochs plus the final epoch."""

    def row(e: int) -> str:
        return (
            f"{e:5d} | {self.timesteps(e):4d} | "
            f"{self.rate_scale(e):10.2f} | {self.noise_rate(e):.4f}"
        )

    stride = max(1, self.total_epochs // 10)
    table = ["Epoch | T    | Rate Scale | Noise", "-" * 40]
    table.extend(row(e) for e in range(0, self.total_epochs, stride))
    table.append(row(self.total_epochs))
    return "\n".join(table)