Skip to content

Spike Encoding Zoo + Auto-Optimizer

7 encoding schemes + automatic selection based on data characteristics.

Encoders

sc_neurocore.encoding.encoders

7 spike encoding schemes: rate, latency, delta, phase, burst, rank-order, sigma-delta.

No framework provides all 7 in one place with a consistent API.

rate_encode(values, T, seed=42)

Rate coding: spike probability proportional to value.

Parameters

values : ndarray of shape (N,) — input values in [0, 1]. T : int — number of timesteps. seed : int — random seed for reproducible spike trains.

Returns

ndarray of shape (T, N), binary

Source code in src/sc_neurocore/encoding/encoders.py
Python
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
def rate_encode(values: np.ndarray, T: int, seed: int = 42) -> np.ndarray:
    """Rate coding: spike probability proportional to value.

    Each timestep draws an independent Bernoulli spike per neuron, with
    spike probability equal to the (clipped) input value.

    Parameters
    ----------
    values : ndarray of shape (N,)
        Input values in [0, 1]; values outside the range are clipped.
    T : int
        Number of timesteps.
    seed : int
        Seed for the random generator, making the output deterministic.

    Returns
    -------
    ndarray of shape (T, N), binary int8
    """
    probs = np.clip(values, 0, 1)
    generator = np.random.RandomState(seed)
    uniform_draws = generator.random((T, len(probs)))
    return (uniform_draws < probs[np.newaxis, :]).astype(np.int8)

latency_encode(values, T)

Latency (Time-to-First-Spike) coding: higher value = earlier spike.

Parameters

values : ndarray of shape (N,) Input values in [0, 1]. T : int

Returns

ndarray of shape (T, N), binary

Source code in src/sc_neurocore/encoding/encoders.py
Python
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
def latency_encode(values: np.ndarray, T: int) -> np.ndarray:
    """Latency (Time-to-First-Spike) coding: higher value = earlier spike.

    Each neuron with a positive value emits exactly one spike: a value of 1
    fires at t=0 and smaller values fire proportionally later. Non-positive
    values stay silent.

    Parameters
    ----------
    values : ndarray of shape (N,)
        Input values in [0, 1]; values above 1 are clipped.
    T : int
        Number of timesteps.

    Returns
    -------
    ndarray of shape (T, N), binary int8
    """
    out = np.zeros((T, len(values)), dtype=np.int8)
    for neuron, value in enumerate(values):
        if value <= 0:
            continue  # silent neuron: no spike at all
        first_spike = int((1.0 - np.clip(value, 0, 1)) * (T - 1))
        out[max(first_spike, 0), neuron] = 1
    return out

delta_encode(values, threshold=0.1)

Delta coding: spike when change exceeds threshold.

Parameters

values : ndarray of shape (T,) or (T, N) Time-varying input signal. threshold : float

Returns

ndarray of same shape, binary

Source code in src/sc_neurocore/encoding/encoders.py
Python
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
def delta_encode(values: np.ndarray, threshold: float = 0.1) -> np.ndarray:
    """Delta coding: spike when change exceeds threshold.

    Emits a spike wherever ``|values[t] - values[t-1]| > threshold``. The
    first timestep is compared against itself, so it never spikes.

    Parameters
    ----------
    values : ndarray of shape (T,) or (T, N)
        Time-varying input signal.
    threshold : float
        Minimum absolute change required to emit a spike.

    Returns
    -------
    ndarray of same shape as ``values``, binary int8
    """
    # Remember the input rank so a 1-D signal comes back 1-D, honoring the
    # documented "same shape" contract (previously 1-D input returned (T, 1)).
    was_1d = values.ndim == 1
    if was_1d:
        values = values[:, np.newaxis]
    # prepend=values[:1] makes diff[0] == 0 -> no spike on the first sample.
    diff = np.abs(np.diff(values, axis=0, prepend=values[:1]))
    spikes = (diff > threshold).astype(np.int8)
    return spikes[:, 0] if was_1d else spikes

phase_encode(values, T, n_phases=8)

Phase coding: value encoded as spike phase within oscillation cycle.

Parameters

values : ndarray of shape (N,) Input values in [0, 1]. T : int n_phases : int Number of phase bins per cycle.

Returns

ndarray of shape (T, N), binary

Source code in src/sc_neurocore/encoding/encoders.py
Python
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
def phase_encode(values: np.ndarray, T: int, n_phases: int = 8) -> np.ndarray:
    """Phase coding: value encoded as spike phase within oscillation cycle.

    The input maps to one of ``n_phases`` phase bins; the neuron then fires
    once per cycle at that phase, repeating every ``n_phases`` timesteps.

    Parameters
    ----------
    values : ndarray of shape (N,)
        Input values in [0, 1]; values outside the range are clipped.
    T : int
        Number of timesteps.
    n_phases : int
        Number of phase bins per cycle.

    Returns
    -------
    ndarray of shape (T, N), binary int8
    """
    out = np.zeros((T, len(values)), dtype=np.int8)
    for neuron, value in enumerate(values):
        phase_bin = int(np.clip(value, 0, 1) * (n_phases - 1))
        # One spike per cycle at this neuron's phase offset.
        out[phase_bin::n_phases, neuron] = 1
    return out

burst_encode(values, T, max_burst=5)

Burst coding: value encoded as burst length (consecutive spikes).

Parameters

values : ndarray of shape (N,) Input values in [0, 1]. T : int max_burst : int Maximum burst length for value=1.

Returns

ndarray of shape (T, N), binary

Source code in src/sc_neurocore/encoding/encoders.py
Python
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
def burst_encode(values: np.ndarray, T: int, max_burst: int = 5) -> np.ndarray:
    """Burst coding: value encoded as burst length (consecutive spikes).

    Each neuron fires a contiguous run of spikes starting at t=0 whose
    length scales with the input value, capped at ``max_burst`` (and at T).

    Parameters
    ----------
    values : ndarray of shape (N,)
        Input values in [0, 1]; values outside the range are clipped.
    T : int
        Number of timesteps.
    max_burst : int
        Maximum burst length for value=1.

    Returns
    -------
    ndarray of shape (T, N), binary int8
    """
    out = np.zeros((T, len(values)), dtype=np.int8)
    for neuron, value in enumerate(values):
        n_spikes = int(np.clip(value, 0, 1) * max_burst)
        if n_spikes < 1:
            n_spikes = 1  # even a near-zero value emits a single spike
        out[: min(n_spikes, T), neuron] = 1
    return out

rank_order_encode(values, T)

Rank-order coding: neurons fire in order of decreasing value.

Parameters

values : ndarray of shape (N,) T : int

Returns

ndarray of shape (T, N), binary

Source code in src/sc_neurocore/encoding/encoders.py
Python
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
def rank_order_encode(values: np.ndarray, T: int) -> np.ndarray:
    """Rank-order coding: neurons fire in order of decreasing value.

    The largest value spikes at t=0, the next at t=1, and so on; ranks
    beyond the time window all land on the final timestep. Non-positive
    values stay silent.

    Parameters
    ----------
    values : ndarray of shape (N,)
    T : int
        Number of timesteps.

    Returns
    -------
    ndarray of shape (T, N), binary int8
    """
    n_neurons = len(values)
    out = np.zeros((T, n_neurons), dtype=np.int8)
    # Negate for a descending sort: highest value gets rank 0.
    for rank, neuron in enumerate(np.argsort(-values)):
        if values[neuron] <= 0:
            continue  # non-positive inputs do not spike
        out[min(rank, T - 1), neuron] = 1
    return out

sigma_delta_encode(values, threshold=0.1)

Sigma-delta coding: integrate error, spike when threshold exceeded.

Parameters

values : ndarray of shape (T,) or (T, N) Time-varying signal. threshold : float

Returns

ndarray of same shape, binary

Source code in src/sc_neurocore/encoding/encoders.py
Python
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
def sigma_delta_encode(values: np.ndarray, threshold: float = 0.1) -> np.ndarray:
    """Sigma-delta coding: integrate error, spike when threshold exceeded.

    Maintains a running reconstruction of the signal; the difference between
    the input and the reconstruction is accumulated each step, and a spike is
    emitted whenever the accumulated error magnitude reaches ``threshold``.

    Parameters
    ----------
    values : ndarray of shape (T,) or (T, N)
        Time-varying signal.
    threshold : float
        Error magnitude that triggers a spike; also the step size by which
        the reconstruction is corrected.

    Returns
    -------
    ndarray of same shape as ``values``, binary int8

    Notes
    -----
    The spike train is unsigned: it records when the accumulated error
    crossed the threshold, not in which direction.
    """
    # Remember the input rank so a 1-D signal comes back 1-D, honoring the
    # documented "same shape" contract (previously 1-D input returned (T, 1)).
    was_1d = values.ndim == 1
    if was_1d:
        values = values[:, np.newaxis]
    T, N = values.shape
    spikes = np.zeros((T, N), dtype=np.int8)
    integrator = np.zeros(N)
    reconstructed = np.zeros(N)

    for t in range(T):
        integrator += values[t] - reconstructed
        fire = np.abs(integrator) >= threshold
        spikes[t] = fire.astype(np.int8)
        # Step the reconstruction toward the signal and drain the integrator
        # by the same signed amount for each neuron that fired.
        correction = np.sign(integrator) * fire * threshold
        reconstructed += correction
        integrator -= correction

    return spikes[:, 0] if was_1d else spikes

Optimizer

sc_neurocore.encoding.optimizer

Auto-select optimal encoding scheme based on data characteristics.

Profiles input data (sparsity, temporal structure, dynamic range) and scores each encoding scheme. Returns ranked recommendations.

No framework provides automatic encoding selection.

EncodingOptimizer

Profile data and recommend optimal spike encoding.

Parameters

T : int Number of simulation timesteps.

Source code in src/sc_neurocore/encoding/optimizer.py
Python
 38
 39
 40
 41
 42
 43
 44
 45
 46
 47
 48
 49
 50
 51
 52
 53
 54
 55
 56
 57
 58
 59
 60
 61
 62
 63
 64
 65
 66
 67
 68
 69
 70
 71
 72
 73
 74
 75
 76
 77
 78
 79
 80
 81
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
class EncodingOptimizer:
    """Profile data and recommend optimal spike encoding.

    Profiles input statistics (sparsity, temporal structure, dynamic range),
    trial-encodes a small sample with each candidate scheme, and ranks the
    schemes by a weighted blend of heuristic fit, information preservation,
    and spike sparsity.

    Parameters
    ----------
    T : int
        Number of simulation timesteps used when trial-encoding the sample.
    """

    def __init__(self, T: int = 32):
        self.T = T

    def profile(self, data: np.ndarray) -> dict:
        """Compute data statistics relevant to encoding selection.

        Parameters
        ----------
        data : ndarray of shape (N,) or (T_data, N)
            Input data (values should be in [0, 1] or will be normalized).

        Returns
        -------
        dict with: mean, std, sparsity, temporal_autocorrelation, dynamic_range
        """
        d = data.astype(np.float64)
        # Min-max normalize only when the data falls outside [0, 1].
        if d.max() > 1.0 or d.min() < 0.0:
            d = (d - d.min()) / max(d.max() - d.min(), 1e-8)

        stats = {
            "mean": float(d.mean()),
            "std": float(d.std()),
            "sparsity": float(np.mean(d < 0.01)),
            "dynamic_range": float(d.max() - d.min()),
        }

        if d.ndim == 2 and d.shape[0] > 1:
            # Lag-1 autocorrelation per column, skipping constant columns
            # (np.corrcoef is undefined/NaN for zero-variance input).
            per_column = [
                np.corrcoef(d[:-1, i], d[1:, i])[0, 1]
                for i in range(d.shape[1])
                if np.std(d[:, i]) > 1e-8
            ]
            # Guard the all-constant case: np.mean([]) is NaN and would
            # poison the phase-encoding score downstream.
            stats["temporal_autocorrelation"] = (
                float(np.mean(per_column)) if per_column else 0.0
            )
        else:
            stats["temporal_autocorrelation"] = 0.0

        return stats

    def recommend(self, data: np.ndarray) -> list[EncodingRecommendation]:
        """Recommend encoding schemes ranked by suitability.

        Parameters
        ----------
        data : ndarray of shape (N,) or (T_data, N)

        Returns
        -------
        list of EncodingRecommendation, sorted by score descending
        """
        stats = self.profile(data)
        recs = []

        # Normalize data to [0, 1] for encoding.
        d = data.astype(np.float64).ravel() if data.ndim == 1 else data.astype(np.float64)
        if d.max() > 1.0 or d.min() < 0.0:
            d = (d - d.min()) / max(d.max() - d.min(), 1e-8)

        # Trial-encode at most 100 values (first row for 2-D input).
        sample = d[:100] if d.ndim == 1 else d[0, :100] if d.ndim == 2 else d.ravel()[:100]

        for name, enc_fn, score_fn in self._encodings():
            if name in ("delta", "sigma_delta"):  # pragma: no cover
                # Temporal encoders take (values, threshold), not (values, T);
                # if ever listed, fall back to neutral scores.
                sparsity, info = 0.5, 0.5
            else:
                encoded = enc_fn(sample, self.T)
                sparsity = float(1.0 - encoded.mean())
                info = self._info_score(sample, encoded)

            base_score = score_fn(stats)
            # Weighted blend: heuristic fit, info preservation, sparsity bonus.
            final_score = 0.5 * base_score + 0.3 * info + 0.2 * (0.5 + 0.5 * sparsity)

            recs.append(
                EncodingRecommendation(
                    encoding=name,
                    score=float(np.clip(final_score, 0, 1)),
                    sparsity=sparsity,
                    info_preserved=info,
                    reason=self._reason(name, stats),
                )
            )

        recs.sort(key=lambda r: r.score, reverse=True)
        return recs

    def _info_score(self, original: np.ndarray, encoded: np.ndarray) -> float:
        """Estimate how well encoding preserves input information.

        Uses the correlation between the input and the per-neuron mean spike
        rate as a cheap proxy for reconstruction quality.
        """
        decoded_approx = encoded.mean(axis=0)
        if len(decoded_approx) != len(original):  # pragma: no cover
            return 0.5
        corr = np.corrcoef(original, decoded_approx)[0, 1]
        # corrcoef is NaN for constant input; treat that as "no information".
        return float(max(0, corr)) if np.isfinite(corr) else 0.0

    def _encodings(self) -> list[tuple[str, Any, Any]]:
        """Candidate (name, encoder, heuristic-score) triples."""
        return [
            ("rate", encoders.rate_encode, lambda s: 0.7 + 0.3 * (1 - s["sparsity"])),
            ("latency", encoders.latency_encode, lambda s: 0.8 if s["sparsity"] < 0.5 else 0.4),
            (
                "phase",
                encoders.phase_encode,
                lambda s: 0.6 + 0.3 * s.get("temporal_autocorrelation", 0),
            ),
            ("burst", encoders.burst_encode, lambda s: 0.5 + 0.3 * s["dynamic_range"]),
            ("rank_order", encoders.rank_order_encode, lambda s: 0.7 if s["std"] > 0.2 else 0.3),
        ]

    def _reason(self, name: str, stats: dict) -> str:
        """Human-readable justification attached to each recommendation."""
        reasons = {
            "rate": "Good general-purpose encoding, works well with diverse data",
            "latency": "Low-latency single-spike encoding, energy-efficient",
            "phase": "Captures periodic structure in temporal data",
            "burst": "Preserves intensity information in burst length",
            "rank_order": "Exploits relative ordering, good for high-variance data",
        }
        return reasons.get(name, "")

profile(data)

Compute data statistics relevant to encoding selection.

Parameters

data : ndarray of shape (N,) or (T_data, N) Input data (values should be in [0, 1] or will be normalized).

Returns

dict with: mean, std, sparsity, temporal_autocorrelation, dynamic_range

Source code in src/sc_neurocore/encoding/optimizer.py
Python
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
def profile(self, data: np.ndarray) -> dict:
    """Compute data statistics relevant to encoding selection.

    Parameters
    ----------
    data : ndarray of shape (N,) or (T_data, N)
        Input data (values should be in [0, 1] or will be normalized).

    Returns
    -------
    dict with: mean, std, sparsity, temporal_autocorrelation, dynamic_range
    """
    d = data.astype(np.float64)
    # Min-max normalize only when the data falls outside [0, 1].
    if d.max() > 1.0 or d.min() < 0.0:
        d = (d - d.min()) / max(d.max() - d.min(), 1e-8)

    stats = {
        "mean": float(d.mean()),
        "std": float(d.std()),
        "sparsity": float(np.mean(d < 0.01)),
        "dynamic_range": float(d.max() - d.min()),
    }

    if d.ndim == 2 and d.shape[0] > 1:
        # Lag-1 autocorrelation per column, skipping constant columns
        # (np.corrcoef is undefined/NaN for zero-variance input).
        per_column = [
            np.corrcoef(d[:-1, i], d[1:, i])[0, 1]
            for i in range(d.shape[1])
            if np.std(d[:, i]) > 1e-8
        ]
        # Guard the all-constant case: np.mean([]) is NaN and would
        # poison the phase-encoding score downstream.
        stats["temporal_autocorrelation"] = (
            float(np.mean(per_column)) if per_column else 0.0
        )
    else:
        stats["temporal_autocorrelation"] = 0.0

    return stats

recommend(data)

Recommend encoding schemes ranked by suitability.

Parameters

data : ndarray of shape (N,) or (T_data, N)

Returns

list of EncodingRecommendation, sorted by score descending

Source code in src/sc_neurocore/encoding/optimizer.py
Python
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
def recommend(self, data: np.ndarray) -> list[EncodingRecommendation]:
    """Recommend encoding schemes ranked by suitability.

    Parameters
    ----------
    data : ndarray of shape (N,) or (T_data, N)

    Returns
    -------
    list of EncodingRecommendation, sorted by score descending
    """
    stats = self.profile(data)

    # Bring the data into [0, 1] so the trial encoders see valid input.
    d = data.astype(np.float64)
    if data.ndim == 1:
        d = d.ravel()
    if d.max() > 1.0 or d.min() < 0.0:
        d = (d - d.min()) / max(d.max() - d.min(), 1e-8)

    # Trial-encode at most 100 values (first row for 2-D input).
    if d.ndim == 1:
        sample = d[:100]
    elif d.ndim == 2:
        sample = d[0, :100]
    else:
        sample = d.ravel()[:100]

    recommendations = []
    for name, enc_fn, score_fn in self._encodings():
        if name in ("delta", "sigma_delta"):  # pragma: no cover
            sparsity, info = 0.5, 0.5
        else:
            encoded = enc_fn(sample, self.T)
            sparsity = float(1.0 - encoded.mean())
            info = self._info_score(sample, encoded)

        heuristic = score_fn(stats)
        # Weighted blend: heuristic fit, info preservation, sparsity bonus.
        blended = 0.5 * heuristic + 0.3 * info + 0.2 * (0.5 + 0.5 * sparsity)

        recommendations.append(
            EncodingRecommendation(
                encoding=name,
                score=float(np.clip(blended, 0, 1)),
                sparsity=sparsity,
                info_preserved=info,
                reason=self._reason(name, stats),
            )
        )

    return sorted(recommendations, key=lambda r: r.score, reverse=True)

EncodingRecommendation dataclass

Recommendation for one encoding scheme.

Source code in src/sc_neurocore/encoding/optimizer.py
Python
27
28
29
30
31
32
33
34
35
@dataclass
class EncodingRecommendation:
    """Recommendation for one encoding scheme.

    Value object built by ``EncodingOptimizer.recommend``, which returns a
    list of these sorted by ``score`` descending.
    """

    encoding: str  # scheme name, e.g. "rate", "latency", "phase"
    score: float  # 0-1, higher is better
    sparsity: float  # fraction of zeros in encoded output
    info_preserved: float  # estimated information preservation
    reason: str  # human-readable justification for the recommendation