This tutorial is designed for computational neuroscientists working with network models of leaky integrate-and-fire (LIF) neurons. It will guide you through how to:
- Generate realistic population-level electrophysiological signals, such as current dipole moments (CDMs) and electroencephalography (EEG) signals.
- Apply inverse modelling techniques to automatically discover potential associations between network model parameters and the resulting signals.
Whether your goal is to improve the biological realism of your simulations of extracellular signals or to extract interpretable insights from complex neural dynamics, this tutorial offers practical tools and workflows to support your research.
Note: To run this notebook, the NEST simulator must be installed. You can install a pre-built NEST package with:
conda install -c conda-forge nest-simulator=3.8
Similarly, to compute field potentials using the kernel method, the LFPykernels
package must be installed via pip:
pip install LFPykernels
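After installation, you can quickly check that both packages are importable. This is a minimal sanity check; the Python module name of the LFPykernels package is assumed here to be lfpykernels.
import nest          # NEST simulator Python interface
import lfpykernels   # assumed module name of the LFPykernels package
print('NEST and LFPykernels imported successfully')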
We will begin by running simulations of Hagen’s leaky integrate-and-fire (LIF) network model, systematically varying the model’s external input, computing CDMs with the kernel method, and computing EEGs with the New York head model. For detailed instructions on simulating this model and generating extracellular signals, please refer to the forward modelling tutorial: Forward modelling of neural activity.
import sys
import os
import pickle
import numpy as np
import scipy.signal as ss
import ncpi
# Define the range of external current inputs (J_ext) to be simulated.
J_ext_range = np.linspace(26, 32, 50)
def get_spike_rate(times, transient, dt, tstop):
    """
    Compute the spike rate from spike times.

    Parameters
    ----------
    times : array
        Spike times.
    transient : float
        Transient time at the start of the simulation.
    dt : float
        Simulation time step or bin size.
    tstop : float
        Simulation stop time.

    Returns
    -------
    bins : array
        Time bins.
    hist : array
        Spike rate.
    """
    bins = np.arange(transient, tstop + dt, dt)
    hist, _ = np.histogram(times, bins=bins)
    return bins, hist.astype(float)
# Append the path to the parameters of the LIF network model
sys.path.append('../../examples/simulation/Hagen_model/simulation/params')
# Pre-simulated outputs from the multicompartment neuron network model
output_path = os.path.join('/DATA/multicompartment_neuron_network', 'output', 'adb947bfb931a5a8d09ad078a6d256b0')
# Path to the data files of the multicompartment neuron models
multicompartment_neuron_network_path = '/DATA/multicompartment_neuron_network'
# Simulation outputs
CDMs = []
EEGs = []
for k, J_ext in enumerate(J_ext_range):
    print(f'Simulating the LIF network model with J_ext = {J_ext} nA')

    # Load LIF_params and KernelParams
    from network_params import LIF_params
    from analysis_params import KernelParams

    # Modify the parameter
    LIF_params['J_ext'] = J_ext

    # Create the Simulation object
    sim = ncpi.Simulation(param_folder='../../examples/simulation/Hagen_model/simulation/params',
                          python_folder='../../examples/simulation/Hagen_model/simulation/python',
                          output_folder='../../examples/simulation/Hagen_model/simulation/output')

    # Save network parameters to a pickle file
    with open(os.path.join('../../examples/simulation/Hagen_model/simulation/output', 'network.pkl'), 'wb') as f:
        pickle.dump(LIF_params, f)

    # Run the simulation
    sim.simulate('simulation.py', 'simulation_params.py')

    # Load spike times
    with open(os.path.join('../../examples/simulation/Hagen_model/simulation/output', 'times.pkl'), 'rb') as f:
        times = pickle.load(f)

    # Load gids
    with open(os.path.join('../../examples/simulation/Hagen_model/simulation/output', 'gids.pkl'), 'rb') as f:
        gids = pickle.load(f)

    # Load tstop
    with open(os.path.join('../../examples/simulation/Hagen_model/simulation/output', 'tstop.pkl'), 'rb') as f:
        tstop = pickle.load(f)

    # Load dt
    with open(os.path.join('../../examples/simulation/Hagen_model/simulation/output', 'dt.pkl'), 'rb') as f:
        dt = pickle.load(f)

    # Load X and N_X
    with open(os.path.join('../../examples/simulation/Hagen_model/simulation/output', 'network.pkl'), 'rb') as f:
        LIF_params = pickle.load(f)
    P_X = LIF_params['X']
    N_X = LIF_params['N_X']

    # Transient period
    transient = KernelParams.transient
    for X in P_X:
        gids[X] = gids[X][times[X] >= transient]
        times[X] = times[X][times[X] >= transient]

    # Compute the kernel
    print('Computing the kernel...')
    potential = ncpi.FieldPotential()
    biophys = ['set_Ih_linearized_hay2011', 'make_cell_uniform']
    H_YX = potential.create_kernel(multicompartment_neuron_network_path,
                                   output_path,
                                   KernelParams,
                                   biophys,
                                   dt,
                                   tstop,
                                   electrodeParameters=None,
                                   CDM=True)

    # Compute CDM
    probe = 'KernelApproxCurrentDipoleMoment'
    CDM_data = dict(EE=[], EI=[], IE=[], II=[])
    for X in P_X:
        for Y in P_X:
            # Compute the firing rate
            bins, spike_rate = get_spike_rate(times[X], transient, dt, tstop)
            # Pick only the z-component of the CDM kernel
            kernel = H_YX[f'{X}:{Y}'][probe][2, :]
            # CDM
            sig = np.convolve(spike_rate, kernel, 'same')
            CDM_data[f'{X}{Y}'] = ss.decimate(sig,
                                              q=10,
                                              zero_phase=True)
    sum_CDM = np.array(CDM_data['EE'] + CDM_data['EI'] + CDM_data['IE'] + CDM_data['II'], dtype=np.float32)

    # Generate EEG signals from the CDM using the New York head model, with the CDM localized at the occipital lobe
    potential = ncpi.FieldPotential(kernel=False, nyhead=True, MEEG='EEG', CDM_shape=sum_CDM.shape[0])
    EEG = np.array(potential.compute_MEEG(sum_CDM, location=np.array([-24.7, -103.3, -1.46])), dtype=np.float32)

    # Collect the simulation outputs
    CDMs.append(sum_CDM)
    EEGs.append(EEG)
# Save the outputs to files
pickle.dump(CDMs, open('CDMs.pkl', 'wb'))
pickle.dump(EEGs, open('EEGs.pkl', 'wb'))
# Save J_ext range
pickle.dump(J_ext_range, open('J_ext_range.pkl', 'wb'))
We now compute the set of catch22
features from EEG signals. For further details on other feature extraction methods, see the dedicated tutorial on Feature extraction.
# Note: you may need to restart the notebook kernel at this point if the feature
# extraction process hangs; the modules below are re-imported for that reason.
import pandas as pd
import pickle
import ncpi
import os
import numpy as np

# Load the CDMs, EEGs, and J_ext_range
CDMs = pickle.load(open('CDMs.pkl', 'rb'))
EEGs = pickle.load(open('EEGs.pkl', 'rb'))
J_ext_range = pickle.load(open('J_ext_range.pkl', 'rb'))
# Compute the features
df = pd.DataFrame({'Data': EEGs})
features = ncpi.Features(method='catch22')
df = features.compute_features(df)
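As a quick sanity check, you can inspect the resulting DataFrame. The snippet below assumes, as the plotting code further down does, that compute_features adds a 'Features' column holding one catch22 feature vector per EEG signal.
# Each row should hold one EEG signal and its corresponding catch22 feature vector
print(df.shape)
print('Number of catch22 features per signal:', len(df['Features'].iloc[0]))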
Next, we’ll take a look at some CDMs, EEGs, and a few of the catch22
features extracted from them.
import matplotlib.pyplot as plt
fig, axs = plt.subplots(4, 3, figsize=(10, 10))
# Time interval in simulation time steps
T = [5000, 5500]
# Load dt
with open(os.path.join('../../examples/simulation/Hagen_model/simulation/output', 'dt.pkl'), 'rb') as f:
    dt = pickle.load(f)
# Real time in milliseconds
time = np.arange(T[0], T[1]) * dt # Convert from step index to time in ms
for k, J_ext in enumerate([J_ext_range[0], J_ext_range[15], J_ext_range[30], J_ext_range[45]]):
    # Find the index of J_ext in J_ext_range
    i = np.where(J_ext_range == J_ext)[0][0]

    # Plot the CDM
    axs[k, 0].plot(time, CDMs[i][T[0]:T[1]])
    axs[k, 0].set_title(f'CDM (J_ext = {np.round(J_ext, 2)} nA)')
    axs[k, 0].set_xlabel('Time (ms)')
    axs[k, 0].set_yticks([])

    # Plot the EEG
    axs[k, 1].plot(time, EEGs[i][T[0]:T[1]])
    axs[k, 1].set_title(f'EEG (J_ext = {np.round(J_ext, 2)} nA)')
    axs[k, 1].set_xlabel('Time (ms)')
    axs[k, 1].set_yticks([])

    # Plot the features
    axs[k, 2].bar([0], df['Features'][i][4])
    axs[k, 2].bar([1], df['Features'][i][18])
    axs[k, 2].bar([2], df['Features'][i][19])
    axs[k, 2].set_title(f'Features (J_ext = {np.round(J_ext, 2)} nA)')
    axs[k, 2].set_xticks([0, 1, 2])
    axs[k, 2].set_xticklabels(['ami2', 'rs_range', 'dfa'])
plt.tight_layout()
plt.show()
We split the data into 90% for training and 10% for testing, as in the other tutorials.
# Split simulation data into 90% training and 10% test data
indices = np.arange(len(df))
np.random.shuffle(indices)
split = int(0.9 * len(indices))
train_indices = indices[:split]
test_indices = indices[split:]
X_train = np.array(df.iloc[train_indices]['Features'].tolist())
X_test = np.array(df.iloc[test_indices]['Features'].tolist())
J_ext_train = np.array(J_ext_range)[train_indices]
J_ext_test = np.array(J_ext_range)[test_indices]
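Note that the split above relies on NumPy's global random state. If you want a reproducible split, you could instead draw the permutation from a seeded generator; the snippet below is only an illustrative alternative.
# Reproducible alternative: use a seeded random generator for the permutation
rng = np.random.default_rng(seed=0)
indices = rng.permutation(len(df))
split = int(0.9 * len(indices))
train_indices, test_indices = indices[:split], indices[split:]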
Here, we set up and train a model to predict the parameter J_ext
using the features extracted earlier.
For guidance on configuring other regression-based inverse models from scikit-learn or implementing techniques from sbi, see the dedicated tutorial on Inverse modelling.
# Create the inference object and add simulation data
inference = ncpi.Inference(model='Ridge')
inference.add_simulation_data(X_train, J_ext_train)
# Train the model
inference.train(param_grid=None)
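The call above trains the Ridge model with its default hyperparameters. The param_grid argument suggests that a hyperparameter search can be run over the underlying scikit-learn model; the grid below is only an illustrative assumption, not part of the original workflow.
# Hypothetical example: search over Ridge's regularization strength alpha
# (assumes param_grid is forwarded to a grid search over the scikit-learn model)
inference = ncpi.Inference(model='Ridge')
inference.add_simulation_data(X_train, J_ext_train)
inference.train(param_grid={'alpha': [0.1, 1.0, 10.0]})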
Let’s evaluate how well the model performs! Using the test data, we generate predictions and compare them against the true values of J_ext.
from sklearn.metrics import mean_squared_error
# Evaluate the model using the test data
predictions = inference.predict(X_test)
# Calculate MSE
mse = mean_squared_error(J_ext_test, predictions)
# Plot real vs predicted values
plt.figure(figsize=(8, 6))
plt.scatter(J_ext_test, predictions, alpha=0.5, label=f'MSE = {mse:.3f}')
plt.plot([J_ext_test.min(), J_ext_test.max()], [J_ext_test.min(), J_ext_test.max()], 'r--', label='Ideal Fit')
plt.xlabel(r'$\mathrm{True\ } J_{ext}\ \mathrm{(nA)}$')
plt.ylabel(r'$\mathrm{Predicted\ } J_{ext}\ \mathrm{(nA)}$')
plt.legend()
plt.show()
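In addition to the MSE, you may want to report the coefficient of determination (R²) of the test predictions, for example with scikit-learn:
from sklearn.metrics import r2_score

# Coefficient of determination of the predictions on the held-out test set
r2 = r2_score(J_ext_test, predictions)
print(f'R^2 = {r2:.3f}')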