Hi @dasdiptyajit and many thanks for your answer!
I thought the same, but if I don't baseline-correct my stc I get the entire brain activated, as shown below. I only did this because I read that someone on the forum was doing it. Should I stop baseline-correcting the stc?
I thought I did apply the morph matrix to the stc data by doing this:
morph_mat = mne.compute_source_morph(
    src=inverse_operator['src'], subject_to='fsaverage',
    spacing=fsave_vertices, subjects_dir=subjects_dir).morph_mat
Am I doing it right? After reshaping and taking the dot product, my stc data matrix looks like this:
In [185]: X.shape
Out[185]: (17, 101, 20484)
with 17 subjects, 101 time points, and 20484 vertices.
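For reference, here is a minimal self-contained sketch of that reshape → dot → reshape round trip (the sizes and the random sparse matrix are made-up stand-ins; only the shapes mirror my data):

import numpy as np
from scipy.sparse import random as sparse_random

# Made-up sizes mirroring my data: 17 subjects, 101 time points,
# subject-space vertices morphed to 20484 fsaverage vertices
n_subjects, n_times = 17, 101
n_vert_subj, n_vert_fsave = 4098, 20484
# Stand-in for the sparse matrix in compute_source_morph(...).morph_mat
morph_mat = sparse_random(n_vert_fsave, n_vert_subj, density=0.001, format='csr')
# Stand-in for the stacked per-subject stc data
X = np.random.randn(n_subjects, n_vert_subj, n_times)
# Vertex axis first, then collapse subjects/times so one dot() morphs
# every subject's time series at once
X = np.transpose(X, [1, 2, 0]).reshape(n_vert_subj, n_times * n_subjects)
X = morph_mat.dot(X)
X = X.reshape(n_vert_fsave, n_times, n_subjects)
X = np.transpose(X, [2, 1, 0])
print(X.shape)  # (17, 101, 20484)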
You can see my entire script below (after that you can also find the answer to your third question):
My code:
#%%
import os.path as op
from mne.datasets import fetch_fsaverage
import pathlib
from scipy import stats
import numpy as np
import mne
import matplotlib.pyplot as plt
import glob
from mne.minimum_norm import apply_inverse, read_inverse_operator
from mne.stats import spatio_temporal_cluster_test, summarize_clusters_stc
#%%
# SOURCE ESTIMATION
# =============================================================================
# Compute the forward operator from EEG data using the standard template MRI subject fsaverage.
# =============================================================================
# Download fsaverage files
fs_dir = fetch_fsaverage(verbose=True)
subjects_dir = op.dirname(fs_dir)
# The files live in:
subject = 'fsaverage'
trans = 'fsaverage' # MNE has a built-in fsaverage transformation
src_fname = op.join(fs_dir, 'bem', 'fsaverage-ico-5-src.fif')
bem = op.join(fs_dir, 'bem', 'fsaverage-5120-5120-5120-bem-sol.fif')
#%%
# Load the data
epochs_PM_alpha = []
epochs_PM_theta = []
epochs_PM_sham = []
morph_alpha = []
morph_theta = []
morph_sham = []
exclude_outliers = ['10', '19', '20', '23', '24', '36', '56', '58', '60']
excluded_subjs = []
# PM ERPs
print('1. PREPARE FOR LOOP WITH SUBJECTS')
# Set this to the directory all of the original Log_files directories live in
studydir = "/home/.../Epoched"
# Get all the paths!
path_epochs = glob.glob("%s/epochs_PMC_Tfr_p[0-9][0-9][0-9]_[ATS]-epo.fif" % studydir)
snr = 3.0
lambda2 = 1.0 / snr ** 2
method = "dSPM" # use dSPM method (could also be MNE, sLORETA, or eLORETA)
tmin = 0.2
tmax = 0.4
for fname in path_epochs:
    try:
        subj = fname[-12:-10]
        session = fname[-9]
        if subj in exclude_outliers:
            print('This subject will not be included!')
            excluded_subjs.append(str(subj))
            continue
        if session == 'A':
            epochs_Tfr = mne.read_epochs(fname, preload=True)
            epochs_Tfr.apply_baseline(baseline=(-0.2, 0))
            #epochs_Tfr.decimate(4)
            evoked = epochs_Tfr.average()
            epochs_Tfr.crop(tmin, tmax)
            fname_inv = '%s/p0%s_%s-inv.fif' % (studydir, subj, session)
            inverse_operator = read_inverse_operator(fname_inv)
            sample_vertices = [s['vertno'] for s in inverse_operator['src']]
            # Let's average and compute the inverse
            condition_alpha = apply_inverse(evoked, inverse_operator, lambda2, method)
            condition_alpha = condition_alpha.apply_baseline(baseline=(-0.2, 0))
            condition_alpha.crop(tmin, tmax)
            epochs_PM_alpha.append(condition_alpha.data)
            src = mne.read_source_spaces(src_fname)
            fsave_vertices = [s['vertno'] for s in src]
            morph_mat = mne.compute_source_morph(
                src=inverse_operator['src'], subject_to='fsaverage',
                spacing=fsave_vertices, subjects_dir=subjects_dir).morph_mat
            morph_alpha.append(morph_mat)
        elif session == 'T':
            epochs_Tfr = mne.read_epochs(fname, preload=True)
            epochs_Tfr.apply_baseline(baseline=(-0.2, 0))
            #epochs_Tfr.decimate(4)
            evoked = epochs_Tfr.average()
            epochs_Tfr.crop(tmin, tmax)
            fname_inv = '%s/p0%s_%s-inv.fif' % (studydir, subj, session)
            inverse_operator = read_inverse_operator(fname_inv)
            sample_vertices = [s['vertno'] for s in inverse_operator['src']]
            # Let's average and compute the inverse
            condition_theta = apply_inverse(evoked, inverse_operator, lambda2, method)
            condition_theta = condition_theta.apply_baseline(baseline=(-0.2, 0))
            condition_theta.crop(tmin, tmax)
            epochs_PM_theta.append(condition_theta.data)
            src = mne.read_source_spaces(src_fname)
            fsave_vertices = [s['vertno'] for s in src]
            morph_mat = mne.compute_source_morph(
                src=inverse_operator['src'], subject_to='fsaverage',
                spacing=fsave_vertices, subjects_dir=subjects_dir).morph_mat
            morph_theta.append(morph_mat)
        else:
            epochs_Tfr = mne.read_epochs(fname, preload=True)
            epochs_Tfr.apply_baseline(baseline=(-0.2, 0))
            #epochs_Tfr.decimate(4)
            evoked = epochs_Tfr.average()
            epochs_Tfr.crop(tmin, tmax)
            fname_inv = '%s/p0%s_%s-inv.fif' % (studydir, subj, session)
            inverse_operator = read_inverse_operator(fname_inv)
            sample_vertices = [s['vertno'] for s in inverse_operator['src']]
            # Let's average and compute the inverse
            condition_sham = apply_inverse(evoked, inverse_operator, lambda2, method)
            condition_sham = condition_sham.apply_baseline(baseline=(-0.2, 0))
            condition_sham.crop(tmin, tmax)
            epochs_PM_sham.append(condition_sham.data)
            src = mne.read_source_spaces(src_fname)
            fsave_vertices = [s['vertno'] for s in src]
            morph_mat = mne.compute_source_morph(
                src=inverse_operator['src'], subject_to='fsaverage',
                spacing=fsave_vertices, subjects_dir=subjects_dir).morph_mat
            morph_sham.append(morph_mat)
    except Exception:
        print('Repeat manually: p%s_%s\n' % (subj, session))
tstep = condition_alpha.tstep * 1000 # convert to milliseconds
#%%
# Save arrays
np.save('/home/.../ERP_Source_alpha.npy', epochs_PM_alpha)
np.save('/home/.../ERP_Source_theta.npy', epochs_PM_theta)
np.save('/home/.../ERP_Source_sham.npy', epochs_PM_sham)
#%%
np.save('/home/.../ERP_Morph_alpha.npy', morph_alpha)
np.save('/home/.../ERP_Morph_theta.npy', morph_theta)
np.save('/home/.../ERP_Morph_sham.npy', morph_sham)
#%%
# Load arrays
X_alpha = np.load('/home/.../ERP_Source_alpha.npy', allow_pickle=True)
#X_theta = np.load('/home/.../ERP_Source_theta.npy', allow_pickle=True)
X_sham = np.load('/home/.../ERP_Source_sham.npy', allow_pickle=True)
#%%
morph_alpha = np.load('/home/.../ERP_Morph_alpha.npy', allow_pickle=True)
#morph_theta = np.load('/home/.../ERP_Morph_theta.npy', allow_pickle=True)
morph_sham = np.load('/home/.../ERP_Morph_sham.npy', allow_pickle=True)
#%%
n_vertices_fsave = morph_alpha[0].shape[0]
n_vertices_sample, n_times = X_alpha[0].shape
n_subjects = X_alpha.shape[0]
#%%
# Build zero matrices of the right shape, so I can subsequently fill them with my own data
X = np.zeros((n_subjects, n_vertices_sample, n_times))
#X1 = np.zeros((n_subjects, n_vertices_sample, n_times))
X2 = np.zeros((n_subjects, n_vertices_sample, n_times))
#%%
X[:, :, :] = X_alpha[:, :, :]
#X1[:, :, :] = X_theta[:, :, :]
X2[:, :, :] = X_sham[:, :, :]
#%%
# We have to put the vertex axis first before collapsing subjects and times,
# otherwise the reshape scrambles the data; then dot() can morph everything at once
X = np.transpose(X, [1, 2, 0]).reshape(n_vertices_sample, n_times * n_subjects)
#X1 = np.transpose(X1, [1, 2, 0]).reshape(n_vertices_sample, n_times * n_subjects)
X2 = np.transpose(X2, [1, 2, 0]).reshape(n_vertices_sample, n_times * n_subjects)
print('Morphing data.')
#%%
del X_alpha
#del X_theta
del X_sham
#%%
X = morph_alpha[0].dot(X)  # morph_mat is a sparse matrix; note this applies the first subject's morph matrix to all subjects
#%%
#X1 = morph_theta[0].dot(X1)
#%%
X2 = morph_sham[0].dot(X2)
#%%
del morph_alpha
#del morph_theta
del morph_sham
#%%
X = X.reshape(n_vertices_fsave, n_times, n_subjects)
#X1 = X1.reshape(n_vertices_fsave, n_times, n_subjects)
X2 = X2.reshape(n_vertices_fsave, n_times, n_subjects)
#%%
# Find adjacency
fs_dir = fetch_fsaverage(verbose=True)
src_fname = op.join(fs_dir, 'bem', 'fsaverage-ico-5-src.fif')
src = mne.read_source_spaces(src_fname)
print('Computing adjacency.')
adjacency = mne.spatial_src_adjacency(src)
# mne.spatio_temporal_src_adjacency(src, n_times, dist=None, verbose=None)
#%%
# Compute statistic
# -----------------
# Note that X needs to be a multi-dimensional array of shape
# observations (subjects) x time x space, so we permute dimensions
X = np.transpose(X, [2, 1, 0])
#X1 = np.transpose(X1, [2, 1, 0])
X2 = np.transpose(X2, [2, 1, 0])
#%%
del bem
del condition_alpha
del condition_theta
del condition_sham
del fname
del epochs_PM_alpha
del epochs_PM_theta
del epochs_PM_sham
del epochs_Tfr
del evoked
del exclude_outliers
del excluded_subjs
del fname_inv
del fs_dir
del inverse_operator
del lambda2
del method
del morph_mat
del path_epochs
del sample_vertices
del session
del snr
del src
del src_fname
del studydir
del subj
del subject
del trans
#%%
# Here we set a cluster forming threshold based on a p-value for
# the cluster based permutation test.
# We use a two-tailed threshold, the "1 - p_threshold" is needed
# because for two-tailed tests we must specify a positive threshold.
p_threshold = 0.001
df = X.shape[0] - 1 # degrees of freedom for the test
f_threshold = stats.distributions.f.ppf(1. - p_threshold / 2.,
                                        X.shape[0] - 1, X2.shape[0] - 1)
#f_threshold = dict(start=0., step=0.6)
#%%
import time
start = time.time()
n_permutations = 50 # Should be above 1000.
# Now let's actually do the clustering. This can take a long time...
print('Clustering.')
F_obs, clusters, cluster_p_values, H0 = clu = \
    spatio_temporal_cluster_test([X, X2],
                                 n_permutations=n_permutations,
                                 threshold=f_threshold,
                                 #tail=0,
                                 max_step=3,  # max distance along the second (time) axis of X for samples to count as adjacent
                                 n_jobs=3,
                                 seed=49,
                                 buffer_size=None,
                                 adjacency=adjacency,
                                 #out_type='mask',  # maybe I'll have to change it to plot results properly
                                 verbose=True)
end = time.time()
time_elapsed = (end - start)
print(time_elapsed)
print(time_elapsed/60,' Minutes')
print(time_elapsed/60/60,' Hours')
#%%
# Select the clusters that are statistically significant at p < 0.05
good_cluster_inds = np.where(cluster_p_values < 0.05)[0]
#%%
print('Visualizing clusters.')
# Now let's build a convenient representation of each cluster, where each
# cluster becomes a "time point" in the SourceEstimate
#fsave_vertices = [np.arange(10242), np.arange(10242)]
#%%
stc_all_cluster_vis = summarize_clusters_stc(clu,
                                             tstep=tstep,
                                             vertices=fsave_vertices,
                                             subject='fsaverage')
#%%
# Let's actually plot the first "time point" in the SourceEstimate, which
# shows all the clusters, weighted by duration
# blue blobs are for condition A != condition B
brain = stc_all_cluster_vis.plot(
    'fsaverage',
    hemi='both',
    views='lateral',
    subjects_dir=subjects_dir,
    time_label='temporal extent (ms)',
    size=(800, 800),
    smoothing_steps=5,
    clim='auto')  # or e.g. dict(kind='value', pos_lims=[0, 1, 40])
# We could save this via the following:
# brain.save_image('clusters.png')
#%%
- If I visualize the stc for one subject right after apply_inverse (before morphing), I get good results:
brain = condition_alpha.plot(surface='inflated',  # or 'pial', 'white' (matter)
                             hemi='both',
                             subjects_dir=subjects_dir,
                             time_viewer=True)
I guess it looks just fine, doesn’t it?
Best,
Bruno