Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[WIP] Profile scripts #16

Open
wants to merge 2 commits into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 2 additions & 1 deletion scripts/processing/02-maxwell_filtering.py
Original file line number Diff line number Diff line change
Expand Up @@ -89,4 +89,5 @@ def run_filter(subject_id):
raw.save(raw_out, overwrite=True)


run_filter(subject_id=1) # Only for sub01.
if __name__ == '__main__':
run_filter(subject_id=1) # Only for sub01.
5 changes: 3 additions & 2 deletions scripts/processing/02-python_filtering.py
Original file line number Diff line number Diff line change
Expand Up @@ -66,5 +66,6 @@ def run_filter(subject_id):
raw.save(raw_out, overwrite=True)


parallel, run_func, _ = parallel_func(run_filter, n_jobs=N_JOBS)
parallel(run_func(subject_id) for subject_id in range(1, 20))
if __name__ == '__main__':
parallel, run_func, _ = parallel_func(run_filter, n_jobs=N_JOBS)
parallel(run_func(subject_id) for subject_id in range(1, 20))
6 changes: 4 additions & 2 deletions scripts/processing/03-run_extract_events.py
Original file line number Diff line number Diff line change
Expand Up @@ -38,5 +38,7 @@ def run_events(subject_id):
fname_events = op.join(data_path, 'run_%02d_filt_sss-eve.fif' % run)
mne.write_events(fname_events, events)

parallel, run_func, _ = parallel_func(run_events, n_jobs=N_JOBS)
parallel(run_func(subject_id) for subject_id in range(1, 20))

if __name__ == '__main__':
parallel, run_func, _ = parallel_func(run_events, n_jobs=N_JOBS)
parallel(run_func(subject_id) for subject_id in range(1, 20))
7 changes: 4 additions & 3 deletions scripts/processing/04-run_ica.py
Original file line number Diff line number Diff line change
Expand Up @@ -44,6 +44,7 @@ def run_ica(subject_id, tsss=False):
ica.save(ica_name)


parallel, run_func, _ = parallel_func(run_ica, n_jobs=N_JOBS)
parallel(run_func(subject_id) for subject_id in range(1, 20))
run_ica(1, True)
if __name__ == '__main__':
parallel, run_func, _ = parallel_func(run_ica, n_jobs=N_JOBS)
parallel(run_func(subject_id) for subject_id in range(1, 20))
run_ica(1, True)
11 changes: 6 additions & 5 deletions scripts/processing/05-make_epochs.py
Original file line number Diff line number Diff line change
Expand Up @@ -129,9 +129,10 @@ def run_epochs(subject_id, tsss=False):
% (subject, l_freq)))


###############################################################################
# Let us make the script parallel across subjects
if __name__ == '__main__':
###########################################################################
# Let us make the script parallel across subjects

parallel, run_func, _ = parallel_func(run_epochs, n_jobs=1)
parallel(run_func(subject_id) for subject_id in range(1, 20))
run_epochs(1, True) # run on maxwell filtered data
parallel, run_func, _ = parallel_func(run_epochs, n_jobs=1)
parallel(run_func(subject_id) for subject_id in range(1, 20))
run_epochs(1, True) # run on maxwell filtered data
7 changes: 4 additions & 3 deletions scripts/processing/06-make_evoked.py
Original file line number Diff line number Diff line change
Expand Up @@ -65,6 +65,7 @@ def run_evoked(subject_id, tsss=False):
% (subject, l_freq)))


parallel, run_func, _ = parallel_func(run_evoked, n_jobs=N_JOBS)
parallel(run_func(subject_id) for subject_id in range(1, 20))
run_evoked(1, True) # run on maxwell filtered data
if __name__ == '__main__':
parallel, run_func, _ = parallel_func(run_evoked, n_jobs=N_JOBS)
parallel(run_func(subject_id) for subject_id in range(1, 20))
run_evoked(1, True) # run on maxwell filtered data
5 changes: 3 additions & 2 deletions scripts/processing/07-time_frequency.py
Original file line number Diff line number Diff line change
Expand Up @@ -46,5 +46,6 @@ def run_time_frequency(subject_id):
overwrite=True)


parallel, run_func, _ = parallel_func(run_time_frequency, n_jobs=N_JOBS)
parallel(run_func(subject_id) for subject_id in range(1, 20))
if __name__ == '__main__':
parallel, run_func, _ = parallel_func(run_time_frequency, n_jobs=N_JOBS)
parallel(run_func(subject_id) for subject_id in range(1, 20))
13 changes: 7 additions & 6 deletions scripts/processing/08-run_time_decoding.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@
# Then we write a function to do time decoding on one subject


def run_time_decoding(subject_id, condition1, condition2):
def run_time_decoding(subject_id, condition1='face', condition2='scrambled'):
subject = "sub%03d" % subject_id
data_path = os.path.join(meg_dir, subject)
epochs = mne.read_epochs(os.path.join(data_path,
Expand Down Expand Up @@ -72,8 +72,9 @@ def run_time_decoding(subject_id, condition1, condition2):
# This may take a large amount of memory because the epochs will be
# replicated for each parallel job

parallel, run_func, _ = parallel_func(run_time_decoding, n_jobs=N_JOBS)
parallel(run_func(subject_id, 'face', 'scrambled')
for subject_id in range(1, 20))
parallel(run_func(subject_id, 'face/famous', 'face/unfamiliar')
for subject_id in range(1, 20))
if __name__ == '__main__':
parallel, run_func, _ = parallel_func(run_time_decoding, n_jobs=N_JOBS)
parallel(run_func(subject_id, 'face', 'scrambled')
for subject_id in range(1, 20))
parallel(run_func(subject_id, 'face/famous', 'face/unfamiliar')
for subject_id in range(1, 20))
6 changes: 3 additions & 3 deletions scripts/processing/09-group_average_sensors.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,10 +15,10 @@

exclude = [1, 5, 16] # Excluded subjects

for run in range(1, 20):
if run in exclude:
for subject_id in range(1, 20):
if subject_id in exclude:
continue
subject = "sub%03d" % run
subject = "sub%03d" % subject_id
print("processing subject: %s" % subject)
data_path = op.join(meg_dir, subject)

Expand Down
5 changes: 3 additions & 2 deletions scripts/processing/12-make_forward.py
Original file line number Diff line number Diff line change
Expand Up @@ -47,5 +47,6 @@ def run_forward(subject_id):
mne.write_forward_solution(fname_fwd, fwd, overwrite=True)


parallel, run_func, _ = parallel_func(run_forward, n_jobs=N_JOBS)
parallel(run_func(subject_id) for subject_id in range(1, 20))
if __name__ == '__main__':
parallel, run_func, _ = parallel_func(run_forward, n_jobs=N_JOBS)
parallel(run_func(subject_id) for subject_id in range(1, 20))
5 changes: 3 additions & 2 deletions scripts/processing/13-make_inverse.py
Original file line number Diff line number Diff line change
Expand Up @@ -54,5 +54,6 @@ def run_inverse(subject_id):
stc.save(op.join(data_path, 'mne_dSPM_inverse-%s' % evoked.comment))


parallel, run_func, _ = parallel_func(run_inverse, n_jobs=N_JOBS)
parallel(run_func(subject_id) for subject_id in range(1, 20))
if __name__ == '__main__':
parallel, run_func, _ = parallel_func(run_inverse, n_jobs=N_JOBS)
parallel(run_func(subject_id) for subject_id in range(1, 20))
16 changes: 9 additions & 7 deletions scripts/processing/14-group_average_source.py
Original file line number Diff line number Diff line change
Expand Up @@ -41,10 +41,12 @@ def morph_stc(subject_id):
return morphed


parallel, run_func, _ = parallel_func(morph_stc, n_jobs=N_JOBS)
stcs = parallel(run_func(subject_id) for subject_id in range(1, 20)
if subject_id not in exclude)

data = np.average([s.data for s in stcs], axis=0)
stc = mne.SourceEstimate(data, stcs[0].vertices, stcs[0].tmin, stcs[0].tstep)
stc.save(op.join(meg_dir, 'contrast-average'))
if __name__ == '__main__':
parallel, run_func, _ = parallel_func(morph_stc, n_jobs=N_JOBS)
stcs = parallel(run_func(subject_id) for subject_id in range(1, 20)
if subject_id not in exclude)

data = np.average([s.data for s in stcs], axis=0)
stc = mne.SourceEstimate(data, stcs[0].vertices, stcs[0].tmin,
stcs[0].tstep)
stc.save(op.join(meg_dir, 'contrast-average'))
5 changes: 3 additions & 2 deletions scripts/processing/15-lcmv_beamformer.py
Original file line number Diff line number Diff line change
Expand Up @@ -35,5 +35,6 @@ def run_inverse(subject_id):
stc.save(op.join(data_path, 'mne_LCMV_inverse-contrast'))


parallel, run_func, _ = parallel_func(run_inverse, n_jobs=N_JOBS)
parallel(run_func(subject_id) for subject_id in range(1, 20))
if __name__ == '__main__':
parallel, run_func, _ = parallel_func(run_inverse, n_jobs=N_JOBS)
parallel(run_func(subject_id) for subject_id in range(1, 20))
42 changes: 42 additions & 0 deletions scripts/processing/profile.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,42 @@
import time
from importlib import import_module
from memory_profiler import memory_usage
import matplotlib.pyplot as plt

# Profile each processing step's entry function for a single subject,
# collecting peak memory usage and wall-clock time per step.
subject_id = 1

filenames = ['02-python_filtering', '03-run_extract_events',
             '04-run_ica', '05-make_epochs', '06-make_evoked',
             '07-time_frequency', '08-run_time_decoding',
             '12-make_forward', '13-make_inverse', '14-group_average_source']

funcs = ['run_filter', 'run_events', 'run_ica', 'run_epochs',
         'run_evoked', 'run_time_frequency', 'run_time_decoding',
         'run_forward', 'run_inverse', 'morph_stc']

times, memory = [], []
for module_name, func_name in zip(filenames, funcs):
    start = time.time()
    print('Importing %s' % module_name)
    module = import_module(module_name)
    target = getattr(module, func_name)
    # NOTE(review): assumes memory_usage(..., max_usage=True) returns a
    # sequence (older memory_profiler API); recent releases return a bare
    # float -- confirm the installed version before relying on [0].
    peak_mem = memory_usage((target, (subject_id,), {}), max_usage=True)[0]
    # The recorded time includes the module import as well as the run.
    times.append(time.time() - start)
    memory.append(peak_mem)


def make_autopct(values, unit):
    """Return an ``autopct`` callable for ``plt.pie``.

    The returned function formats each wedge as a percentage followed by
    the absolute value and its unit, e.g. ``'12.34% \\n(56 MB)'``.

    Parameters
    ----------
    values : sequence of numbers
        The wedge sizes that will be passed to ``plt.pie``.
    unit : str
        Unit label appended after the absolute value (e.g. 'MB', 's').
    """
    def _format_wedge(pct):
        # matplotlib hands us the wedge's percentage; recover the
        # absolute value from the total of all wedge sizes.
        whole = sum(values)
        amount = int(round(pct * whole / 100.0))
        return '{p:.2f}% \n({v:d} {u:s})'.format(p=pct, v=amount, u=unit)
    return _format_wedge


# Plot memory and time taken: one pie chart per metric, each wedge
# labelled with the corresponding step's share of the total.
plt.pie(memory, labels=funcs, autopct=make_autopct(memory, 'MB'))
plt.title('Memory usage')

plt.figure()
plt.pie(times, labels=funcs, autopct=make_autopct(times, 's'))
plt.title('Time taken')

# Without show() both figures are silently discarded when the script
# exits in a non-interactive session.
plt.show()