
Commit

Update
young-x-skyee committed Dec 21, 2023
1 parent d2a1f6a commit cad0c57
Showing 4 changed files with 35 additions and 27 deletions.
26 changes: 15 additions & 11 deletions invokers/invoker_preproc_pipeline.py
@@ -3,8 +3,9 @@
from kymata.io.cli import print_with_color
from kymata.io.yaml import load_config
from kymata.preproc.data import data_integrety_checks
from kymata.preproc.pipeline import run_preprocessing, create_trials
from kymata.preproc.hexel_current_estimation import create_forward_model_and_inverse_solution, \
- create_hexel_current_files
+ create_hexel_current_files, create_current_estimation_prerequisites


def main():
@@ -14,22 +15,25 @@ def main():
_display_welcome_message_to_terminal()

# Load parameters
- config = load_config('kymata/config/dataset4_config_file.yaml')
+ config = load_config('/imaging/projects/cbu/kymata/analyses/tianyi/kymata-toolbox/kymata/config/dataset4.yaml')

# Ensure we have all the data we need
data_integrety_checks(config=config)

# Preprocess EMEG raw data
- # preprocessing.run_preprocessing(config=config)
+ run_preprocessing(list_of_participants=config['list_of_participants'],
+ dataset_directory_name=config['dataset_directory_name'],
+ n_runs=config['number_of_runs'],
+ emeg_machine_used_to_record_data=config['EMEG_machine_used_to_record_data'],
+ remove_ecg=config['remove_ECG'],
+ skip_maxfilter_if_previous_runs_exist=config['skip_maxfilter_if_previous_runs_exist'],
+ remove_veoh_and_heog=config['remove_VEOH_and_HEOG'],
+ automatic_bad_channel_detection_requested=config['automatic_bad_channel_detection_requested'])

# Save sensor level data, epoched by trial
# preprocessing.create_trials(config=config)

# Create Boundary Element Models
# Average the hexel current reconstructions into a single participant


- # hexel_current_estimation.create_current_estimation_prerequisites(config=config)
+ create_current_estimation_prerequisites(config=config)

# Create forward model and inverse solution
create_forward_model_and_inverse_solution(config=config)
@@ -38,13 +42,13 @@ def main():
create_hexel_current_files(config=config)

# Average the hexel current reconstructions into a single participant
# average_participants_hexel_currents(list_of_participants=list_of_participants, input_stream=input_stream)

# Export data ready for BIDS format
# export_for_sharing()

# Run Kymata
# XYZ

# End code with cleanup
_run_cleanup()
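In short, the invoker now loads a single YAML file from the cluster and drives the whole pipeline from it. A minimal sketch of that flow, using only the functions and config keys visible in this diff; the KYMATA_CONFIG environment override and the DEFAULT_CONFIG constant are hypothetical additions, not part of the commit.

import os

from kymata.io.yaml import load_config
from kymata.preproc.pipeline import run_preprocessing

# Hypothetical default so the hard-coded cluster path is only a fallback.
DEFAULT_CONFIG = "/imaging/projects/cbu/kymata/analyses/tianyi/kymata-toolbox/kymata/config/dataset4.yaml"


def main():
    config = load_config(os.environ.get("KYMATA_CONFIG", DEFAULT_CONFIG))

    # Same keyword arguments as the run_preprocessing call added in this commit.
    run_preprocessing(
        list_of_participants=config["list_of_participants"],
        dataset_directory_name=config["dataset_directory_name"],
        n_runs=config["number_of_runs"],
        emeg_machine_used_to_record_data=config["EMEG_machine_used_to_record_data"],
        remove_ecg=config["remove_ECG"],
        skip_maxfilter_if_previous_runs_exist=config["skip_maxfilter_if_previous_runs_exist"],
        remove_veoh_and_heog=config["remove_VEOH_and_HEOG"],
        automatic_bad_channel_detection_requested=config["automatic_bad_channel_detection_requested"])


if __name__ == "__main__":
    main()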
2 changes: 1 addition & 1 deletion kymata/config/dataset4.yaml
@@ -8,7 +8,7 @@ mri_structurals_directory: "raw_mri_structurals"

# General information related to the dataset
list_of_participants: [
"participant_01"
"participant_08"
]
input_streams: [
"auditory"
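For reference, the keys touched here are consumed as a plain dictionary by the preprocessing code. A minimal sketch of reading them, assuming the config is ordinary YAML (kymata.io.yaml.load_config is presumed to be a thin wrapper around a YAML parser; not verified here).

import yaml

with open("/imaging/projects/cbu/kymata/analyses/tianyi/kymata-toolbox/kymata/config/dataset4.yaml") as f:
    config = yaml.safe_load(f)

participants = config["list_of_participants"]   # ["participant_08"] after this commit
input_streams = config["input_streams"]         # e.g. ["auditory"]
mri_dir = config["mri_structurals_directory"]   # "raw_mri_structurals"
dataset = config["dataset_directory_name"]      # used to build the data paths below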
16 changes: 10 additions & 6 deletions kymata/preproc/hexel_current_estimation.py
@@ -170,18 +170,21 @@ def create_forward_model_and_inverse_solution(config: dict):

list_of_participants = config['list_of_participants']
dataset_directory_name = config['dataset_directory_name']
- intrim_preprocessing_directory_name = Path(Path(path.abspath("")), "data", dataset_directory_name,
- "intrim_preprocessing_files")
+ # intrim_preprocessing_directory_name = Path(Path(path.abspath("")), "data", dataset_directory_name,
+ # "intrim_preprocessing_files")
+ intrim_preprocessing_directory_name = Path("/imaging/projects/cbu/kymata", "data", dataset_directory_name, "intrim_preprocessing_files")
mri_structurals_directory = config['mri_structurals_directory']
- mri_structurals_directory = Path(Path(path.abspath("")), "data", dataset_directory_name, mri_structurals_directory)
+ # mri_structurals_directory = Path(Path(path.abspath("")), "data", dataset_directory_name, mri_structurals_directory)
+ mri_structurals_directory = Path("/imaging/projects/cbu/kymata", "data", dataset_directory_name, mri_structurals_directory)

# Compute forward solution
for participant in list_of_participants:

fwd = mne.make_forward_solution(
- Path(Path(path.abspath("")), "data",
+ # Path(Path(path.abspath("")), "data",
+ Path("/imaging/projects/cbu/kymata", "data",
dataset_directory_name,
- 'raw', participant, participant +
+ 'raw_emeg', participant, participant +
'_run1_raw.fif'), # note this file is only used for the sensor positions.
trans=Path(intrim_preprocessing_directory_name, "4_hexel_current_reconstruction","coregistration_files", participant + '-trans.fif'),
src=Path(intrim_preprocessing_directory_name, "4_hexel_current_reconstruction","src_files", participant + '_ico5-src.fif'),
@@ -241,7 +244,8 @@ def create_hexel_current_files(config: dict):
number_of_trials = config['number_of_trials']
dataset_directory_name = config['dataset_directory_name']
intrim_preprocessing_directory_name = Path(
- Path(path.abspath("")),
+ # Path(path.abspath("")),
+ "/imaging/projects/cbu/kymata",
"data", dataset_directory_name,
"intrim_preprocessing_files")
mri_structurals_directory = config['mri_structurals_directory']
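Both functions in this file now repeat the literal /imaging/projects/cbu/kymata root at each call site. A hedged sketch of deriving the same directories once from the config; the KYMATA_DATA_ROOT constant and the dataset_paths helper are illustrative and not part of this commit.

from pathlib import Path

# Illustrative constant; the commit itself inlines this literal at every call site.
KYMATA_DATA_ROOT = Path("/imaging/projects/cbu/kymata")


def dataset_paths(config: dict) -> dict[str, Path]:
    """Directories used by create_forward_model_and_inverse_solution and
    create_hexel_current_files (names and spellings taken from this diff)."""
    data_dir = KYMATA_DATA_ROOT / "data" / config["dataset_directory_name"]
    return {
        "interim": data_dir / "intrim_preprocessing_files",
        "mri_structurals": data_dir / config["mri_structurals_directory"],
        "raw_emeg": data_dir / "raw_emeg",
    }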
18 changes: 9 additions & 9 deletions kymata/preproc/pipeline.py
@@ -28,20 +28,20 @@ def run_preprocessing(list_of_participants: list[str],
print_with_color(f" Loading Raw data...", Fore.GREEN)

# set filename. (Use .fif.gz extension to use gzip to compress)
- saved_maxfiltered_filename = 'data/' + dataset_directory_name + '/intrim_preprocessing_files/1_maxfiltered/' + participant + "_run" + str(
+ saved_maxfiltered_filename = '/imaging/projects/cbu/kymata/data/' + dataset_directory_name + '/intrim_preprocessing_files/1_maxfiltered/' + participant + "_run" + str(
run) + '_raw_sss.fif'

if skip_maxfilter_if_previous_runs_exist and os.path.isfile(saved_maxfiltered_filename):
raw_fif_data_sss_movecomp_tr = mne.io.Raw(saved_maxfiltered_filename, preload=True)

else:
raw_fif_data = mne.io.Raw(
- 'data/' + dataset_directory_name + "/raw/" + participant + "/" + participant + "_run" + str(
+ '/imaging/projects/cbu/kymata/data/' + dataset_directory_name + "/raw_emeg/" + participant + "/" + participant + "_run" + str(
run) + "_raw.fif", preload=True)

# Rename any channels that require it, and their type
recording_config = load_config(
- 'data/' + dataset_directory_name + '/raw/' + participant + "/" + participant + '_recording_config.yaml')
+ '/imaging/projects/cbu/kymata/data/' + dataset_directory_name + '/raw_emeg/' + participant + "/" + participant + '_recording_config.yaml')
ecg_and_eog_channel_name_and_type_overwrites = recording_config[
'ECG_and_EOG_channel_name_and_type_overwrites']

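The overwrites dictionary loaded here feeds MNE's channel-metadata fixes further down (outside this hunk). A hypothetical sketch of the two calls typically involved; the real key layout of ECG_and_EOG_channel_name_and_type_overwrites is not visible in this diff.

# Example shapes only; the actual YAML layout is not shown in this diff.
name_overwrites = {"EEG062": "ECG"}                 # old channel name -> new name
type_overwrites = {"ECG": "ecg", "EOG061": "eog"}   # channel name -> MNE channel type

raw_fif_data.rename_channels(name_overwrites)
raw_fif_data.set_channel_types(type_overwrites)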
@@ -105,8 +105,8 @@ def run_preprocessing(list_of_participants: list[str],
# Apply SSS and movement compensation
print_with_color(f" Applying SSS and movement compensation...", Fore.GREEN)

- fine_cal_file = 'data/cbu_specific_files/SSS/sss_cal_' + emeg_machine_used_to_record_data + '.dat'
- crosstalk_file = 'data/cbu_specific_files/SSS/ct_sparse_' + emeg_machine_used_to_record_data + '.fif'
+ fine_cal_file = '/imaging/projects/cbu/kymata/data/cbu_specific_files/SSS/sss_cal_' + emeg_machine_used_to_record_data + '.dat'
+ crosstalk_file = '/imaging/projects/cbu/kymata/data/cbu_specific_files/SSS/ct_sparse_' + emeg_machine_used_to_record_data + '.fif'

mne.viz.plot_head_positions(
head_pos_data, mode='field', destination=raw_fif_data.info['dev_head_t'], info=raw_fif_data.info)
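The fine-calibration and cross-talk files set above are presumably passed to mne.preprocessing.maxwell_filter in the part of the function hidden by this hunk. A minimal sketch of that call under that assumption, using the variables defined in the surrounding code.

import mne


def apply_sss_movement_compensation(raw_fif_data, fine_cal_file, crosstalk_file, head_pos_data):
    # Maxwell filtering (SSS) with movement compensation, using the
    # CBU-specific calibration files whose paths were just made absolute.
    return mne.preprocessing.maxwell_filter(
        raw_fif_data,
        calibration=fine_cal_file,
        cross_talk=crosstalk_file,
        head_pos=head_pos_data,
        verbose=True)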
@@ -182,7 +187,7 @@ def run_preprocessing(list_of_participants: list[str],
mne.viz.plot_raw(raw_fif_data_sss_movecomp_tr)

raw_fif_data_sss_movecomp_tr.save(
- 'data/' + dataset_directory_name + '/intrim_preprocessing_files/2_cleaned/' + participant + "_run" + str(
+ '/imaging/projects/cbu/kymata/data/' + dataset_directory_name + '/intrim_preprocessing_files/2_cleaned/' + participant + "_run" + str(
run) + '_cleaned_raw.fif.gz',
overwrite=True)

@@ -261,7 +266,7 @@ def create_trials(dataset_directory_name: str,
cleaned_raws = []

for run in range(1, number_of_runs + 1):
- raw_fname = 'data/' + dataset_directory_name + '/intrim_preprocessing_files/2_cleaned/' + p + '_run' + str(run) + '_cleaned_raw.fif.gz'
+ raw_fname = '/imaging/projects/cbu/kymata/data/' + dataset_directory_name + '/intrim_preprocessing_files/2_cleaned/' + p + '_run' + str(run) + '_cleaned_raw.fif.gz'
raw = mne.io.Raw(raw_fname, preload=True)
cleaned_raws.append(raw)

@@ -328,7 +328,7 @@ def create_trials(dataset_directory_name: str,
# Log which channels are worst
dropfig = epochs.plot_drop_log(subject=p)
dropfig.savefig(
- 'data/' + dataset_directory_name + '/intrim_preprocessing_files/3_evoked_sensor_data/logs/' + input_stream + '_drop-log_' + p + '.jpg')
+ '/imaging/projects/cbu/kymata/data/' + dataset_directory_name + '/intrim_preprocessing_files/3_evoked_sensor_data/logs/' + input_stream + '_drop-log_' + p + '.jpg')

global_droplog.append('[' + input_stream + ']' + p + ':' + str(epochs.drop_log_stats(epochs.drop_log)))

@@ -354,7 +354,7 @@ def create_trials(dataset_directory_name: str,
print_with_color(f"... save grand covariance matrix", Fore.GREEN)

cov = mne.compute_raw_covariance(raw, tmin=0, tmax=10, return_estimators=True)
- mne.write_cov('data/' + dataset_directory_name + '/intrim_preprocessing_files/3_evoked_sensor_data/covariance_grand_average/' + p + '-auto-cov.fif', cov)
+ mne.write_cov('/imaging/projects/cbu/kymata/data/' + dataset_directory_name + '/intrim_preprocessing_files/3_evoked_sensor_data/covariance_grand_average/' + p + '-auto-cov.fif', cov)


# Save global droplog
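The same root also appears in every file name built by string concatenation in this file. A hedged sketch of equivalent pathlib helpers for the two intermediate products referenced above (maxfiltered and cleaned runs); these helpers are illustrative and not part of the commit.

from pathlib import Path

DATA_ROOT = Path("/imaging/projects/cbu/kymata/data")  # illustrative constant


def maxfiltered_path(dataset_directory_name: str, participant: str, run: int) -> Path:
    # Mirrors saved_maxfiltered_filename in run_preprocessing.
    return (DATA_ROOT / dataset_directory_name / "intrim_preprocessing_files"
            / "1_maxfiltered" / f"{participant}_run{run}_raw_sss.fif")


def cleaned_raw_path(dataset_directory_name: str, participant: str, run: int) -> Path:
    # Mirrors raw_fname in create_trials (gzip-compressed FIF).
    return (DATA_ROOT / dataset_directory_name / "intrim_preprocessing_files"
            / "2_cleaned" / f"{participant}_run{run}_cleaned_raw.fif.gz")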
