
Commit
Merge branch 'main' into 51PW_reproduction
bclenet committed Feb 7, 2024
2 parents 7eac2af + f87cf70 · commit 2156ff0
Showing 31 changed files with 2,098 additions and 849 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/code_quality.yml
@@ -46,7 +46,7 @@ jobs:
- name: Analyse the code with pylint
run: |
pylint --fail-under 8 --ignore-paths narps_open/pipelines/ narps_open > pylint_report_narps_open.txt
pylint --fail-under 8 narps_open > pylint_report_narps_open.txt
pylint --fail-under 8 tests > pylint_report_tests.txt
- name: Archive pylint results
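Dropping --ignore-paths narps_open/pipelines/ means the pipeline modules are now linted together with the rest of narps_open; the legacy pipeline files touched further down opt out individually instead. A minimal sketch of that opt-out (the module shown is hypothetical), which keeps an unrefactored file from pulling the score under the --fail-under 8 threshold enforced above:

    # pylint: skip-file
    # The pragma above makes pylint ignore this module entirely, so a legacy
    # pipeline that has not been cleaned up yet does not lower the repository
    # score checked by the code_quality workflow.

    from nipype.interfaces.fsl import BET  # legacy-style import left untouched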
4 changes: 2 additions & 2 deletions narps_open/pipelines/__init__.py
@@ -69,12 +69,12 @@
'R9K3': None,
'SM54': None,
'T54A': 'PipelineTeamT54A',
'U26C': None,
'U26C': 'PipelineTeamU26C',
'UI76': None,
'UK24': None,
'V55J': None,
'VG39': None,
'X19V': None,
'X19V': 'PipelineTeamX19V',
'X1Y5': None,
'X1Z4': None,
'XU70': None
30 changes: 0 additions & 30 deletions narps_open/pipelines/__main__.py

This file was deleted.

93 changes: 72 additions & 21 deletions narps_open/pipelines/team_08MQ.py
@@ -386,23 +386,24 @@ def get_preprocessing_outputs(self):
parameters = {
'subject_id': self.subject_list,
'run_id': self.run_list,
'file': [
'components_file.txt',
'sub-{subject_id}_task-MGT_run-{run_id}_bold_brain_mcf.nii.gz.par',
'sub-{subject_id}_task-MGT_run-{run_id}_bold_brain_mcf_st_smooth_flirt_wtsimt.nii.gz',
'sub-{subject_id}_task-MGT_run-{run_id}_bold_brain_mask_flirt_wtsimt.nii.gz'
]
}
parameter_sets = product(*parameters.values())
template = join(
output_dir = join(
self.directories.output_dir,
'preprocessing',
'_run_id_{run_id}_subject_id_{subject_id}',
'{file}'
)
'_run_id_{run_id}_subject_id_{subject_id}'
)
templates = [
join(output_dir, 'components_file.txt'),
join(output_dir, 'sub-{subject_id}_task-MGT_run-{run_id}_bold_brain_mcf.nii.gz.par'),
join(output_dir,
'sub-{subject_id}_task-MGT_run-{run_id}_bold_brain_mcf_st_smooth_flirt_wtsimt.nii.gz'),
join(output_dir,
'sub-{subject_id}_task-MGT_run-{run_id}_bold_brain_mask_flirt_wtsimt.nii.gz')
]

return [template.format(**dict(zip(parameters.keys(), parameter_values)))\
for parameter_values in parameter_sets]
for parameter_values in parameter_sets for template in templates]

def get_subject_information(event_file):
"""
@@ -586,20 +587,20 @@ def get_run_level_outputs(self):
'run_id' : self.run_list,
'subject_id' : self.subject_list,
'contrast_id' : self.contrast_list,
'file' : [
join('results', 'cope{contrast_id}.nii.gz'),
join('results', 'tstat{contrast_id}.nii.gz'),
join('results', 'varcope{contrast_id}.nii.gz'),
join('results', 'zstat{contrast_id}.nii.gz'),
]
}
parameter_sets = product(*parameters.values())
template = join(
output_dir = join(
self.directories.output_dir,
'run_level_analysis', '_run_id_{run_id}_subject_id_{subject_id}','{file}'
'run_level_analysis', '_run_id_{run_id}_subject_id_{subject_id}'
)

templates = [
join(output_dir, 'results', 'cope{contrast_id}.nii.gz'),
join(output_dir, 'results', 'tstat{contrast_id}.nii.gz'),
join(output_dir, 'results', 'varcope{contrast_id}.nii.gz'),
join(output_dir, 'results', 'zstat{contrast_id}.nii.gz'),
]
return_list += [template.format(**dict(zip(parameters.keys(), parameter_values)))\
for parameter_values in parameter_sets]
for parameter_values in parameter_sets for template in templates]

return return_list

@@ -1005,6 +1005,57 @@ def get_group_level_analysis_sub_workflow(self, method):

return group_level_analysis

def get_group_level_outputs(self):
""" Return all names for the files the group level analysis is supposed to generate. """

# Handle equalRange and equalIndifference
parameters = {
'contrast_id': self.contrast_list,
'method': ['equalRange', 'equalIndifference'],
'file': [
'randomise_tfce_corrp_tstat1.nii.gz',
'randomise_tfce_corrp_tstat2.nii.gz',
'randomise_tstat1.nii.gz',
'randomise_tstat2.nii.gz',
'tstat1.nii.gz',
'tstat2.nii.gz',
'zstat1.nii.gz',
'zstat2.nii.gz'
],
'nb_subjects' : [str(len(self.subject_list))]
}
parameter_sets = product(*parameters.values())
template = join(
self.directories.output_dir,
'group_level_analysis_{method}_nsub_{nb_subjects}',
'_contrast_id_{contrast_id}',
'{file}'
)

return_list = [template.format(**dict(zip(parameters.keys(), parameter_values)))\
for parameter_values in parameter_sets]

# Handle groupComp
parameters = {
'contrast_id': self.contrast_list,
'file': [
'randomise_tfce_corrp_tstat1.nii.gz',
'randomise_tstat1.nii.gz',
'zstat1.nii.gz',
'tstat1.nii.gz'
]
}
parameter_sets = product(*parameters.values())
template = join(
self.directories.output_dir,
f'group_level_analysis_groupComp_nsub_{len(self.subject_list)}',
'_contrast_id_{contrast_id}', '{file}')

return_list += [template.format(**dict(zip(parameters.keys(), parameter_values)))\
for parameter_values in parameter_sets]

return return_list

def get_hypotheses_outputs(self):
""" Return the names of the files used by the team to answer the hypotheses of NARPS. """

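The refactoring above follows one pattern: the Cartesian product of the parameter values is crossed with a list of path templates, and each template is filled in with str.format. A minimal, self-contained sketch of that pattern, using made-up subject and run ids:

    from itertools import product
    from os.path import join

    # Example parameter values (assumed, for illustration only)
    parameters = {
        'subject_id': ['001', '002'],
        'run_id': ['01'],
    }
    output_dir = join('output', 'preprocessing', '_run_id_{run_id}_subject_id_{subject_id}')
    templates = [
        join(output_dir, 'components_file.txt'),
        join(output_dir, 'sub-{subject_id}_task-MGT_run-{run_id}_bold_brain_mcf.nii.gz.par'),
    ]

    # Every (subject_id, run_id) combination is zipped back to the parameter names,
    # then substituted into every template.
    files = [
        template.format(**dict(zip(parameters.keys(), parameter_values)))
        for parameter_values in product(*parameters.values())
        for template in templates
    ]
    assert len(files) == 2 * 1 * 2  # 2 subjects x 1 run x 2 templates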
1 change: 1 addition & 0 deletions narps_open/pipelines/team_0I4U_debug.py
@@ -1,3 +1,4 @@
# pylint: skip-file
from nipype.interfaces.spm import (Coregister, Smooth, OneSampleTTestDesign, EstimateModel, EstimateContrast,
Level1Design, TwoSampleTTestDesign, RealignUnwarp, FieldMap, NewSegment,
Normalize12, Reslice)
1 change: 1 addition & 0 deletions narps_open/pipelines/team_1KB2_debug.py
@@ -1,3 +1,4 @@
# pylint: skip-file
from nipype.interfaces.fsl import (BET, FAST, MCFLIRT, FLIRT, FNIRT, ApplyWarp, SUSAN,
Info, ImageMaths, IsotropicSmooth, Threshold, Level1Design, FEATModel,
L2Model, Merge, FLAMEO, ContrastMgr,Cluster, FILMGLS, Randomise, MultipleRegressDesign)
1 change: 1 addition & 0 deletions narps_open/pipelines/team_43FJ_debug.py
@@ -1,3 +1,4 @@
# pylint: skip-file
from nipype.interfaces.fsl import (BET, FAST, MCFLIRT, FLIRT, FNIRT, ApplyWarp, SUSAN, MotionOutliers,
Info, ImageMaths, IsotropicSmooth, Threshold, Level1Design, FEATModel,
L2Model, Merge, FLAMEO, ContrastMgr, FILMGLS, Randomise, MultipleRegressDesign)
1 change: 1 addition & 0 deletions narps_open/pipelines/team_4TQ6_wip.py
@@ -1,3 +1,4 @@
# pylint: skip-file
from nipype.interfaces.fsl import (BET, ICA_AROMA, FAST, MCFLIRT, FLIRT, FNIRT, ApplyWarp, SUSAN,
Info, ImageMaths, IsotropicSmooth, Threshold, Level1Design, FEATModel,
L2Model, Merge, FLAMEO, ContrastMgr,Cluster, FILMGLS, Randomise, MultipleRegressDesign)
1 change: 1 addition & 0 deletions narps_open/pipelines/team_98BT.py
@@ -1,3 +1,4 @@
# pylint: skip-file
from nipype.interfaces.spm import (Coregister, Smooth, OneSampleTTestDesign, EstimateModel, EstimateContrast,
Level1Design, TwoSampleTTestDesign, RealignUnwarp, NewSegment, SliceTiming,
DARTEL, DARTELNorm2MNI, FieldMap)
63 changes: 42 additions & 21 deletions narps_open/pipelines/team_Q6O0.py
@@ -468,11 +468,14 @@ def get_subject_level_outputs(self):
)

# Formatting templates and returning it as a list of files
output_files = [contrast_map_template.format(**dict(zip(parameters.keys(), parameter_values)))\
output_files = [
contrast_map_template.format(**dict(zip(parameters.keys(), parameter_values)))\
for parameter_values in parameter_sets]
output_files += [mat_file_template.format(**dict(zip(parameters.keys(), parameter_values)))\
output_files += [
mat_file_template.format(**dict(zip(parameters.keys(), parameter_values)))\
for parameter_values in parameter_sets]
output_files += [spmt_file_template.format(**dict(zip(parameters.keys(), parameter_values)))\
output_files += [
spmt_file_template.format(**dict(zip(parameters.keys(), parameter_values)))\
for parameter_values in parameter_sets]

return output_files
@@ -698,23 +701,41 @@ def get_hypotheses_outputs(self):
""" Return all hypotheses output file names. """
nb_sub = len(self.subject_list)
files = [
join(f'l2_analysis_equalIndifference_nsub_{nb_sub}', '_model_type_gain', '_threshold0', 'spmT_0001_thr.nii'),
join(f'l2_analysis_equalIndifference_nsub_{nb_sub}', '_model_type_gain', 'spmT_0001.nii'),
join(f'l2_analysis_equalRange_nsub_{nb_sub}', '_model_type_gain', '_threshold0', 'spmT_0001_thr.nii'),
join(f'l2_analysis_equalRange_nsub_{nb_sub}', '_model_type_gain', 'spmT_0001.nii'),
join(f'l2_analysis_equalIndifference_nsub_{nb_sub}', '_model_type_gain', '_threshold0', 'spmT_0001_thr.nii'),
join(f'l2_analysis_equalIndifference_nsub_{nb_sub}', '_model_type_gain', 'spmT_0001.nii'),
join(f'l2_analysis_equalRange_nsub_{nb_sub}', '_model_type_gain', '_threshold0', 'spmT_0001_thr.nii'),
join(f'l2_analysis_equalRange_nsub_{nb_sub}', '_model_type_gain', 'spmT_0001.nii'),
join(f'l2_analysis_equalIndifference_nsub_{nb_sub}', '_model_type_loss', '_threshold1', 'spmT_0002_thr.nii'),
join(f'l2_analysis_equalIndifference_nsub_{nb_sub}', '_model_type_loss', 'spmT_0002.nii'),
join(f'l2_analysis_equalRange_nsub_{nb_sub}', '_model_type_loss', '_threshold1', 'spmT_0002_thr.nii'),
join(f'l2_analysis_equalRange_nsub_{nb_sub}', '_model_type_loss', 'spmT_0002.nii'),
join(f'l2_analysis_equalIndifference_nsub_{nb_sub}', '_model_type_loss', '_threshold0', 'spmT_0001_thr.nii'),
join(f'l2_analysis_equalIndifference_nsub_{nb_sub}', '_model_type_loss', 'spmT_0001.nii'),
join(f'l2_analysis_equalRange_nsub_{nb_sub}', '_model_type_loss', '_threshold0', 'spmT_0001_thr.nii'),
join(f'l2_analysis_equalRange_nsub_{nb_sub}', '_model_type_loss', 'spmT_0001.nii'),
join(f'l2_analysis_groupComp_nsub_{nb_sub}', '_model_type_loss', '_threshold0', 'spmT_0001_thr.nii'),
join(f'l2_analysis_groupComp_nsub_{nb_sub}', '_model_type_loss', 'spmT_0001.nii')
join(f'l2_analysis_equalIndifference_nsub_{nb_sub}',
'_model_type_gain', '_threshold0', 'spmT_0001_thr.nii'),
join(f'l2_analysis_equalIndifference_nsub_{nb_sub}',
'_model_type_gain', 'spmT_0001.nii'),
join(f'l2_analysis_equalRange_nsub_{nb_sub}',
'_model_type_gain', '_threshold0', 'spmT_0001_thr.nii'),
join(f'l2_analysis_equalRange_nsub_{nb_sub}',
'_model_type_gain', 'spmT_0001.nii'),
join(f'l2_analysis_equalIndifference_nsub_{nb_sub}',
'_model_type_gain', '_threshold0', 'spmT_0001_thr.nii'),
join(f'l2_analysis_equalIndifference_nsub_{nb_sub}',
'_model_type_gain', 'spmT_0001.nii'),
join(f'l2_analysis_equalRange_nsub_{nb_sub}',
'_model_type_gain', '_threshold0', 'spmT_0001_thr.nii'),
join(f'l2_analysis_equalRange_nsub_{nb_sub}',
'_model_type_gain', 'spmT_0001.nii'),
join(f'l2_analysis_equalIndifference_nsub_{nb_sub}',
'_model_type_loss', '_threshold1', 'spmT_0002_thr.nii'),
join(f'l2_analysis_equalIndifference_nsub_{nb_sub}',
'_model_type_loss', 'spmT_0002.nii'),
join(f'l2_analysis_equalRange_nsub_{nb_sub}',
'_model_type_loss', '_threshold1', 'spmT_0002_thr.nii'),
join(f'l2_analysis_equalRange_nsub_{nb_sub}',
'_model_type_loss', 'spmT_0002.nii'),
join(f'l2_analysis_equalIndifference_nsub_{nb_sub}',
'_model_type_loss', '_threshold0', 'spmT_0001_thr.nii'),
join(f'l2_analysis_equalIndifference_nsub_{nb_sub}',
'_model_type_loss', 'spmT_0001.nii'),
join(f'l2_analysis_equalRange_nsub_{nb_sub}',
'_model_type_loss', '_threshold0', 'spmT_0001_thr.nii'),
join(f'l2_analysis_equalRange_nsub_{nb_sub}',
'_model_type_loss', 'spmT_0001.nii'),
join(f'l2_analysis_groupComp_nsub_{nb_sub}',
'_model_type_loss', '_threshold0', 'spmT_0001_thr.nii'),
join(f'l2_analysis_groupComp_nsub_{nb_sub}',
'_model_type_loss', 'spmT_0001.nii')
]
return [join(self.directories.output_dir, f) for f in files]
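Taken together, the get_*_outputs accessors declare every file a pipeline is expected to produce. One plausible use is to diff that declaration against the files actually present on disk; a minimal sketch, where the helper name and the pipeline argument are hypothetical:

    from os.path import isfile

    def missing_outputs(pipeline):
        """Return declared subject and group level output files that are absent on disk."""
        declared = pipeline.get_subject_level_outputs() + pipeline.get_group_level_outputs()
        return [path for path in declared if not isfile(path)]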
1 change: 1 addition & 0 deletions narps_open/pipelines/team_R9K3_wip.py
@@ -1,3 +1,4 @@
# pylint: skip-file
# THIS IS A TEMPLATE THAT CAN BE USE TO REPRODUCE A NEW PIPELINE

import os
21 changes: 13 additions & 8 deletions narps_open/pipelines/team_T54A.py
@@ -765,17 +765,22 @@ def get_group_level_outputs(self):
for parameter_values in parameter_sets]

# Handle groupComp
files = [
'randomise_tfce_corrp_tstat1.nii.gz',
'randomise_tstat1.nii.gz',
'zstat1.nii.gz',
'tstat1.nii.gz'
parameters = {
'contrast_id': self.contrast_list,
'file': [
'randomise_tfce_corrp_tstat1.nii.gz',
'randomise_tstat1.nii.gz',
'zstat1.nii.gz',
'tstat1.nii.gz'
]

return_list += [join(
}
parameter_sets = product(*parameters.values())
template = join(
self.directories.output_dir,
f'group_level_analysis_groupComp_nsub_{len(self.subject_list)}',
'_contrast_id_2', f'{file}') for file in files]
'_contrast_id_{contrast_id}', '{file}')
return_list += [template.format(**dict(zip(parameters.keys(), parameter_values)))\
for parameter_values in parameter_sets]

return return_list

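Note that the groupComp template now iterates over self.contrast_list instead of the previously hard-coded '_contrast_id_2' directory, so the declared file set grows with the number of contrasts. A tiny worked expansion, with assumed values:

    from itertools import product
    from os.path import join

    contrast_list = ['1', '2']                                   # assumed contrast ids
    files = ['randomise_tfce_corrp_tstat1.nii.gz', 'zstat1.nii.gz']
    template = join('output', 'group_level_analysis_groupComp_nsub_4',  # assumed subject count
                    '_contrast_id_{contrast_id}', '{file}')

    expanded = [template.format(contrast_id=contrast, file=file)
                for contrast, file in product(contrast_list, files)]
    # 2 contrasts x 2 files = 4 paths, where the old code would have produced
    # only the 2 paths under '_contrast_id_2'.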