[ENH] CompCor enhancement #2878

Merged: 42 commits, Apr 29, 2019

Commits (42)

329c74d
add variance-driven component selection, return component metadata
rciric Jan 19, 2019
17f3e12
expose metadata to interface, fix component selection for multiple masks
rciric Jan 19, 2019
114e6d4
propagate failure mode if provided
rciric Jan 19, 2019
6f4fc19
allow mask naming in metadata
rciric Jan 19, 2019
4d2208e
add contributor
rciric Jan 19, 2019
bfbde82
include component index in metadata
rciric Jan 19, 2019
0373879
update autotests and make naming consistent
rciric Jan 19, 2019
2c551d0
(CompCor) more intuitive interface following review from @effigies
rciric Jan 20, 2019
a53cd46
manually set `num_components` in test
rciric Jan 20, 2019
b811d47
manually set `num_components` in test
rciric Jan 20, 2019
2743189
Merge branch 'master' of https://github.com/rciric/nipype
rciric Jan 21, 2019
577e395
add unit test for variance_threshold condition
rciric Jan 21, 2019
66c7540
provide mask name to circumvent test failure
rciric Jan 21, 2019
0bb0096
(CompCor) try using an OrderedDict for metadata
rciric Jan 21, 2019
94bea4a
first-pass refactor CompCor to SimpleInterface
rciric Feb 4, 2019
addb0e9
return metadata for all components regardless of retention criterion
rciric Feb 6, 2019
b04c9ca
@oesteban: limit np array use, clean up conditionals, remove invalid obj
rciric Feb 8, 2019
e957e87
less np array use; unique names for dropped components
rciric Feb 9, 2019
797801e
ensure absolute path to components file
rciric Feb 9, 2019
67a3276
(CompCor) try BaseInterface
rciric Feb 9, 2019
fe430f5
ensure absolute path to components file
rciric Feb 9, 2019
1625bdb
update per @oesteban 's review
rciric Feb 15, 2019
9afb3f5
assign output to _results
rciric Feb 15, 2019
689d064
assign output to _results
rciric Feb 15, 2019
f390bc6
some fixes
oesteban Feb 16, 2019
ad3d440
testing pickling of variance_threshold
oesteban Feb 16, 2019
fd41b74
``traits.Range`` cannot be pickled with traits>=5 and python 2.7
oesteban Feb 16, 2019
01a78ec
Merge pull request #1 from oesteban/rciric-patch-1
rciric Feb 16, 2019
a742c9c
pacify codacy
rciric Feb 16, 2019
518a489
revert unnecessary squeeze, correct docs
rciric Feb 21, 2019
deceb95
revise in accordance with @effigies review
rciric Feb 22, 2019
fa64907
revise in accordance with @effigies review
rciric Feb 23, 2019
e6dfe7d
ensure s is defined, support NaN failure mode with empty mask
rciric Mar 1, 2019
27ed03f
filter handles empty masks, use `squeeze_image`
rciric Mar 27, 2019
422c04c
Merge branch 'master' of https://github.com/nipy/nipype
rciric Mar 27, 2019
144fca3
Merge branch 'master' of https://github.com/nipy/nipype
rciric Mar 28, 2019
82a25c2
default to old behaviour for temporal filters
rciric Mar 28, 2019
4c1af8a
Merge branch 'master' into master
effigies Apr 11, 2019
79e840d
integrate @effigies review comments
rciric Apr 19, 2019
1b1b6fa
propagate retention status to metadata; use list instead of generator…
rciric Apr 19, 2019
89ba3b4
Merge branch 'master' of https://github.com/rciric/nipype
rciric Apr 19, 2019
b80a3d7
update unit test to include new metadata field
rciric Apr 19, 2019

Files changed

5 changes: 5 additions & 0 deletions .zenodo.json
@@ -330,6 +330,11 @@
"name": "Liem, Franz",
"orcid": "0000-0003-0646-4810"
},
{
"affiliation": "Stanford University",
"name": "Ciric, Rastko",
"orcid": "0000-0001-6347-7939"
},
{
"affiliation": "The Centre for Addiction and Mental Health",
"name": "Joseph, Michael",
292 changes: 220 additions & 72 deletions nipype/algorithms/confounds.py

Large diffs are not rendered by default.
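
Since the main diff to confounds.py is not rendered above, here is a minimal usage sketch of the enhanced interface, mirroring the new unit test added in this PR. The new inputs (variance_threshold, mask_names, save_metadata) and the metadata_file output are taken from the tests below; the image paths are placeholders, not files from the PR.

```python
# Sketch of the new CompCor inputs/outputs exercised by the updated tests.
# Paths below are placeholders.
from nipype.algorithms.confounds import CompCor

cc = CompCor(
    realigned_file='func_preproc.nii.gz',  # placeholder 4D BOLD series
    mask_files=['wm_mask.nii.gz'],         # placeholder noise-ROI mask
    mask_index=0,
    mask_names=['WM'],                     # label written to the metadata 'mask' column
    variance_threshold=0.7,                # keep components until 70% of variance is explained
    save_metadata=True)                    # also write a per-component metadata TSV
res = cc.run()
print(res.outputs.components_file)         # tab-separated component time series
print(res.outputs.metadata_file)           # singular values, variance explained, retention flags
```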

66 changes: 57 additions & 9 deletions nipype/algorithms/tests/test_CompCor.py
@@ -48,20 +48,48 @@ def test_compcor(self):

self.run_cc(
CompCor(
num_components=6,
realigned_file=self.realigned_file,
mask_files=self.mask_files,
mask_index=0), expected_components)

self.run_cc(
ACompCor(
num_components=6,
realigned_file=self.realigned_file,
mask_files=self.mask_files,
mask_index=0,
components_file='acc_components_file'), expected_components,
'aCompCor')

def test_compcor_variance_threshold_and_metadata(self):
expected_components = [['-0.2027150345', '-0.4954813834'],
['0.2565929051', '0.7866217875'],
['-0.3550986008', '-0.0089784905'],
['0.7512786244', '-0.3599828482'],
['-0.4500578942', '0.0778209345']]
expected_metadata = {
'component': 'CompCor00',
'mask': 'mask',
'singular_value': '4.0720553036',
'variance_explained': '0.5527211465',
'cumulative_variance_explained': '0.5527211465',
'retained': 'True',
}
ccinterface = CompCor(
variance_threshold=0.7,
realigned_file=self.realigned_file,
mask_files=self.mask_files,
mask_names=['mask'],
mask_index=1,
save_metadata=True)
self.run_cc(ccinterface=ccinterface,
expected_components=expected_components,
expected_n_components=2,
expected_metadata=expected_metadata)

def test_tcompcor(self):
ccinterface = TCompCor(
ccinterface = TCompCor(num_components=6,
realigned_file=self.realigned_file, percentile_threshold=0.75)
self.run_cc(ccinterface, [['-0.1114536190', '-0.4632908609'], [
'0.4566907310', '0.6983205193'
@@ -70,7 +98,8 @@ def test_tcompcor(self):
], ['-0.1342351356', '0.1407855119']], 'tCompCor')

def test_tcompcor_no_percentile(self):
ccinterface = TCompCor(realigned_file=self.realigned_file)
ccinterface = TCompCor(num_components=6,
realigned_file=self.realigned_file)
ccinterface.run()

mask = nb.load('mask_000.nii.gz').get_data()
@@ -80,6 +109,7 @@
def test_compcor_no_regress_poly(self):
self.run_cc(
CompCor(
num_components=6,
realigned_file=self.realigned_file,
mask_files=self.mask_files,
mask_index=0,
@@ -151,7 +181,9 @@ def test_tcompcor_multi_mask_no_index(self):
def run_cc(self,
ccinterface,
expected_components,
expected_header='CompCor'):
expected_header='CompCor',
expected_n_components=None,
expected_metadata=None):
# run
ccresult = ccinterface.run()

@@ -160,13 +192,14 @@ def run_cc(self,
assert ccresult.outputs.components_file == expected_file
assert os.path.exists(expected_file)
assert os.path.getsize(expected_file) > 0
assert ccinterface.inputs.num_components == 6

with open(ccresult.outputs.components_file, 'r') as components_file:
expected_n_components = min(ccinterface.inputs.num_components,
self.fake_data.shape[3])
if expected_n_components is None:
expected_n_components = min(ccinterface.inputs.num_components,
self.fake_data.shape[3])

components_data = [line.split('\t') for line in components_file]
components_data = [line.rstrip().split('\t')
for line in components_file]

# the first item will be '#', we can throw it out
header = components_data.pop(0)
@@ -180,9 +213,24 @@ def run_cc(self,
num_got_timepoints = len(components_data)
assert num_got_timepoints == self.fake_data.shape[3]
for index, timepoint in enumerate(components_data):
assert (len(timepoint) == ccinterface.inputs.num_components
or len(timepoint) == self.fake_data.shape[3])
assert (len(timepoint) == expected_n_components)
assert timepoint[:2] == expected_components[index]

if ccinterface.inputs.save_metadata:
expected_metadata_file = (
ccinterface._list_outputs()['metadata_file'])
assert ccresult.outputs.metadata_file == expected_metadata_file
assert os.path.exists(expected_metadata_file)
assert os.path.getsize(expected_metadata_file) > 0

with open(ccresult.outputs.metadata_file, 'r') as metadata_file:
components_metadata = [line.rstrip().split('\t')
for line in metadata_file]
components_metadata = {i: j for i, j in
zip(components_metadata[0],
components_metadata[1])}
assert components_metadata == expected_metadata

return ccresult

@staticmethod
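
The metadata assertions above show the columns written when save_metadata is enabled. As a sketch, one way to consume that file downstream, assuming pandas is available; the path is a placeholder for outputs.metadata_file, and the column names are those in the expected_metadata dict of the test above.

```python
# Load the per-component metadata TSV and keep only retained components.
import pandas as pd

md = pd.read_csv('sub-01_metadata.tsv', sep='\t')     # placeholder path
retained = md[md['retained'].astype(str) == 'True']   # components kept by the selection criterion
print(retained[['component', 'mask', 'singular_value',
                'variance_explained', 'cumulative_variance_explained']])
```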
6 changes: 5 additions & 1 deletion nipype/algorithms/tests/test_auto_ACompCor.py
@@ -15,20 +15,23 @@ def test_ACompCor_inputs():
requires=['mask_files'],
xor=['merge_method'],
),
mask_names=dict(),
merge_method=dict(
requires=['mask_files'],
xor=['mask_index'],
),
num_components=dict(usedefault=True, ),
num_components=dict(xor=['variance_threshold'], ),
pre_filter=dict(usedefault=True, ),
realigned_file=dict(mandatory=True, ),
regress_poly_degree=dict(usedefault=True, ),
repetition_time=dict(),
save_metadata=dict(),
save_pre_filter=dict(),
use_regress_poly=dict(
deprecated='0.15.0',
new_name='pre_filter',
),
variance_threshold=dict(xor=['num_components'], ),
)
inputs = ACompCor.input_spec()

@@ -38,6 +41,7 @@ def test_ACompCor_inputs():
def test_ACompCor_outputs():
output_map = dict(
components_file=dict(),
metadata_file=dict(),
pre_filter_file=dict(),
)
outputs = ACompCor.output_spec()
6 changes: 5 additions & 1 deletion nipype/algorithms/tests/test_auto_TCompCor.py
@@ -15,21 +15,24 @@ def test_TCompCor_inputs():
requires=['mask_files'],
xor=['merge_method'],
),
mask_names=dict(),
merge_method=dict(
requires=['mask_files'],
xor=['mask_index'],
),
num_components=dict(usedefault=True, ),
num_components=dict(xor=['variance_threshold'], ),
percentile_threshold=dict(usedefault=True, ),
pre_filter=dict(usedefault=True, ),
realigned_file=dict(mandatory=True, ),
regress_poly_degree=dict(usedefault=True, ),
repetition_time=dict(),
save_metadata=dict(),
save_pre_filter=dict(),
use_regress_poly=dict(
deprecated='0.15.0',
new_name='pre_filter',
),
variance_threshold=dict(xor=['num_components'], ),
)
inputs = TCompCor.input_spec()

@@ -40,6 +43,7 @@ def test_TCompCor_outputs():
output_map = dict(
components_file=dict(),
high_variance_masks=dict(),
metadata_file=dict(),
pre_filter_file=dict(),
)
outputs = TCompCor.output_spec()
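
Both regenerated specs record that num_components and variance_threshold are now mutually exclusive (xor). A hedged sketch of what that implies for callers; the paths are placeholders and the exact error behaviour is not taken from this PR.

```python
# num_components and variance_threshold carry xor metadata in the specs above:
# choose a fixed component count or a variance criterion, not both.
from nipype.algorithms.confounds import ACompCor

acc = ACompCor(realigned_file='func_preproc.nii.gz',  # placeholder
               mask_files=['wm_csf_mask.nii.gz'],     # placeholder
               mask_index=0)
acc.inputs.variance_threshold = 0.7  # variance-driven selection ...
# acc.inputs.num_components = 6      # ... or a fixed count; setting both is
#                                    # expected to be rejected as mutually exclusive
```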
2 changes: 1 addition & 1 deletion nipype/info.py
@@ -141,7 +141,7 @@ def get_nipype_gitversion():
'numpy>=%s ; python_version >= "3.7"' % NUMPY_MIN_VERSION_37,
'python-dateutil>=%s' % DATEUTIL_MIN_VERSION,
'scipy>=%s' % SCIPY_MIN_VERSION,
'traits>=%s' % TRAITS_MIN_VERSION,
'traits>=%s,!=5.0' % TRAITS_MIN_VERSION,
'future>=%s' % FUTURE_MIN_VERSION,
'simplejson>=%s' % SIMPLEJSON_MIN_VERSION,
'prov>=%s' % PROV_VERSION,
1 change: 0 additions & 1 deletion nipype/workflows/rsfmri/fsl/resting.py
@@ -3,7 +3,6 @@
# vi: set ft=python sts=4 ts=4 sw=4 et:
from __future__ import (print_function, division, unicode_literals,
absolute_import)
from builtins import str

from ....interfaces import fsl as fsl # fsl
from ....interfaces import utility as util # utility
21 changes: 11 additions & 10 deletions nipype/workflows/rsfmri/fsl/tests/test_resting.py
@@ -89,16 +89,17 @@ def test_create_resting_preproc(self, mock_node, mock_realign_wf):
# assert
expected_file = os.path.abspath(self.out_filenames['components_file'])
with open(expected_file, 'r') as components_file:
components_data = [line.split() for line in components_file]
num_got_components = len(components_data)
assert (num_got_components == self.num_noise_components
or num_got_components == self.fake_data.shape[3])
first_two = [row[:2] for row in components_data[1:]]
assert first_two == [['-0.5172356654', '-0.6973053243'], [
'0.2574722644', '0.1645270737'
], ['-0.0806469590',
'0.5156853779'], ['0.7187176051', '-0.3235820287'],
['-0.3783072450', '0.3406749013']]
components_data = [line.rstrip().split()
for line in components_file]
num_got_components = len(components_data)
assert (num_got_components == self.num_noise_components or
num_got_components == self.fake_data.shape[3])
first_two = [row[:2] for row in components_data[1:]]
assert first_two == [['-0.5172356654', '-0.6973053243'],
['0.2574722644', '0.1645270737'],
['-0.0806469590', '0.5156853779'],
['0.7187176051', '-0.3235820287'],
['-0.3783072450', '0.3406749013']]

fake_data = np.array([[[[2, 4, 3, 9, 1], [3, 6, 4, 7, 4]],
[[8, 3, 4, 6, 2], [4, 0, 4, 4, 2]]],
Expand Down