From 0d030cdd48623eaaa3bd57c1848ed2547844971a Mon Sep 17 00:00:00 2001 From: Remi Gau Date: Thu, 24 Sep 2020 16:46:46 +0200 Subject: [PATCH 1/6] add CI for dead links and markdown linter --- .github/workflows/check_md_links.yml | 12 ++++ .prettierrc | 13 +++++ .remarkrc | 11 ++++ .travis.yml | 86 +++++++++++++++++----------- npm-requirements.txt | 7 +++ 5 files changed, 97 insertions(+), 32 deletions(-) create mode 100644 .github/workflows/check_md_links.yml create mode 100644 .prettierrc create mode 100644 .remarkrc create mode 100644 npm-requirements.txt diff --git a/.github/workflows/check_md_links.yml b/.github/workflows/check_md_links.yml new file mode 100644 index 00000000..3d461992 --- /dev/null +++ b/.github/workflows/check_md_links.yml @@ -0,0 +1,12 @@ +name: Check Markdown links + +# checking for any dead links in markdown files + +on: push + +jobs: + markdown-link-check: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@master + - uses: gaurav-nelson/github-action-markdown-link-check@v1 \ No newline at end of file diff --git a/.prettierrc b/.prettierrc new file mode 100644 index 00000000..6e0d1ce8 --- /dev/null +++ b/.prettierrc @@ -0,0 +1,13 @@ +{ + "parser": "markdown", + "proseWrap": "always", + "tabWidth": 2, + "overrides": [ + { + "files": "*.md", + "options": { + "tabWidth": 4 + } + } + ] +} diff --git a/.remarkrc b/.remarkrc new file mode 100644 index 00000000..201ce70e --- /dev/null +++ b/.remarkrc @@ -0,0 +1,11 @@ +{ + "plugins": [ + "preset-lint-consistent", + "preset-lint-markdown-style-guide", + "preset-lint-recommended", + ["lint-no-duplicate-headings", false], + ["lint-list-item-indent", "tab-size"], + ["lint-maximum-line-length", true], + ["lint-maximum-heading-length", false] + ] +} diff --git a/.travis.yml b/.travis.yml index 83dd787c..ef870536 100644 --- a/.travis.yml +++ b/.travis.yml @@ -12,42 +12,64 @@ python: cache: apt: true # only works with Pro version + directories: + - node_modules # NPM packages for the remark 
markdown linter + +branches: + only: # only run the CI for those branches + - master + - dev env: global: - OCTFLAGS="--no-gui --no-window-system --silent" -before_install: - # install octave - - travis_retry sudo apt-get -y -qq update - - travis_retry sudo apt-get -y install octave - - travis_retry sudo apt-get -y install liboctave-dev - # install MOX unit - - git clone https://github.com/MOxUnit/MOxUnit.git --depth 1 - - cd MOxUnit - - make install - - cd .. - # install MOcov - - git clone https://github.com/MOcov/MOcov.git --depth 1 - - cd MOcov - - make install - - cd .. - # install SPM and the relevant patches for octave - - git clone https://github.com/spm/spm12.git --depth 1 - - make -C spm12/src PLATFORM=octave distclean - - make -C spm12/src PLATFORM=octave - - make -C spm12/src PLATFORM=octave install - # get data - - mkdir demo/output - - curl http://www.fil.ion.ucl.ac.uk/spm/download/data/MoAEpilot/MoAEpilot.bids.zip --output demo/output/MoAEpilot.zip - - unzip demo/output/MoAEpilot.zip -d demo/output/ - - -script: - - octave $OCTFLAGS --eval "runTests" - - cat test_report.log | grep 0 - -after_success: - - bash <(curl -s https://codecov.io/bash) +jobs: + + include: + + # naming the Tests stage + - stage: "Unit tests and coverage" + # names the first job + name: "Unit tests and coverage" + + before_install: + # install octave + - travis_retry sudo apt-get -y -qq update + - travis_retry sudo apt-get -y install octave + - travis_retry sudo apt-get -y install liboctave-dev + # install MOX unit + - git clone https://github.com/MOxUnit/MOxUnit.git --depth 1 + - cd MOxUnit + - make install + - cd .. + # install MOcov + - git clone https://github.com/MOcov/MOcov.git --depth 1 + - cd MOcov + - make install + - cd .. 
+ # install SPM and the relevant patches for octave + - git clone https://github.com/spm/spm12.git --depth 1 + - make -C spm12/src PLATFORM=octave distclean + - make -C spm12/src PLATFORM=octave + - make -C spm12/src PLATFORM=octave install + # get data + - mkdir demo/output + - curl http://www.fil.ion.ucl.ac.uk/spm/download/data/MoAEpilot/MoAEpilot.bids.zip --output demo/output/MoAEpilot.zip + - unzip demo/output/MoAEpilot.zip -d demo/output/ + + script: + - octave $OCTFLAGS --eval "runTests" + - cat test_report.log | grep 0 + # to send the results to codecov to get our code coverage + after_success: + - bash <(curl -s https://codecov.io/bash) + # naming the Linter stage + - stage: Markdown linter + name: "Check markdown" + before_script: + - npm install `cat npm-requirements.txt` + script: + - remark . --frail \ No newline at end of file diff --git a/npm-requirements.txt b/npm-requirements.txt new file mode 100644 index 00000000..0584fc50 --- /dev/null +++ b/npm-requirements.txt @@ -0,0 +1,7 @@ +# list of javascript package to install for the remark markdonw linter +remark-cli@5.0.0 +remark-lint@6.0.2 +remark-preset-lint-recommended@3.0.2 +remark-preset-lint-markdown-style-guide@2.1.2 +remark-preset-lint-consistent + From 73be21f69ea521b0d664c47939b75f5a4b30f3d3 Mon Sep 17 00:00:00 2001 From: Remi Gau Date: Thu, 24 Sep 2020 17:00:27 +0200 Subject: [PATCH 2/6] mh fix for the new mh version --- demo/batch_download_run.m | 28 +-- getOption.m | 20 +- runTests.m | 10 +- src/convertOnsetTsvToMat.m | 14 +- src/getBoldFilenameForFFX.m | 10 +- src/getData.m | 14 +- src/getFFXdir.m | 8 +- src/getInfo.m | 22 +- src/getPrefix.m | 19 +- src/getRFXdir.m | 16 +- src/getRealignParamFile.m | 4 +- src/mancoreg/mancoreg.m | 228 +++++++++--------- src/mancoreg/mancoregCallbacks.m | 2 +- src/printProcessingRun.m | 14 +- src/printProcessingSubject.m | 8 +- src/saveMatlabBatch.m | 10 +- src/spmBatching/bidsCopyRawFolder.m | 17 +- src/spmBatching/bidsFFX.m | 24 +- 
src/spmBatching/bidsRFX.m | 6 +- src/spmBatching/bidsResults.m | 28 +-- src/spmBatching/bidsSpatialPrepro.m | 6 +- src/spmBatching/setBatchCoregistration.m | 24 +- src/spmBatching/setBatchFactorialDesign.m | 2 +- src/spmBatching/setBatchMeanAnatAndMask.m | 12 +- .../setBatchNormalizationSpatialPrepro.m | 78 +++--- src/spmBatching/setBatchRealign.m | 4 +- src/spmBatching/setBatchRealignReslice.m | 4 +- src/spmBatching/setBatchSTC.m | 12 +- src/spmBatching/setBatchSegmentation.m | 12 +- src/spmBatching/setBatchSmoothConImages.m | 3 +- src/spmBatching/setBatchSmoothing.m | 4 +- src/spmBatching/setBatchSubjectLevelGLMSpec.m | 4 +- src/utils/checkDependencies.m | 11 +- src/utils/checkOptions.m | 12 +- src/utils/getEnvInfo.m | 2 +- src/utils/getVersion.m | 2 +- src/utils/inputFileValidation.m | 4 +- src/utils/printCredits.m | 10 +- src/utils/setDefaultFields.m | 6 +- tests/test_checkOptions.m | 12 +- tests/test_createAndReturnOnsetFile.m | 10 +- tests/test_getBoldFilename.m | 7 +- tests/test_getBoldFilenameForFFX.m | 10 +- tests/test_getData.m | 2 +- tests/test_getFFXdir.m | 6 +- tests/test_getFuncVoxelDims.m | 4 +- tests/test_getGrpLevelContrastToCompute.m | 20 +- tests/test_getInfo.m | 14 +- tests/test_getPrefix.m | 16 +- tests/test_getRFXdir.m | 16 +- tests/test_getRealignParamFile.m | 14 +- tests/test_getSliceOrder.m | 28 +-- tests/test_inputFileValidation.m | 10 +- tests/test_modelFiles.m | 6 +- tests/test_saveMatlabBatch.m | 6 +- tests/test_setBatchCoregistration.m | 24 +- .../test_setBatchNormalizationSpatialPrepro.m | 78 +++--- tests/test_setBatchRealign.m | 8 +- tests/test_setBatchSTC.m | 21 +- tests/test_setBatchSegmentation.m | 12 +- tests/test_specifyContrasts.m | 2 +- 61 files changed, 530 insertions(+), 510 deletions(-) diff --git a/demo/batch_download_run.m b/demo/batch_download_run.m index 1f7a5187..b1a67caf 100644 --- a/demo/batch_download_run.m +++ b/demo/batch_download_run.m @@ -49,22 +49,22 @@ % specify the result to compute opt.result.Steps(1) = 
struct( ... - 'Level', 'subject', ... - 'Contrasts', struct( ... - 'Name', 'listening', ... % has to match - 'Mask', false, ... % this might need improving if a mask is required - 'MC', 'FWE', ... FWE, none, FDR - 'p', 0.05, ... - 'k', 0, ... - 'NIDM', true)); + 'Level', 'subject', ... + 'Contrasts', struct( ... + 'Name', 'listening', ... % has to match + 'Mask', false, ... + 'MC', 'FWE', ... FWE, none, FDR + 'p', 0.05, ... + 'k', 0, ... + 'NIDM', true)); opt.result.Steps(1).Contrasts(2) = struct( ... - 'Name', 'listening_inf_baseline', ... - 'Mask', false, ... - 'MC', 'none', ... FWE, none, FDR - 'p', 0.01, ... - 'k', 0, ... - 'NIDM', true); + 'Name', 'listening_inf_baseline', ... + 'Mask', false, ... + 'MC', 'none', ... FWE, none, FDR + 'p', 0.01, ... + 'k', 0, ... + 'NIDM', true); %% Get data % fprintf('%-40s:', 'Downloading dataset...'); diff --git a/getOption.m b/getOption.m index 214426e4..38af9789 100644 --- a/getOption.m +++ b/getOption.m @@ -28,14 +28,14 @@ % specify the result to compute % Contrasts.Name has to match one of the contrast defined in the model json file opt.result.Steps(1) = struct( ... - 'Level', 'dataset', ... - 'Contrasts', struct( ... - 'Name', 'VisMot_gt_VisStat', ... % - 'Mask', false, ... % this might need improving if a mask is required - 'MC', 'FWE', ... FWE, none, FDR - 'p', 0.05, ... - 'k', 0, ... - 'NIDM', true)); + 'Level', 'dataset', ... + 'Contrasts', struct( ... + 'Name', 'VisMot_gt_VisStat', ... % + 'Mask', false, ... + 'MC', 'FWE', ... FWE, none, FDR + 'p', 0.05, ... + 'k', 0, ... + 'NIDM', true)); % Options for slice time correction % If left unspecified the slice timing will be done using the mid-volume acquisition @@ -52,8 +52,8 @@ % Suffix output directory for the saved jobs opt.jobsDir = fullfile( ... - opt.dataDir, '..', 'derivatives', ... - 'SPM12_CPPL', 'JOBS', opt.taskName); + opt.dataDir, '..', 'derivatives', ... 
+ 'SPM12_CPPL', 'JOBS', opt.taskName); % Save the opt variable as a mat file to load directly in the preprocessing % scripts diff --git a/runTests.m b/runTests.m index 37fa7737..977a0424 100644 --- a/runTests.m +++ b/runTests.m @@ -12,11 +12,11 @@ testFolder = fullfile(pwd, 'tests'); success = moxunit_runtests( ... - testFolder, ... - '-verbose', '-recursive', '-with_coverage', ... - '-cover', folderToCover, ... - '-cover_xml_file', 'coverage.xml', ... - '-cover_html_dir', fullfile(pwd, 'coverage_html')); + testFolder, ... + '-verbose', '-recursive', '-with_coverage', ... + '-cover', folderToCover, ... + '-cover_xml_file', 'coverage.xml', ... + '-cover_html_dir', fullfile(pwd, 'coverage_html')); if success system('echo 0 > test_report.log'); diff --git a/src/convertOnsetTsvToMat.m b/src/convertOnsetTsvToMat.m index 689148a8..2bc1c55b 100644 --- a/src/convertOnsetTsvToMat.m +++ b/src/convertOnsetTsvToMat.m @@ -11,8 +11,8 @@ errorStruct.identifier = 'convertOnsetTsvToMat:nonExistentFile'; errorStruct.message = sprintf('%s\n%s', ... - 'This onset tsv file deos not exist:', ... - tsvFile); + 'This onset tsv file deos not exist:', ... + tsvFile); error(errorStruct); end @@ -25,8 +25,8 @@ errorStruct.identifier = 'convertOnsetTsvToMat:noTrialType'; errorStruct.message = sprintf('%s\n%s', ... - 'There was no trial_type field in this file:', ... - tsvFile); + 'There was no trial_type field in this file:', ... + tsvFile); error(errorStruct); end @@ -55,8 +55,8 @@ if isTrialType{iCond} conditionName = strrep(model.Steps{runIdx}.Model.X{iCond}, ... - 'trial_type.', ... - ''); + 'trial_type.', ... + ''); % Get the index of each condition by comparing the unique names and % each line in the tsv files @@ -76,6 +76,6 @@ fullpathOnsetFileName = fullfile(path, ['onsets_' file '.mat']); save(fullpathOnsetFileName, ... 
- 'names', 'onsets', 'durations'); + 'names', 'onsets', 'durations'); end diff --git a/src/getBoldFilenameForFFX.m b/src/getBoldFilenameForFFX.m index c897f88b..3faf9fbb 100644 --- a/src/getBoldFilenameForFFX.m +++ b/src/getBoldFilenameForFFX.m @@ -18,13 +18,13 @@ end [fileName, subFuncDataDir] = getBoldFilename( ... - BIDS, ... - subID, sessions{iSes}, runs{iRun}, opt); + BIDS, ... + subID, sessions{iSes}, runs{iRun}, opt); boldFileName = inputFileValidation( ... - subFuncDataDir, ... - prefix, ... - fileName); + subFuncDataDir, ... + prefix, ... + fileName); disp(boldFileName); diff --git a/src/getData.m b/src/getData.m index f57de516..35f786b2 100644 --- a/src/getData.m +++ b/src/getData.m @@ -87,8 +87,8 @@ fprintf('subjects present\n'); disp(subjects); error([ ... - 'Some of the subjects specified do not exist in this data set.' ... - 'This can be due to wrong zero padding: see opt.zeropad in getOptions']); + 'Some of the subjects specified do not exist in this data set.' ... + 'This can be due to wrong zero padding: see opt.zeropad in getOptions']); end % Number of subjects in the group @@ -108,13 +108,13 @@ switch type case 'bold' metadata = spm_BIDS(BIDS, 'metadata', ... - 'task', opt.taskName, ... - 'sub', subjects{1}, ... - 'type', type); + 'task', opt.taskName, ... + 'sub', subjects{1}, ... + 'type', type); case 'T1w' metadata = spm_BIDS(BIDS, 'metadata', ... - 'sub', subjects{1}, ... - 'type', type); + 'sub', subjects{1}, ... + 'type', type); end if iscell(metadata) diff --git a/src/getFFXdir.m b/src/getFFXdir.m index f79e2e9a..e32cc50e 100644 --- a/src/getFFXdir.m +++ b/src/getFFXdir.m @@ -10,10 +10,10 @@ mvpaSuffix = setMvpaSuffix(isMVPA); ffxDir = fullfile(opt.dataDir, '..', 'derivatives', 'SPM12_CPPL', ... - ['sub-', subID], ... - 'stats', ... - ['ffx_task-', opt.taskName], ... - ['ffx_FWHM-', num2str(funcFWFM), mvpaSuffix]); + ['sub-', subID], ... + 'stats', ... + ['ffx_task-', opt.taskName], ... 
+ ['ffx_FWHM-', num2str(funcFWFM), mvpaSuffix]); if ~exist(ffxDir, 'dir') mkdir(ffxDir); diff --git a/src/getInfo.m b/src/getInfo.m index 219d5773..35e42195 100644 --- a/src/getInfo.m +++ b/src/getInfo.m @@ -29,8 +29,8 @@ case 'sessions' sessions = spm_BIDS(BIDS, 'sessions', ... - 'sub', subID, ... - 'task', opt.taskName); + 'sub', subID, ... + 'task', opt.taskName); nbSessions = size(sessions, 2); if nbSessions == 0 nbSessions = 1; @@ -44,10 +44,10 @@ session = varargin{1}; runs = spm_BIDS(BIDS, 'runs', ... - 'sub', subID, ... - 'task', opt.taskName, ... - 'ses', session, ... - 'type', 'bold'); + 'sub', subID, ... + 'task', opt.taskName, ... + 'ses', session, ... + 'type', 'bold'); nbRuns = size(runs, 2); % Get the number of runs if nbRuns == 0 @@ -62,11 +62,11 @@ [session, run, type] = deal(varargin{:}); fileName = spm_BIDS(BIDS, 'data', ... - 'sub', subID, ... - 'run', run, ... - 'ses', session, ... - 'task', opt.taskName, ... - 'type', type); + 'sub', subID, ... + 'run', run, ... + 'ses', session, ... + 'task', opt.taskName, ... + 'type', type); varargout = {fileName}; diff --git a/src/getPrefix.m b/src/getPrefix.m index 99759aad..a8e67c93 100644 --- a/src/getPrefix.m +++ b/src/getPrefix.m @@ -7,12 +7,12 @@ motionRegressorPrefix = ''; allowedPrefixCases = { - 'STC'; ... - 'preprocess'; ... - 'smoothing_space-T1w'; ... - 'smoothing'; ... - 'FFX_space-T1w'; ... - 'FFX'}; + 'STC'; ... + 'preprocess'; ... + 'smoothing_space-T1w'; ... + 'smoothing'; ... + 'FFX_space-T1w'; ... + 'FFX'}; switch step @@ -65,8 +65,9 @@ errorStruct.identifier = 'getPrefix:unknownPrefixCase'; errorStruct.message = sprintf('%s\n%s', ... - 'This prefix case you have requested does not exist: %s.', ... - 'See allowed cases above', step); + ['This prefix case you have requested ' ... + 'does not exist: %s.'], ... 
+ 'See allowed cases above', step); error(errorStruct); end @@ -77,7 +78,7 @@ % Check the slice timing information is not in the metadata and not added % manually in the opt variable. if (isfield(opt.metadata, 'SliceTiming') && ... - ~isempty(opt.metadata.SliceTiming)) || ... + ~isempty(opt.metadata.SliceTiming)) || ... ~isempty(opt.sliceOrder) prefix = [spm_get_defaults('slicetiming.prefix') prefix]; end diff --git a/src/getRFXdir.m b/src/getRFXdir.m index 3c186dcc..24130448 100644 --- a/src/getRFXdir.m +++ b/src/getRFXdir.m @@ -7,14 +7,14 @@ % rfxDir = fullfile( ... - opt.dataDir, ... - '..', ... - 'derivatives', ... - 'SPM12_CPPL', ... - 'group', ... - ['rfx_task-', opt.taskName], ... - ['rfx_funcFWHM-', num2str(funcFWHM), '_conFWHM-', num2str(conFWHM)], ... - contrastName); + opt.dataDir, ... + '..', ... + 'derivatives', ... + 'SPM12_CPPL', ... + 'group', ... + ['rfx_task-', opt.taskName], ... + ['rfx_funcFWHM-', num2str(funcFWHM), '_conFWHM-', num2str(conFWHM)], ... + contrastName); if ~exist(rfxDir, 'dir') mkdir(rfxDir); diff --git a/src/getRealignParamFile.m b/src/getRealignParamFile.m index cda6fbf4..b9c9bc2e 100644 --- a/src/getRealignParamFile.m +++ b/src/getRealignParamFile.m @@ -21,8 +21,8 @@ if ~exist(realignParamFile, 'file') errorStruct.identifier = 'getRealignParamFile:nonExistentFile'; errorStruct.message = sprintf('%s\n%s', ... - 'This realignment file does not exist:', ... - realignParamFile); + 'This realignment file does not exist:', ... + realignParamFile); error(errorStruct); end end diff --git a/src/mancoreg/mancoreg.m b/src/mancoreg/mancoreg.m index d08e11ff..6ad811e4 100644 --- a/src/mancoreg/mancoreg.m +++ b/src/mancoreg/mancoreg.m @@ -111,35 +111,39 @@ function mancoreg(targetimage, sourceimage) % Source/target display toggle mancoregvar.htoggle_off = uicontrol(fg, ... - 'style', 'radiobutton', ... - 'position', ... - [470 100 opt.smallFontBoxWidth * 2 opt.largeFontBoxHeight] .* opt.windowScale, ... - 'Value', 1, ... 
- 'Callback', 'mancoregCallbacks(''toggle_off'')', ... - 'string', 'OFF'); + 'style', 'radiobutton', ... + 'position', ... + [470 100 opt.smallFontBoxWidth * ... + 2 opt.largeFontBoxHeight] .* opt.windowScale, ... + 'Value', 1, ... + 'Callback', 'mancoregCallbacks(''toggle_off'')', ... + 'string', 'OFF'); mancoregvar.htoggle_on = uicontrol(fg, ... - 'style', 'radiobutton', ... - 'position', ... - [530 100 opt.largeFontBoxWidth opt.largeFontBoxHeight] .* opt.windowScale, ... - 'Value', 0, ... - 'Callback', 'mancoregCallbacks(''toggle_on'')', ... - 'string', 'ON'); + 'style', 'radiobutton', ... + 'position', ... + [530 100 opt.largeFontBoxWidth opt.largeFontBoxHeight] .* ... + opt.windowScale, ... + 'Value', 0, ... + 'Callback', 'mancoregCallbacks(''toggle_on'')', ... + 'string', 'ON'); % "Reset transformation" pushbutton mancoregvar.hreset = uicontrol(fg, ... - 'style', 'pushbutton', ... - 'position', [370 75 220 opt.largeFontBoxHeight] .* opt.windowScale, ... - 'String', 'Reset transformation', ... - 'Callback', 'mancoregCallbacks(''reset'')'); + 'style', 'pushbutton', ... + 'position', [370 75 220 opt.largeFontBoxHeight] .* ... + opt.windowScale, ... + 'String', 'Reset transformation', ... + 'Callback', 'mancoregCallbacks(''reset'')'); % "Apply transformation" pushbutton mancoregvar.hwrite = uicontrol(fg, 'style', 'pushbutton', ... - 'position', [370 50 220 opt.largeFontBoxHeight] .* opt.windowScale, ... - 'String', 'Apply transformation', ... - 'Callback', 'mancoregCallbacks(''apply'')'); + 'position', [370 50 220 opt.largeFontBoxHeight] .* ... + opt.windowScale, ... + 'String', 'Apply transformation', ... + 'Callback', 'mancoregCallbacks(''apply'')'); %% Fill in "transf." fields @@ -156,30 +160,30 @@ function initTitleAndBoxes(opt, fg, targetimage, sourceimage) height = opt.largeFontBoxHeight; uicontrol(fg, ... - 'style', 'text', ... - 'string', 'Manual coregistration tool', ... - 'position', [200 825 300 30] .* windowScale, ... 
- 'Fontsize', 16, 'backgroundcolor', [1 1 1]); + 'style', 'text', ... + 'string', 'Manual coregistration tool', ... + 'position', [200 825 300 30] .* windowScale, ... + 'Fontsize', 16, 'backgroundcolor', [1 1 1]); uicontrol(fg, ... - 'style', 'frame', ... - 'position', [360 550 240 250] .* windowScale); + 'style', 'frame', ... + 'position', [360 550 240 250] .* windowScale); uicontrol(fg, 'style', 'frame', ... - 'position', [360 40 240 410] .* windowScale); + 'position', [360 40 240 410] .* windowScale); addTextToUI( ... - fg, 'TARGET IMAGE', ... - [370 760 100 height], opt, fontSize); + fg, 'TARGET IMAGE', ... + [370 760 100 height], opt, fontSize); addTextToUI( ... - fg, targetimage, ... - [370 700 220 height * 3], opt, fontSize); + fg, targetimage, ... + [370 700 220 height * 3], opt, fontSize); addTextToUI( ... - fg, 'SOURCE IMAGE', ... - [370 415 100 height], opt, fontSize); + fg, 'SOURCE IMAGE', ... + [370 415 100 height], opt, fontSize); addTextToUI( ... - fg, sourceimage, ... - [370 395 220 height], opt, fontSize); + fg, sourceimage, ... + [370 395 220 height], opt, fontSize); end @@ -194,8 +198,8 @@ function initTitleAndBoxes(opt, fg, targetimage, sourceimage) rowPosition = 360:-20:300; addTextToUI( ... - fg, 'transf.', ... - [370 360 width height], opt, opt.largeFontSize); + fg, 'transf.', ... + [370 360 width height], opt, opt.largeFontSize); for iRow = 1:numel(rowPosition) @@ -211,9 +215,9 @@ function initTitleAndBoxes(opt, fg, targetimage, sourceimage) end mancoregvar = addTextToUI( ... - fg, stringToUse, ... - [xPos yPos width height], opt, fontSize, ... - mancoregvar, fieldName); + fg, stringToUse, ... + [xPos yPos width height], opt, fontSize, ... + mancoregvar, fieldName); end end @@ -234,55 +238,55 @@ function initTitleAndBoxes(opt, fg, targetimage, sourceimage) % set sliders mancoregvar.hpitch = uicontrol(fg, 'style', 'slider', ... - 'position', [430 250 100 height] .* windowScale, ... - 'Value', 0, ... - 'min', sliderMin, ... 
- 'max', sliderMax, ... - 'sliderstep', sliderStep, ... - 'Callback', 'mancoregCallbacks(''move'')'); + 'position', [430 250 100 height] .* windowScale, ... + 'Value', 0, ... + 'min', sliderMin, ... + 'max', sliderMax, ... + 'sliderstep', sliderStep, ... + 'Callback', 'mancoregCallbacks(''move'')'); mancoregvar.hroll = uicontrol(fg, 'style', 'slider', ... - 'position', [430 225 100 height] .* windowScale, ... - 'Value', 0, ... - 'min', sliderMin, ... - 'max', sliderMax, ... - 'sliderstep', sliderStep, ... - 'Callback', 'mancoregCallbacks(''move'')'); + 'position', [430 225 100 height] .* windowScale, ... + 'Value', 0, ... + 'min', sliderMin, ... + 'max', sliderMax, ... + 'sliderstep', sliderStep, ... + 'Callback', 'mancoregCallbacks(''move'')'); mancoregvar.hyaw = uicontrol(fg, 'style', 'slider', ... - 'position', [430 200 100 height] .* windowScale, ... - 'Value', 0, ... - 'min', sliderMin, ... - 'max', sliderMax, ... - 'sliderstep', sliderStep, ... - 'Callback', 'mancoregCallbacks(''move'')'); + 'position', [430 200 100 height] .* windowScale, ... + 'Value', 0, ... + 'min', sliderMin, ... + 'max', sliderMax, ... + 'sliderstep', sliderStep, ... + 'Callback', 'mancoregCallbacks(''move'')'); % display text xPos = 370; addTextToUI( ... - fg, 'PITCH', ... - [xPos 250 width height], opt, fontSize); + fg, 'PITCH', ... + [xPos 250 width height], opt, fontSize); addTextToUI( ... - fg, 'ROLL', ... - [xPos 225 width height], opt, fontSize); + fg, 'ROLL', ... + [xPos 225 width height], opt, fontSize); addTextToUI( ... - fg, 'YAW', ... - [xPos 200 width height], opt, fontSize); + fg, 'YAW', ... + [xPos 200 width height], opt, fontSize); % display value xPos = 530; mancoregvar = addTextToUI( ... - fg, '0', ... - [xPos 250 width height], opt, fontSize, ... - mancoregvar, 'hpitch_val'); + fg, '0', ... + [xPos 250 width height], opt, fontSize, ... + mancoregvar, 'hpitch_val'); mancoregvar = addTextToUI( ... - fg, '0', ... - [xPos 225 width height], opt, fontSize, ... 
- mancoregvar, 'hroll_val'); + fg, '0', ... + [xPos 225 width height], opt, fontSize, ... + mancoregvar, 'hroll_val'); mancoregvar = addTextToUI( ... - fg, '0', ... - [xPos 200 width height], opt, fontSize, ... - mancoregvar, 'hyaw_val'); + fg, '0', ... + [xPos 200 width height], opt, fontSize, ... + mancoregvar, 'hyaw_val'); end @@ -302,58 +306,58 @@ function initTitleAndBoxes(opt, fg, targetimage, sourceimage) xPos = 430; mancoregvar.hx = uicontrol(fg, ... - 'style', 'slider', ... - 'position', [xPos 175 100 height] .* windowScale, ... - 'Value', 0, ... - 'min', sliderMin, ... - 'max', sliderMax, ... - 'sliderstep', sliderStep, ... - 'Callback', 'mancoregCallbacks(''move'')'); + 'style', 'slider', ... + 'position', [xPos 175 100 height] .* windowScale, ... + 'Value', 0, ... + 'min', sliderMin, ... + 'max', sliderMax, ... + 'sliderstep', sliderStep, ... + 'Callback', 'mancoregCallbacks(''move'')'); mancoregvar.hy = uicontrol(fg, ... - 'style', 'slider', ... - 'position', [xPos 150 100 height] .* windowScale, ... - 'Value', 0, ... - 'min', sliderMin, ... - 'max', sliderMax, ... - 'sliderstep', sliderStep, ... - 'Callback', 'mancoregCallbacks(''move'')'); + 'style', 'slider', ... + 'position', [xPos 150 100 height] .* windowScale, ... + 'Value', 0, ... + 'min', sliderMin, ... + 'max', sliderMax, ... + 'sliderstep', sliderStep, ... + 'Callback', 'mancoregCallbacks(''move'')'); mancoregvar.hz = uicontrol(fg, ... - 'style', 'slider', ... - 'position', [xPos 125 100 height] .* windowScale, ... - 'Value', 0, ... - 'min', sliderMin, ... - 'max', sliderMax, ... - 'sliderstep', sliderStep, ... - 'Callback', 'mancoregCallbacks(''move'')'); + 'style', 'slider', ... + 'position', [xPos 125 100 height] .* windowScale, ... + 'Value', 0, ... + 'min', sliderMin, ... + 'max', sliderMax, ... + 'sliderstep', sliderStep, ... + 'Callback', 'mancoregCallbacks(''move'')'); % display text xPos = 370; addTextToUI( ... - fg, 'X', ... - [xPos 175 width height], opt, fontSize); + fg, 'X', ... 
+ [xPos 175 width height], opt, fontSize); addTextToUI( ... - fg, 'Y', ... - [xPos 150 width height], opt, fontSize); + fg, 'Y', ... + [xPos 150 width height], opt, fontSize); addTextToUI( ... - fg, 'Z', ... - [xPos 125 width height], opt, fontSize); + fg, 'Z', ... + [xPos 125 width height], opt, fontSize); % display value xPos = 530; mancoregvar = addTextToUI( ... - fg, '0', ... - [xPos 175 width height], opt, fontSize, ... - mancoregvar, 'hx_val'); + fg, '0', ... + [xPos 175 width height], opt, fontSize, ... + mancoregvar, 'hx_val'); mancoregvar = addTextToUI( ... - fg, '0', ... - [xPos 150 width height], opt, fontSize, ... - mancoregvar, 'hy_val'); + fg, '0', ... + [xPos 150 width height], opt, fontSize, ... + mancoregvar, 'hy_val'); mancoregvar = addTextToUI( ... - fg, '0', ... - [xPos 125 width height], opt, fontSize, ... - mancoregvar, 'hz_val'); + fg, '0', ... + [xPos 125 width height], opt, fontSize, ... + mancoregvar, 'hz_val'); end @@ -367,10 +371,10 @@ function initTitleAndBoxes(opt, fg, targetimage, sourceimage) end handle = uicontrol(fg, ... - 'style', 'text', ... - 'string', textString, ... - 'position', position .* opt.windowScale, ... - 'Fontsize', fontSize); + 'style', 'text', ... + 'string', textString, ... + 'position', position .* opt.windowScale, ... + 'Fontsize', fontSize); if ~isempty(fieldName) mancoregvar.(fieldName) = handle; diff --git a/src/mancoreg/mancoregCallbacks.m b/src/mancoreg/mancoregCallbacks.m index 38856605..cc429f12 100644 --- a/src/mancoreg/mancoregCallbacks.m +++ b/src/mancoreg/mancoregCallbacks.m @@ -181,7 +181,7 @@ function applyTransformationMatrix() for i = 1:size(imagesToReorient, 1) tmp = ... sprintf('Reading current orientations... %.0f%%.\n', i / ... 
- size(imagesToReorient, 1) * 100); + size(imagesToReorient, 1) * 100); fprintf('%s', tmp); matricesToChange(:, :, i) = spm_get_space(imagesToReorient(i, :)); diff --git a/src/printProcessingRun.m b/src/printProcessingRun.m index fab736ea..6c080570 100644 --- a/src/printProcessingRun.m +++ b/src/printProcessingRun.m @@ -3,12 +3,12 @@ function printProcessingRun(groupName, iSub, subID, iSes, iRun) fprintf(1, ... - [ ... - 'PROCESSING GROUP: %s' ... - 'SUBJECT No.: %i ' ... - 'SUBJECT ID : %s ' ... - 'SESSION: %i ' ... - 'RUN: %i \n'], ... - groupName, iSub, subID, iSes, iRun); + [ ... + 'PROCESSING GROUP: %s' ... + 'SUBJECT No.: %i ' ... + 'SUBJECT ID : %s ' ... + 'SESSION: %i ' ... + 'RUN: %i \n'], ... + groupName, iSub, subID, iSes, iRun); end diff --git a/src/printProcessingSubject.m b/src/printProcessingSubject.m index b8c281ca..d59694d9 100644 --- a/src/printProcessingSubject.m +++ b/src/printProcessingSubject.m @@ -3,9 +3,9 @@ function printProcessingSubject(groupName, iSub, subID) fprintf(1, [ ... - 'PROCESSING GROUP: %s' ... - 'SUBJECT No.: %i ' ... - 'SUBJECT ID : %s \n'], ... - groupName, iSub, subID); + 'PROCESSING GROUP: %s' ... + 'SUBJECT No.: %i ' ... + 'SUBJECT ID : %s \n'], ... + groupName, iSub, subID); end diff --git a/src/saveMatlabBatch.m b/src/saveMatlabBatch.m index 61a067cc..feafe579 100644 --- a/src/saveMatlabBatch.m +++ b/src/saveMatlabBatch.m @@ -20,9 +20,9 @@ function saveMatlabBatch(matlabbatch, batchType, opt, subID) [~, ~, ~] = mkdir(jobsDir); filename = sprintf( ... - '%s_jobs_matlabbatch_SPM12_%s.mat', ... - datestr(now, 'yyyymmdd_HHMM'), ... - batchType); + '%s_jobs_matlabbatch_SPM12_%s.mat', ... + datestr(now, 'yyyymmdd_HHMM'), ... + batchType); [OS, GeneratedBy] = getEnvInfo(); GeneratedBy(1).Description = batchType; @@ -37,7 +37,7 @@ function saveMatlabBatch(matlabbatch, batchType, opt, subID) json.OS = OS; spm_jsonwrite( ... - fullfile(jobsDir, strrep(filename, '.mat', '.json')), ... 
- json, opts); + fullfile(jobsDir, strrep(filename, '.mat', '.json')), ... + json, opts); end diff --git a/src/spmBatching/bidsCopyRawFolder.m b/src/spmBatching/bidsCopyRawFolder.m index 52c7ceea..00a3fd3e 100644 --- a/src/spmBatching/bidsCopyRawFolder.m +++ b/src/spmBatching/bidsCopyRawFolder.m @@ -68,19 +68,20 @@ function bidsCopyRawFolder(opt, deleteZippedNii) % to get the data 'out' of an eventual datalad dataset try system( ... - sprintf('cp -Lr %s %s', ... - fullfile(rawDir, subDir), ... - fullfile(derivativeDir, subDir))); + sprintf('cp -Lr %s %s', ... + fullfile(rawDir, subDir), ... + fullfile(derivativeDir, subDir))); catch message = [ ... - 'Copying data with system command failed: are you running Windows?\n', ... - 'Will use matlab/octave copyfile command instead.\n', ... - 'Could be an issue if your data set contains symbolic links' ... - '(e.g. if you use datalad or git-annex.']; + 'Copying data with system command failed: ' ... + 'are you running Windows?\n', ... + 'Will use matlab/octave copyfile command instead.\n', ... + 'Could be an issue if your data set contains symbolic links' ... + '(e.g. if you use datalad or git-annex.']; warning(message); copyfile(fullfile(rawDir, subDir), ... - fullfile(derivativeDir, subDir)); + fullfile(derivativeDir, subDir)); end fprintf('folder copied: %s \n', subDir); diff --git a/src/spmBatching/bidsFFX.m b/src/spmBatching/bidsFFX.m index 8dad316a..6143e5c7 100644 --- a/src/spmBatching/bidsFFX.m +++ b/src/spmBatching/bidsFFX.m @@ -49,7 +49,7 @@ function bidsFFX(action, opt, funcFWHM, isMVPA) fprintf(1, 'BUILDING JOB : FMRI design\n'); matlabbatch = setBatchSubjectLevelGLMSpec( ... - BIDS, opt, subID, funcFWHM, isMVPA); + BIDS, opt, subID, funcFWHM, isMVPA); fprintf(1, 'BUILDING JOB : FMRI estimate\n'); @@ -63,19 +63,19 @@ function bidsFFX(action, opt, funcFWHM, isMVPA) 'fMRI model specification: SPM.mat File'; matlabbatch{2}.spm.stats.fmri_est.spmmat(1).src_exbranch = ... substruct( ... - '.', 'val', '{}', {1}, ... 
- '.', 'val', '{}', {1}, ... - '.', 'val', '{}', {1}); + '.', 'val', '{}', {1}, ... + '.', 'val', '{}', {1}, ... + '.', 'val', '{}', {1}); matlabbatch{2}.spm.stats.fmri_est.spmmat(1).src_output = ... substruct('.', 'spmmat'); matlabbatch{2}.spm.stats.fmri_est.method.Classical = 1; matlabbatch{2}.spm.stats.fmri_est.write_residuals = 1; saveMatlabBatch(matlabbatch, ... - ['specifyAndEstimateFfx_task-', opt.taskName, ... - '_FWHM-', num2str(funcFWHM), ... - mvpaSuffix], ... - opt, subID); + ['specifyAndEstimateFfx_task-', opt.taskName, ... + '_FWHM-', num2str(funcFWHM), ... + mvpaSuffix], ... + opt, subID); case 'contrasts' @@ -84,10 +84,10 @@ function bidsFFX(action, opt, funcFWHM, isMVPA) matlabbatch = setBatchSubjectLevelContrasts(opt, subID, funcFWHM, isMVPA); saveMatlabBatch(matlabbatch, ... - ['contrastsFfx_task-', opt.taskName, ... - '_FWHM-', num2str(funcFWHM), ... - mvpaSuffix], ... - opt, subID); + ['contrastsFfx_task-', opt.taskName, ... + '_FWHM-', num2str(funcFWHM), ... + mvpaSuffix], ... + opt, subID); end diff --git a/src/spmBatching/bidsRFX.m b/src/spmBatching/bidsRFX.m index 1a0fb057..5008c82c 100644 --- a/src/spmBatching/bidsRFX.m +++ b/src/spmBatching/bidsRFX.m @@ -48,9 +48,9 @@ function bidsRFX(action, funcFWHM, conFWHM, opt, isMVPA) matlabbatch = setBatchSmoothConImages(group, funcFWHM, conFWHM, opt, isMVPA); saveMatlabBatch( ... - ['smoothCon_FWHM-', num2str(conFWHM), '_task-', opt.taskName], ... - 'STC', ... - opt); + ['smoothCon_FWHM-', num2str(conFWHM), '_task-', opt.taskName], ... + 'STC', ... + opt); spm_jobman('run', matlabbatch); diff --git a/src/spmBatching/bidsResults.m b/src/spmBatching/bidsResults.m index c7cb3d3a..e3c2aecd 100644 --- a/src/spmBatching/bidsResults.m +++ b/src/spmBatching/bidsResults.m @@ -38,13 +38,13 @@ function bidsResults(opt, funcFWHM, conFWHM, isMVPA) matlabbatch = ... setBatchSubjectLevelResults( ... - matlabbatch, ... - group, ... - funcFWHM, ... - opt, ... - isMVPA, ... - iStep, ... - iCon); + matlabbatch, ... 
+ group, ... + funcFWHM, ... + opt, ... + isMVPA, ... + iStep, ... + iCon); % TODO % Save this batch in for each subject and not once for all @@ -63,7 +63,7 @@ function bidsResults(opt, funcFWHM, conFWHM, isMVPA) results.nbSubj = SPM.nscan; matlabbatch = resultsMatlabbatch( ... - matlabbatch, opt, iStep, iCon, results); + matlabbatch, opt, iStep, iCon, results); saveMatlabBatch(matlabbatch, 'computeRfxResults', opt); @@ -148,16 +148,16 @@ function bidsResults(opt, funcFWHM, conFWHM, isMVPA) % identify which contrast nb actually has the name the user asked conNb = find( ... - strcmp({SPM.xCon.name}', ... - opt.result.Steps(iStep).Contrasts(iCon).Name)); + strcmp({SPM.xCon.name}', ... + opt.result.Steps(iStep).Contrasts(iCon).Name)); if isempty(conNb) sprintf('List of contrast in this SPM file'); disp({SPM.xCon.name}'); error( ... - 'This SPM file %s does not contain a contrast named %s', ... - fullfile(ffxDir, 'SPM.mat'), ... - opt.result.Steps(1).Contrasts(iCon).Name); + 'This SPM file %s does not contain a contrast named %s', ... + fullfile(ffxDir, 'SPM.mat'), ... + opt.result.Steps(1).Contrasts(iCon).Name); end results.dir = ffxDir; @@ -166,7 +166,7 @@ function bidsResults(opt, funcFWHM, conFWHM, isMVPA) results.nbSubj = 1; batch = resultsMatlabbatch( ... - batch, opt, iStep, iCon, results); + batch, opt, iStep, iCon, results); end end diff --git a/src/spmBatching/bidsSpatialPrepro.m b/src/spmBatching/bidsSpatialPrepro.m index f80bad1a..ac0ce49c 100644 --- a/src/spmBatching/bidsSpatialPrepro.m +++ b/src/spmBatching/bidsSpatialPrepro.m @@ -55,9 +55,9 @@ function bidsSpatialPrepro(opt) fprintf(1, ' BUILDING SPATIAL JOB : SELECTING ANATOMCAL\n'); % get all T1w images for that subject and anat = spm_BIDS(BIDS, 'data', ... - 'sub', subID, ... - 'ses', sessions{structSession}, ... - 'type', 'T1w'); + 'sub', subID, ... + 'ses', sessions{structSession}, ... 
+ 'type', 'T1w'); % we assume that the first T1w is the correct one (could be an % issue for dataset with more than one diff --git a/src/spmBatching/setBatchCoregistration.m b/src/spmBatching/setBatchCoregistration.m index 8cab1761..243353da 100644 --- a/src/spmBatching/setBatchCoregistration.m +++ b/src/spmBatching/setBatchCoregistration.m @@ -12,8 +12,8 @@ 'Named File Selector: Structural(1) - Files'; matlabbatch{end}.spm.spatial.coreg.estimate.ref(1).src_exbranch = ... substruct( ... - '.', 'val', '{}', {1}, ... - '.', 'val', '{}', {1}); + '.', 'val', '{}', {1}, ... + '.', 'val', '{}', {1}); matlabbatch{end}.spm.spatial.coreg.estimate.ref(1).src_output = ... substruct('.', 'files', '{}', {1}); @@ -29,10 +29,10 @@ 'Realign: Estimate & Reslice: Mean Image'; matlabbatch{end}.spm.spatial.coreg.estimate.source(1).src_exbranch = ... substruct( ... - '.', 'val', '{}', {2}, ... - '.', 'val', '{}', {1}, ... - '.', 'val', '{}', {1}, ... - '.', 'val', '{}', {1}); + '.', 'val', '{}', {2}, ... + '.', 'val', '{}', {1}, ... + '.', 'val', '{}', {1}, ... + '.', 'val', '{}', {1}); matlabbatch{end}.spm.spatial.coreg.estimate.source(1).src_output = ... substruct('.', 'rmean'); @@ -54,14 +54,14 @@ ['Realign: Estimate & Reslice: Realigned Images (Sess ' (iSes) ')']; matlabbatch{end}.spm.spatial.coreg.estimate.other(iSes).src_exbranch = ... substruct( ... - '.', 'val', '{}', {2}, ... - '.', 'val', '{}', {1}, ... - '.', 'val', '{}', {1}, ... - '.', 'val', '{}', {1}); + '.', 'val', '{}', {2}, ... + '.', 'val', '{}', {1}, ... + '.', 'val', '{}', {1}, ... + '.', 'val', '{}', {1}); matlabbatch{end}.spm.spatial.coreg.estimate.other(iSes).src_output = ... substruct( ... - '.', 'sess', '()', {iSes}, ... - '.', 'cfiles'); + '.', 'sess', '()', {iSes}, ... 
+ '.', 'cfiles'); end % The following lines are commented out because those parameters diff --git a/src/spmBatching/setBatchFactorialDesign.m b/src/spmBatching/setBatchFactorialDesign.m index 4414c800..7649cc84 100644 --- a/src/spmBatching/setBatchFactorialDesign.m +++ b/src/spmBatching/setBatchFactorialDesign.m @@ -66,7 +66,7 @@ matlabbatch{j}.spm.stats.factorial_design.masking.tm.tm_none = 1; matlabbatch{j}.spm.stats.factorial_design.masking.im = 1; matlabbatch{j}.spm.stats.factorial_design.masking.em = { ... - fullfile(rfxDir, 'MeanMask.nii')}; + fullfile(rfxDir, 'MeanMask.nii')}; matlabbatch{j}.spm.stats.factorial_design.globalc.g_omit = 1; matlabbatch{j}.spm.stats.factorial_design.globalm.gmsca.gmsca_no = 1; matlabbatch{j}.spm.stats.factorial_design.globalm.glonorm = 1; diff --git a/src/spmBatching/setBatchMeanAnatAndMask.m b/src/spmBatching/setBatchMeanAnatAndMask.m index 669e41ac..84ba32e9 100644 --- a/src/spmBatching/setBatchMeanAnatAndMask.m +++ b/src/spmBatching/setBatchMeanAnatAndMask.m @@ -25,8 +25,8 @@ %% STRUCTURAL struct = spm_BIDS(BIDS, 'data', ... - 'sub', subID, ... - 'type', 'T1w'); + 'sub', subID, ... + 'type', 'T1w'); % we assume that the first T1w is the correct one (could be an % issue for data set with more than one struct = struct{1}; @@ -41,10 +41,10 @@ end files = inputFileValidation( ... - subStrucDataDir, ... - [spm_get_defaults('normalise.write.prefix'), ... - spm_get_defaults('deformations.modulate.prefix')], ... - fileName); + subStrucDataDir, ... + [spm_get_defaults('normalise.write.prefix'), ... + spm_get_defaults('deformations.modulate.prefix')], ... 
+ fileName); matlabbatch{1}.spm.util.imcalc.input{subCounter, :} = files{1}; diff --git a/src/spmBatching/setBatchNormalizationSpatialPrepro.m b/src/spmBatching/setBatchNormalizationSpatialPrepro.m index ed543251..8dc17973 100644 --- a/src/spmBatching/setBatchNormalizationSpatialPrepro.m +++ b/src/spmBatching/setBatchNormalizationSpatialPrepro.m @@ -8,11 +8,11 @@ deformationField = ... cfg_dep('Segment: Forward Deformations', ... - substruct( ... - '.', 'val', '{}', {4}, ... - '.', 'val', '{}', {1}, ... - '.', 'val', '{}', {1}), ... - substruct('.', 'fordef', '()', {':'})); + substruct( ... + '.', 'val', '{}', {4}, ... + '.', 'val', '{}', {1}, ... + '.', 'val', '{}', {1}), ... + substruct('.', 'fordef', '()', {':'})); % we set images to be resampled at the voxel size we had at acquisition matlabbatch = setBatchNormalize(matlabbatch, deformationField, voxDim); @@ -21,24 +21,24 @@ matlabbatch{jobsToAdd}.spm.spatial.normalise.write.subj.resample(1) = ... cfg_dep('Coregister: Estimate: Coregistered Images', ... - substruct( ... - '.', 'val', '{}', {3}, ... - '.', 'val', '{}', {1}, ... - '.', 'val', '{}', {1}, ... - '.', 'val', '{}', {1}), ... - substruct('.', 'cfiles')); + substruct( ... + '.', 'val', '{}', {3}, ... + '.', 'val', '{}', {1}, ... + '.', 'val', '{}', {1}, ... + '.', 'val', '{}', {1}), ... + substruct('.', 'cfiles')); % NORMALIZE STRUCTURAL fprintf(1, ' BUILDING SPATIAL JOB : NORMALIZE STRUCTURAL\n'); matlabbatch{jobsToAdd + 1}.spm.spatial.normalise.write.subj.resample(1) = ... cfg_dep('Segment: Bias Corrected (1)', ... - substruct( ... - '.', 'val', '{}', {4}, ... - '.', 'val', '{}', {1}, ... - '.', 'val', '{}', {1}), ... - substruct( ... - '.', 'channel', '()', {1}, ... - '.', 'biascorr', '()', {':'})); + substruct( ... + '.', 'val', '{}', {4}, ... + '.', 'val', '{}', {1}, ... + '.', 'val', '{}', {1}), ... + substruct( ... + '.', 'channel', '()', {1}, ... 
+ '.', 'biascorr', '()', {':'})); % size 3 allow to run RunQA / original voxel size at acquisition matlabbatch{jobsToAdd + 1}.spm.spatial.normalise.write.woptions.vox = [1 1 1]; @@ -46,36 +46,36 @@ fprintf(1, ' BUILDING SPATIAL JOB : NORMALIZE GREY MATTER\n'); matlabbatch{jobsToAdd + 2}.spm.spatial.normalise.write.subj.resample(1) = ... cfg_dep('Segment: c1 Images', ... - substruct( ... - '.', 'val', '{}', {4}, ... - '.', 'val', '{}', {1}, ... - '.', 'val', '{}', {1}), ... - substruct( ... - '.', 'tiss', '()', {1}, ... - '.', 'c', '()', {':'})); + substruct( ... + '.', 'val', '{}', {4}, ... + '.', 'val', '{}', {1}, ... + '.', 'val', '{}', {1}), ... + substruct( ... + '.', 'tiss', '()', {1}, ... + '.', 'c', '()', {':'})); % NORMALIZE WHITE MATTER fprintf(1, ' BUILDING SPATIAL JOB : NORMALIZE WHITE MATTER\n'); matlabbatch{jobsToAdd + 3}.spm.spatial.normalise.write.subj.resample(1) = ... cfg_dep('Segment: c2 Images', ... - substruct( ... - '.', 'val', '{}', {4}, ... - '.', 'val', '{}', {1}, ... - '.', 'val', '{}', {1}), ... - substruct( ... - '.', 'tiss', '()', {2}, ... - '.', 'c', '()', {':'})); + substruct( ... + '.', 'val', '{}', {4}, ... + '.', 'val', '{}', {1}, ... + '.', 'val', '{}', {1}), ... + substruct( ... + '.', 'tiss', '()', {2}, ... + '.', 'c', '()', {':'})); % NORMALIZE CSF MATTER fprintf(1, ' BUILDING SPATIAL JOB : NORMALIZE CSF\n'); matlabbatch{jobsToAdd + 4}.spm.spatial.normalise.write.subj.resample(1) = ... cfg_dep('Segment: c3 Images', ... - substruct( ... - '.', 'val', '{}', {4}, ... - '.', 'val', '{}', {1}, ... - '.', 'val', '{}', {1}), ... - substruct( ... - '.', 'tiss', '()', {3}, ... - '.', 'c', '()', {':'})); + substruct( ... + '.', 'val', '{}', {4}, ... + '.', 'val', '{}', {1}, ... + '.', 'val', '{}', {1}), ... + substruct( ... + '.', 'tiss', '()', {3}, ... 
+ '.', 'c', '()', {':'})); end diff --git a/src/spmBatching/setBatchRealign.m b/src/spmBatching/setBatchRealign.m index 69b96556..a9c7447b 100644 --- a/src/spmBatching/setBatchRealign.m +++ b/src/spmBatching/setBatchRealign.m @@ -20,8 +20,8 @@ % get the filename for this bold run for this task [fileName, subFuncDataDir] = getBoldFilename( ... - BIDS, ... - subID, sessions{iSes}, runs{iRun}, opt); + BIDS, ... + subID, sessions{iSes}, runs{iRun}, opt); % check that the file with the right prefix exist and we get and % save its voxeldimension diff --git a/src/spmBatching/setBatchRealignReslice.m b/src/spmBatching/setBatchRealignReslice.m index 76cfbc82..21486609 100644 --- a/src/spmBatching/setBatchRealignReslice.m +++ b/src/spmBatching/setBatchRealignReslice.m @@ -22,8 +22,8 @@ % get the filename for this bold run for this task [fileName, subFuncDataDir] = getBoldFilename( ... - BIDS, ... - subID, sessions{iSes}, runs{iRun}, opt); + BIDS, ... + subID, sessions{iSes}, runs{iRun}, opt); % check that the file with the right prefix exist prefix = getPrefix('preprocess', opt); diff --git a/src/spmBatching/setBatchSTC.m b/src/spmBatching/setBatchSTC.m index b4ea3830..6483052e 100644 --- a/src/spmBatching/setBatchSTC.m +++ b/src/spmBatching/setBatchSTC.m @@ -43,10 +43,10 @@ end if referenceSlice > TA error('%s (%f) %s (%f).\n%s', ... - 'The reference slice time', referenceSlice, ... - 'is greater than the acquisition time', TA, ... - ['Reference slice time must be in milliseconds ' ... - 'or leave it empty to use mid-acquisition time as reference.']); + 'The reference slice time', referenceSlice, ... + 'is greater than the acquisition time', TA, ... + ['Reference slice time must be in milliseconds ' ... + 'or leave it empty to use mid-acquisition time as reference.']); end % prefix of the files to look for @@ -65,8 +65,8 @@ % get the filename for this bold run for this task [fileName, subFuncDataDir] = getBoldFilename( ... - BIDS, ... 
- subID, sessions{iSes}, runs{iRun}, opt); + BIDS, ... + subID, sessions{iSes}, runs{iRun}, opt); % check that the file with the right prefix exist files = inputFileValidation(subFuncDataDir, prefix, fileName); diff --git a/src/spmBatching/setBatchSegmentation.m b/src/spmBatching/setBatchSegmentation.m index 5abb8320..c4bd3285 100644 --- a/src/spmBatching/setBatchSegmentation.m +++ b/src/spmBatching/setBatchSegmentation.m @@ -8,12 +8,12 @@ % SAVE BIAS CORRECTED IMAGE matlabbatch{end + 1}.spm.spatial.preproc.channel.vols(1) = ... cfg_dep('Named File Selector: Structural(1) - Files', ... - substruct( ... - '.', 'val', '{}', {1}, ... - '.', 'val', '{}', {1}, ... - '.', 'val', '{}', {1}, ... - '.', 'val', '{}', {1}), ... - substruct('.', 'files', '{}', {1})); + substruct( ... + '.', 'val', '{}', {1}, ... + '.', 'val', '{}', {1}, ... + '.', 'val', '{}', {1}, ... + '.', 'val', '{}', {1}), ... + substruct('.', 'files', '{}', {1})); matlabbatch{end}.spm.spatial.preproc.channel.biasreg = 0.001; matlabbatch{end}.spm.spatial.preproc.channel.biasfwhm = 60; matlabbatch{end}.spm.spatial.preproc.channel.write = [0 1]; diff --git a/src/spmBatching/setBatchSmoothConImages.m b/src/spmBatching/setBatchSmoothConImages.m index 1f48c6f0..26a9e871 100644 --- a/src/spmBatching/setBatchSmoothConImages.m +++ b/src/spmBatching/setBatchSmoothConImages.m @@ -29,7 +29,8 @@ [conFWHM conFWHM conFWHM]; matlabbatch{counter}.spm.spatial.smooth.dtype = 0; matlabbatch{counter}.spm.spatial.smooth.prefix = [ ... - spm_get_defaults('smooth.prefix'), num2str(conFWHM)]; + spm_get_defaults('smooth.prefix'), ... + num2str(conFWHM)]; end end diff --git a/src/spmBatching/setBatchSmoothing.m b/src/spmBatching/setBatchSmoothing.m index ff4bb1a4..8e0e2fcc 100644 --- a/src/spmBatching/setBatchSmoothing.m +++ b/src/spmBatching/setBatchSmoothing.m @@ -27,8 +27,8 @@ % get the filename for this bold run for this task [fileName, subFuncDataDir] = getBoldFilename( ... - BIDS, ... 
- subID, sessions{iSes}, runs{iRun}, opt); + BIDS, ... + subID, sessions{iSes}, runs{iRun}, opt); % check that the file with the right prefix exist files = inputFileValidation(subFuncDataDir, prefix, fileName); diff --git a/src/spmBatching/setBatchSubjectLevelGLMSpec.m b/src/spmBatching/setBatchSubjectLevelGLMSpec.m index 11d518d1..b0f10b36 100644 --- a/src/spmBatching/setBatchSubjectLevelGLMSpec.m +++ b/src/spmBatching/setBatchSubjectLevelGLMSpec.m @@ -16,8 +16,8 @@ % to set the number of time bins we will use to upsample our model % during regression creation fileName = spm_BIDS(BIDS, 'data', ... - 'sub', subID, ... - 'type', 'bold'); + 'sub', subID, ... + 'type', 'bold'); fileName = strrep(fileName{1}, '.gz', ''); hdr = spm_vol(fileName); % we are assuming axial acquisition here diff --git a/src/utils/checkDependencies.m b/src/utils/checkDependencies.m index a05be61e..5f422f8e 100644 --- a/src/utils/checkDependencies.m +++ b/src/utils/checkDependencies.m @@ -10,7 +10,7 @@ function checkDependencies() nifti_tools_url = ... ['https://www.mathworks.com/matlabcentral/fileexchange/' ... - '8797-tools-for-nifti-and-analyze-image']; + '8797-tools-for-nifti-and-analyze-image']; fprintf('Checking dependencies\n'); @@ -20,8 +20,8 @@ function checkDependencies() fprintf(' Using %s %s\n', a, b); if any(~[strcmp(a, SPM_main) strcmp(b, SPM_sub)]) str = sprintf('%s %s %s.\n%s', ... - 'The current version SPM version is not', SPM_main, SPM_sub, ... - 'In case of problems (e.g json file related) consider updating.'); + 'The current version SPM version is not', SPM_main, SPM_sub, ... + 'In case of problems (e.g json file related) consider updating.'); warning(str); %#ok<*SPWRN> end catch @@ -34,8 +34,9 @@ function checkDependencies() if isempty(a) errorStruct.identifier = 'checkDependencies:missingDependency'; errorStruct.message = sprintf('%s \n%s', ... - 'Failed to find the Nifti tools: Are you sure they in the matlab path?', ... 
- 'You can download them here: %s', nifti_tools_url); + ['Failed to find the Nifti tools: ' ... + 'Are you sure they in the matlab path?'], ... + 'You can download them here: %s', nifti_tools_url); error(errorStruct); else fprintf(' Nifti tools detected\n'); diff --git a/src/utils/checkOptions.m b/src/utils/checkOptions.m index a6061c42..226b960d 100644 --- a/src/utils/checkOptions.m +++ b/src/utils/checkOptions.m @@ -19,7 +19,7 @@ errorStruct.identifier = 'checkOptions:groupNotString'; errorStruct.message = sprintf( ... - 'All group names should be string.'); + 'All group names should be string.'); error(errorStruct); end @@ -28,8 +28,9 @@ errorStruct.identifier = 'checkOptions:refSliceNotScalar'; errorStruct.message = sprintf( ... - 'options.STC_referenceSlice should be a scalar. \nCurrent value is: %d', ... - opt.STC_referenceSlice); + ['options.STC_referenceSlice should be a scalar.' ... + '\nCurrent value is: %d'], ... + opt.STC_referenceSlice); error(errorStruct); end @@ -38,8 +39,9 @@ errorStruct.identifier = 'checkOptions:voxDim'; errorStruct.message = sprintf( ... - 'opt.funcVoxelDims should be a vector of length 3. \nCurrent value is: %d', ... - opt.funcVoxelDims); + ['opt.funcVoxelDims should be a vector of length 3. '... + '\nCurrent value is: %d'], ... + opt.funcVoxelDims); error(errorStruct); end diff --git a/src/utils/getEnvInfo.m b/src/utils/getEnvInfo.m index 3d99e14c..bc72f021 100644 --- a/src/utils/getEnvInfo.m +++ b/src/utils/getEnvInfo.m @@ -54,7 +54,7 @@ end [~, out] = system(cmd); vars = regexp(strtrim(out), '^(.*)=(.*)$', ... 
- 'tokens', 'lineanchors', 'dotexceptnewline'); + 'tokens', 'lineanchors', 'dotexceptnewline'); vars = vertcat(vars{:}); keys = vars(:, 1); vals = vars(:, 2); diff --git a/src/utils/getVersion.m b/src/utils/getVersion.m index f79c8dd6..c9ac17be 100644 --- a/src/utils/getVersion.m +++ b/src/utils/getVersion.m @@ -3,7 +3,7 @@ function versionNumber = getVersion() try versionNumber = fileread(fullfile(fileparts(mfilename('fullpath')), ... - '..', '..', 'version.txt')); + '..', '..', 'version.txt')); catch versionNumber = 'v0.0.3'; end diff --git a/src/utils/inputFileValidation.m b/src/utils/inputFileValidation.m index 9c555f4c..b3c0e880 100644 --- a/src/utils/inputFileValidation.m +++ b/src/utils/inputFileValidation.m @@ -7,8 +7,8 @@ errorStruct.identifier = 'inputFileValidation:nonExistentFile'; errorStruct.message = sprintf( ... - 'This file does not exist: %s', ... - fullfile(dir, [prefix fileName '[.gz]'])); + 'This file does not exist: %s', ... + fullfile(dir, [prefix fileName '[.gz]'])); error(errorStruct); else diff --git a/src/utils/printCredits.m b/src/utils/printCredits.m index 2fb1090c..ed6ddf79 100644 --- a/src/utils/printCredits.m +++ b/src/utils/printCredits.m @@ -5,11 +5,11 @@ function printCredits() versionNumber = getVersion(); contributors = { ... - 'Mohamed Rezk', ... - 'Rémi Gau', ... - 'Olivier Collignon', ... - 'Ane Gurtubay', ... - 'Marco Barilari'}; + 'Mohamed Rezk', ... + 'Rémi Gau', ... + 'Olivier Collignon', ... + 'Ane Gurtubay', ... + 'Marco Barilari'}; DOI_URL = 'https://doi.org/10.5281/zenodo.3554331.'; diff --git a/src/utils/setDefaultFields.m b/src/utils/setDefaultFields.m index f2d199aa..b35286fb 100644 --- a/src/utils/setDefaultFields.m +++ b/src/utils/setDefaultFields.m @@ -24,9 +24,9 @@ else structure = setFieldToIfNotPresent( ... - structure, ... - names{i}, ... - thisField); + structure, ... + names{i}, ... 
+ thisField); end end diff --git a/tests/test_checkOptions.m b/tests/test_checkOptions.m index 46f5e45b..43f562e3 100644 --- a/tests/test_checkOptions.m +++ b/tests/test_checkOptions.m @@ -33,8 +33,8 @@ function test_checkOptionsErrorGroup() opt.groups = {1}; assertExceptionThrown( ... - @()checkOptions(opt), ... - 'checkOptions:groupNotString'); + @()checkOptions(opt), ... + 'checkOptions:groupNotString'); end @@ -43,8 +43,8 @@ function test_checkOptionsErrorRefSlice() opt.STC_referenceSlice = [1:10]; assertExceptionThrown( ... - @()checkOptions(opt), ... - 'checkOptions:refSliceNotScalar'); + @()checkOptions(opt), ... + 'checkOptions:refSliceNotScalar'); end @@ -53,8 +53,8 @@ function test_checkOptionsErrorVoxDim() opt.funcVoxelDims = [1:10]; assertExceptionThrown( ... - @()checkOptions(opt), ... - 'checkOptions:voxDim'); + @()checkOptions(opt), ... + 'checkOptions:voxDim'); end diff --git a/tests/test_createAndReturnOnsetFile.m b/tests/test_createAndReturnOnsetFile.m index 5832e07f..0c0e6434 100644 --- a/tests/test_createAndReturnOnsetFile.m +++ b/tests/test_createAndReturnOnsetFile.m @@ -19,8 +19,8 @@ function test_createAndReturnOnsetFileBasic() opt.groups = {''}; opt.subjects = {'01'}; opt.model.univariate.file = fullfile(fileparts(mfilename('fullpath')), ... - 'dummyData', 'model', ... - 'model-vislocalizer_smdl.json'); + 'dummyData', 'model', ... + 'model-vislocalizer_smdl.json'); [~, opt, BIDS] = getData(opt); @@ -29,9 +29,9 @@ function test_createAndReturnOnsetFileBasic() onsetFileName = createAndReturnOnsetFile(opt, subID, funcFWHM, boldFileName, isMVPA); expectedFileName = fullfile(fileparts(mfilename('fullpath')), ... - 'dummyData', 'derivatives', 'SPM12_CPPL', 'sub-01', 'stats', ... - 'ffx_task-vislocalizer', 'ffx_FWHM-6', ... - 'onsets_sub-01_ses-01_task-vislocalizer_events.mat'); + 'dummyData', 'derivatives', 'SPM12_CPPL', 'sub-01', 'stats', ... + 'ffx_task-vislocalizer', 'ffx_FWHM-6', ... 
+ 'onsets_sub-01_ses-01_task-vislocalizer_events.mat'); assertEqual(exist(onsetFileName, 'file'), 2); assertEqual(exist(expectedFileName, 'file'), 2); diff --git a/tests/test_getBoldFilename.m b/tests/test_getBoldFilename.m index 29d93219..a04cd215 100644 --- a/tests/test_getBoldFilename.m +++ b/tests/test_getBoldFilename.m @@ -25,13 +25,14 @@ function test_getBoldFilenameBasic() runs = getInfo(BIDS, subID, opt, 'Runs', sessions{iSes}); [fileName, subFuncDataDir] = getBoldFilename( ... - BIDS, ... - subID, sessions{iSes}, runs{iRun}, opt); + BIDS, ... + subID, sessions{iSes}, runs{iRun}, opt); expectedFileName = 'sub-01_ses-01_task-vislocalizer_bold.nii'; expectedSubFuncDataDir = fullfile(fileparts(mfilename('fullpath')), ... - 'dummyData', 'derivatives', 'SPM12_CPPL', 'sub-01', 'ses-01', 'func'); + 'dummyData', 'derivatives', 'SPM12_CPPL', ... + 'sub-01', 'ses-01', 'func'); assertEqual(expectedSubFuncDataDir, subFuncDataDir); assertEqual(expectedFileName, fileName); diff --git a/tests/test_getBoldFilenameForFFX.m b/tests/test_getBoldFilenameForFFX.m index 2ef048f9..aaea3a61 100644 --- a/tests/test_getBoldFilenameForFFX.m +++ b/tests/test_getBoldFilenameForFFX.m @@ -23,8 +23,9 @@ function test_getBoldFilenameForFFXBasic() [boldFileName, prefix] = getBoldFilenameForFFX(BIDS, opt, subID, funcFWHM, iSes, iRun); expectedFileName = fullfile(fileparts(mfilename('fullpath')), ... - 'dummyData', 'derivatives', 'SPM12_CPPL', 'sub-01', 'ses-01', 'func', ... - 's6wsub-01_ses-01_task-vislocalizer_bold.nii'); + 'dummyData', 'derivatives', 'SPM12_CPPL', 'sub-01', ... + 'ses-01', 'func', ... + 's6wsub-01_ses-01_task-vislocalizer_bold.nii'); assertEqual('s6w', prefix); assertEqual({expectedFileName}, boldFileName); @@ -49,8 +50,9 @@ function test_getBoldFilenameForFFXNativeSpace() [boldFileName, prefix] = getBoldFilenameForFFX(BIDS, opt, subID, funcFWHM, iSes, iRun); expectedFileName = fullfile(fileparts(mfilename('fullpath')), ... 
- 'dummyData', 'derivatives', 'SPM12_CPPL', 'sub-01', 'ses-01', 'func', ... - 's6rsub-01_ses-01_task-vislocalizer_bold.nii'); + 'dummyData', 'derivatives', 'SPM12_CPPL', 'sub-01', ... + 'ses-01', 'func', ... + 's6rsub-01_ses-01_task-vislocalizer_bold.nii'); assertEqual('s6r', prefix); assertEqual({expectedFileName}, boldFileName); diff --git a/tests/test_getData.m b/tests/test_getData.m index ab701d7d..c70a1b97 100644 --- a/tests/test_getData.m +++ b/tests/test_getData.m @@ -22,7 +22,7 @@ function test_getDataBasic() assert(isequal(group(1).name, '')); assert(isequal(group.numSub, 6)); assert(isequal(group.subNumber, ... - {'01' '02' 'blind01' 'blind02' 'ctrl01' 'ctrl02'})); + {'01' '02' 'blind01' 'blind02' 'ctrl01' 'ctrl02'})); %% Get some subjects of some groups opt.groups = {'ctrl', 'blind'}; diff --git a/tests/test_getFFXdir.m b/tests/test_getFFXdir.m index 9437a726..1a188b3a 100644 --- a/tests/test_getFFXdir.m +++ b/tests/test_getFFXdir.m @@ -15,7 +15,8 @@ function test_getFFXdirBasic() opt.taskName = 'funcLocalizer'; expectedOutput = fullfile(fileparts(mfilename('fullpath')), 'dummyData', 'derivatives', ... - 'SPM12_CPPL', 'sub-01', 'stats', 'ffx_task-funcLocalizer', 'ffx_FWHM-0'); + 'SPM12_CPPL', 'sub-01', 'stats', 'ffx_task-funcLocalizer', ... + 'ffx_FWHM-0'); ffxDir = getFFXdir(subID, funcFWFM, opt, isMVPA); @@ -32,7 +33,8 @@ function test_getFFXdirMvpa() opt.taskName = 'nBack'; expectedOutput = fullfile(fileparts(mfilename('fullpath')), 'dummyData', 'derivatives', ... - 'SPM12_CPPL', 'sub-02', 'stats', 'ffx_task-nBack', 'ffx_FWHM-6_MVPA'); + 'SPM12_CPPL', 'sub-02', 'stats', 'ffx_task-nBack', ... 
+ 'ffx_FWHM-6_MVPA'); ffxDir = getFFXdir(subID, funcFWFM, opt, isMVPA); diff --git a/tests/test_getFuncVoxelDims.m b/tests/test_getFuncVoxelDims.m index 4c68bce6..b5f538ca 100644 --- a/tests/test_getFuncVoxelDims.m +++ b/tests/test_getFuncVoxelDims.m @@ -11,7 +11,7 @@ function test_getFuncVoxelDimsBasic() opt.funcVoxelDims = []; subFuncDataDir = fullfile(fileparts(mfilename('fullpath')), '..', 'demo', ... - 'output', 'MoAEpilot', 'sub-01', 'func'); + 'output', 'MoAEpilot', 'sub-01', 'func'); prefix = ''; @@ -32,7 +32,7 @@ function test_getFuncVoxelDimsForce() opt.funcVoxelDims = [1 1 1]; subFuncDataDir = fullfile(fileparts(mfilename('fullpath')), '..', 'demo', ... - 'output', 'MoAEpilot', 'sub-01', 'func'); + 'output', 'MoAEpilot', 'sub-01', 'func'); prefix = ''; diff --git a/tests/test_getGrpLevelContrastToCompute.m b/tests/test_getGrpLevelContrastToCompute.m index eed1495e..0097a722 100644 --- a/tests/test_getGrpLevelContrastToCompute.m +++ b/tests/test_getGrpLevelContrastToCompute.m @@ -10,15 +10,15 @@ function test_getGrpLevelContrastToComputeBasic() isMVPA = false; opt.model.univariate.file = fullfile(fileparts(mfilename('fullpath')), ... - 'dummyData', 'model', 'model-visMotionLoc_smdl.json'); + 'dummyData', 'model', 'model-visMotionLoc_smdl.json'); [grpLvlCon, iStep] = getGrpLevelContrastToCompute(opt, isMVPA); AutoContrasts = { - 'trial_type.VisMot'; ... - 'trial_type.VisStat'; ... - 'VisMot_gt_VisStat'; ... - 'VisStat_gt_VisMot'}; + 'trial_type.VisMot'; ... + 'trial_type.VisStat'; ... + 'VisMot_gt_VisStat'; ... + 'VisStat_gt_VisMot'}; assertEqual(iStep, 2); assertEqual(grpLvlCon, AutoContrasts); @@ -26,15 +26,15 @@ function test_getGrpLevelContrastToComputeBasic() %% isMVPA = true; opt.model.multivariate.file = fullfile(fileparts(mfilename('fullpath')), ... 
- 'dummyData', 'model', 'model-vislocalizer_smdl.json'); + 'dummyData', 'model', 'model-vislocalizer_smdl.json'); [grpLvlCon, iStep] = getGrpLevelContrastToCompute(opt, isMVPA); AutoContrasts = { - 'trial_type.VisMot'; ... - 'trial_type.VisStat'; ... - 'VisMot_gt_VisStat'; ... - 'VisStat_gt_VisMot'}; + 'trial_type.VisMot'; ... + 'trial_type.VisStat'; ... + 'VisMot_gt_VisStat'; ... + 'VisStat_gt_VisMot'}; assertEqual(iStep, 3); assertEqual(grpLvlCon, AutoContrasts); diff --git a/tests/test_getInfo.m b/tests/test_getInfo.m index 85d1132d..99ef0ea0 100644 --- a/tests/test_getInfo.m +++ b/tests/test_getInfo.m @@ -50,13 +50,13 @@ function test_getInfoBasic() [~, opt, BIDS] = getData(opt); filename = getInfo(BIDS, subID, opt, info, session, run, 'bold'); FileName = fullfile(fileparts(mfilename('fullpath')), 'dummyData', ... - 'derivatives', 'SPM12_CPPL', ... - ['sub-' subID], ['ses-' session], 'func', ... - ['sub-' subID, ... - '_ses-' session, ... - '_task-' opt.taskName, ... - '_run-' run, ... - '_bold.nii']); + 'derivatives', 'SPM12_CPPL', ... + ['sub-' subID], ['ses-' session], 'func', ... + ['sub-' subID, ... + '_ses-' session, ... + '_task-' opt.taskName, ... + '_run-' run, ... + '_bold.nii']); assert(strcmp(filename{1}, FileName)); diff --git a/tests/test_getPrefix.m b/tests/test_getPrefix.m index 207ae954..80a2e7af 100644 --- a/tests/test_getPrefix.m +++ b/tests/test_getPrefix.m @@ -99,9 +99,9 @@ function test_getPrefixFFX() opt.sliceOrder = []; expectedPrefxOutput = [ ... - spm_get_defaults('smooth.prefix'), ... - num2str(funcFWHM), ... - spm_get_defaults('normalise.write.prefix')]; + spm_get_defaults('smooth.prefix'), ... + num2str(funcFWHM), ... + spm_get_defaults('normalise.write.prefix')]; expectedMotionRegressorPrefix = ''; [prefix, motionRegressorPrefix] = getPrefix(step, opt, funcFWHM); @@ -119,9 +119,9 @@ function test_getPrefixFFXT1w() opt.sliceOrder = []; expectedPrefxOutput = [ ... - spm_get_defaults('smooth.prefix'), ... - num2str(funcFWHM), ... 
- spm_get_defaults('realign.write.prefix')]; + spm_get_defaults('smooth.prefix'), ... + num2str(funcFWHM), ... + spm_get_defaults('realign.write.prefix')]; expectedMotionRegressorPrefix = ''; [prefix, motionRegressorPrefix] = getPrefix(step, opt, funcFWHM); @@ -139,7 +139,7 @@ function test_getPrefixError() opt.sliceOrder = []; assertExceptionThrown( ... - @()getPrefix(step, opt, funcFWHM), ... - 'getPrefix:unknownPrefixCase'); + @()getPrefix(step, opt, funcFWHM), ... + 'getPrefix:unknownPrefixCase'); end diff --git a/tests/test_getRFXdir.m b/tests/test_getRFXdir.m index 6c0589c7..1cbeaaae 100644 --- a/tests/test_getRFXdir.m +++ b/tests/test_getRFXdir.m @@ -18,14 +18,14 @@ function test_getRFXdirBasic() rfxDir = getRFXdir(opt, funcFWHM, conFWHM, contrastName); expectedOutput = fullfile( ... - fileparts(mfilename('fullpath')), ... - 'dummyData', ... - 'derivatives', ... - 'SPM12_CPPL', ... - 'group', ... - 'rfx_task-funcLocalizer', ... - 'rfx_funcFWHM-0_conFWHM-0', ... - 'stim_gt_baseline'); + fileparts(mfilename('fullpath')), ... + 'dummyData', ... + 'derivatives', ... + 'SPM12_CPPL', ... + 'group', ... + 'rfx_task-funcLocalizer', ... + 'rfx_funcFWHM-0_conFWHM-0', ... + 'stim_gt_baseline'); assertEqual(exist(expectedOutput, 'dir'), 7); diff --git a/tests/test_getRealignParamFile.m b/tests/test_getRealignParamFile.m index 123ad729..708afa1b 100644 --- a/tests/test_getRealignParamFile.m +++ b/tests/test_getRealignParamFile.m @@ -24,8 +24,9 @@ function test_getRealignParamFileBasic() realignParamFile = getRealignParamFile(opt, boldFileName, funcFWHM); expectedFileName = fullfile(fileparts(mfilename('fullpath')), ... - 'dummyData', 'derivatives', 'SPM12_CPPL', 'sub-01', 'ses-01', 'func', ... - 'rp_sub-01_ses-01_task-vislocalizer_bold.txt'); + 'dummyData', 'derivatives', 'SPM12_CPPL', 'sub-01', ... + 'ses-01', 'func', ... 
+ 'rp_sub-01_ses-01_task-vislocalizer_bold.txt'); assertEqual(expectedFileName, realignParamFile); @@ -50,8 +51,9 @@ function test_getRealignParamFileNativeSpace() realignParamFile = getRealignParamFile(opt, boldFileName, funcFWHM); expectedFileName = fullfile(fileparts(mfilename('fullpath')), ... - 'dummyData', 'derivatives', 'SPM12_CPPL', 'sub-01', 'ses-01', 'func', ... - 'rp_sub-01_ses-01_task-vislocalizer_bold.txt'); + 'dummyData', 'derivatives', 'SPM12_CPPL', 'sub-01', ... + 'ses-01', 'func', ... + 'rp_sub-01_ses-01_task-vislocalizer_bold.txt'); assertEqual(expectedFileName, realignParamFile); @@ -76,7 +78,7 @@ function test_getRealignParamFileError() boldFileName = getBoldFilenameForFFX(BIDS, opt, subID, goodFuncFWHM, iSes, iRun); assertExceptionThrown( ... - @()getRealignParamFile(opt, boldFileName, badFuncFWHM), ... - 'getRealignParamFile:nonExistentFile'); + @()getRealignParamFile(opt, boldFileName, badFuncFWHM), ... + 'getRealignParamFile:nonExistentFile'); end diff --git a/tests/test_getSliceOrder.m b/tests/test_getSliceOrder.m index ffb77c93..59ddf4de 100644 --- a/tests/test_getSliceOrder.m +++ b/tests/test_getSliceOrder.m @@ -15,20 +15,20 @@ function test_getSliceOrderBasic() %% Get slice order from BIDS sliceOrder = repmat( ... - [0.5475; ... - 0; ... - 0.3825; ... - 0.0550; ... - 0.4375; ... - 0.1100; ... - 0.4925; ... - 0.2200; ... - 0.6025; ... - 0.2750; ... - 0.6575; ... - 0.3275; ... - 0.7100; ... - 0.1650], [3, 1]); + [0.5475; ... + 0; ... + 0.3825; ... + 0.0550; ... + 0.4375; ... + 0.1100; ... + 0.4925; ... + 0.2200; ... + 0.6025; ... + 0.2750; ... + 0.6575; ... + 0.3275; ... + 0.7100; ... 
+ 0.1650], [3, 1]); opt.taskName = 'vismotion'; [~, opt] = getData(opt); diff --git a/tests/test_inputFileValidation.m b/tests/test_inputFileValidation.m index 29aea063..07b62bb1 100644 --- a/tests/test_inputFileValidation.m +++ b/tests/test_inputFileValidation.m @@ -9,13 +9,13 @@ function test_inputFileValidationBasic() directory = fullfile(fileparts(mfilename('fullpath')), 'dummyData', 'derivatives', ... - 'SPM12_CPPL', 'sub-01', 'ses-01', 'func'); + 'SPM12_CPPL', 'sub-01', 'ses-01', 'func'); prefix = ''; fileName = 'sub-01_ses-01_task-vislocalizer_bold.nii'; expectedOutput = fullfile(fileparts(mfilename('fullpath')), 'dummyData', 'derivatives', ... - 'SPM12_CPPL', 'sub-01', 'ses-01', 'func', ... - 'sub-01_ses-01_task-vislocalizer_bold.nii'); + 'SPM12_CPPL', 'sub-01', 'ses-01', 'func', ... + 'sub-01_ses-01_task-vislocalizer_bold.nii'); file = inputFileValidation(directory, prefix, fileName); @@ -30,7 +30,7 @@ function test_inputFileValidationError() fileName = 'gibberish.nii.gz'; assertExceptionThrown( ... - @()inputFileValidation(directory, prefix, fileName), ... - 'inputFileValidation:nonExistentFile'); + @()inputFileValidation(directory, prefix, fileName), ... + 'inputFileValidation:nonExistentFile'); end diff --git a/tests/test_modelFiles.m b/tests/test_modelFiles.m index f14f429d..f15762cf 100644 --- a/tests/test_modelFiles.m +++ b/tests/test_modelFiles.m @@ -21,7 +21,7 @@ function test_modelFilesBasic() %% file = fullfile(fileparts(mfilename('fullpath')), '..', ... - 'model-visMotionLoc_smdl.json'); + 'model-visMotionLoc_smdl.json'); model = spm_jsonread(file); @@ -29,7 +29,7 @@ function test_modelFilesBasic() %% file = fullfile(fileparts(mfilename('fullpath')), '..', ... - 'model-motionDecodingUnivariate_smdl.json'); + 'model-motionDecodingUnivariate_smdl.json'); model = spm_jsonread(file); @@ -37,7 +37,7 @@ function test_modelFilesBasic() %% file = fullfile(fileparts(mfilename('fullpath')), '..', ... 
- 'model-motionDecodingMultivariate_smdl.json'); + 'model-motionDecodingMultivariate_smdl.json'); model = spm_jsonread(file); diff --git a/tests/test_saveMatlabBatch.m b/tests/test_saveMatlabBatch.m index 51acc575..ac4b8327 100644 --- a/tests/test_saveMatlabBatch.m +++ b/tests/test_saveMatlabBatch.m @@ -13,7 +13,8 @@ function test_saveMatlabBatchBasic() matlabbatch = struct('test', 1); expectedOutput = fullfile(pwd, 'sub-01', ... - [datestr(now, 'yyyymmdd_HHMM') '_jobs_matlabbatch_SPM12_test.mat']); + [datestr(now, 'yyyymmdd_HHMM') ... + '_jobs_matlabbatch_SPM12_test.mat']); saveMatlabBatch(matlabbatch, 'test', opt, subID); @@ -27,7 +28,8 @@ function test_saveMatlabBatchGroup() matlabbatch = struct('test', 1); expectedOutput = fullfile(pwd, 'group', ... - [datestr(now, 'yyyymmdd_HHMM') '_jobs_matlabbatch_SPM12_groupTest.mat']); + [datestr(now, 'yyyymmdd_HHMM') ... + '_jobs_matlabbatch_SPM12_groupTest.mat']); saveMatlabBatch(matlabbatch, 'groupTest', opt); diff --git a/tests/test_setBatchCoregistration.m b/tests/test_setBatchCoregistration.m index adaa015a..3afb468e 100644 --- a/tests/test_setBatchCoregistration.m +++ b/tests/test_setBatchCoregistration.m @@ -35,8 +35,8 @@ function test_setBatchCoregistrationBasic() 'Named File Selector: Structural(1) - Files'; expectedBatch{end}.spm.spatial.coreg.estimate.ref(1).src_exbranch = ... substruct( ... - '.', 'val', '{}', {1}, ... - '.', 'val', '{}', {1}); + '.', 'val', '{}', {1}, ... + '.', 'val', '{}', {1}); expectedBatch{end}.spm.spatial.coreg.estimate.ref(1).src_output = ... substruct('.', 'files', '{}', {1}); @@ -50,10 +50,10 @@ function test_setBatchCoregistrationBasic() 'Realign: Estimate & Reslice: Mean Image'; expectedBatch{end}.spm.spatial.coreg.estimate.source(1).src_exbranch = ... substruct( ... - '.', 'val', '{}', {2}, ... - '.', 'val', '{}', {1}, ... - '.', 'val', '{}', {1}, ... - '.', 'val', '{}', {1}); + '.', 'val', '{}', {2}, ... + '.', 'val', '{}', {1}, ... + '.', 'val', '{}', {1}, ... 
+ '.', 'val', '{}', {1}); expectedBatch{end}.spm.spatial.coreg.estimate.source(1).src_output = ... substruct('.', 'rmean'); @@ -72,14 +72,14 @@ function test_setBatchCoregistrationBasic() ['Realign: Estimate & Reslice: Realigned Images (Sess ' (iSes) ')']; expectedBatch{end}.spm.spatial.coreg.estimate.other(iSes).src_exbranch = ... substruct( ... - '.', 'val', '{}', {2}, ... - '.', 'val', '{}', {1}, ... - '.', 'val', '{}', {1}, ... - '.', 'val', '{}', {1}); + '.', 'val', '{}', {2}, ... + '.', 'val', '{}', {1}, ... + '.', 'val', '{}', {1}, ... + '.', 'val', '{}', {1}); expectedBatch{end}.spm.spatial.coreg.estimate.other(iSes).src_output = ... substruct( ... - '.', 'sess', '()', {iSes}, ... - '.', 'cfiles'); + '.', 'sess', '()', {iSes}, ... + '.', 'cfiles'); end end diff --git a/tests/test_setBatchNormalizationSpatialPrepro.m b/tests/test_setBatchNormalizationSpatialPrepro.m index 7837a65c..2a4b7db7 100644 --- a/tests/test_setBatchNormalizationSpatialPrepro.m +++ b/tests/test_setBatchNormalizationSpatialPrepro.m @@ -31,66 +31,66 @@ function test_setBatchNormalizationSpatialPreproBasic() for iJob = jobsToAdd:(jobsToAdd + 4) expectedBatch{iJob}.spm.spatial.normalise.write.subj.def(1) = ... cfg_dep('Segment: Forward Deformations', ... - substruct( ... - '.', 'val', '{}', {4}, ... - '.', 'val', '{}', {1}, ... - '.', 'val', '{}', {1}), ... - substruct('.', 'fordef', '()', {':'})); %#ok<*AGROW> + substruct( ... + '.', 'val', '{}', {4}, ... + '.', 'val', '{}', {1}, ... + '.', 'val', '{}', {1}), ... + substruct('.', 'fordef', '()', {':'})); %#ok<*AGROW> end expectedBatch{jobsToAdd}.spm.spatial.normalise.write.subj.resample(1) = ... cfg_dep('Coregister: Estimate: Coregistered Images', ... - substruct( ... - '.', 'val', '{}', {3}, ... - '.', 'val', '{}', {1}, ... - '.', 'val', '{}', {1}, ... - '.', 'val', '{}', {1}), ... - substruct('.', 'cfiles')); + substruct( ... + '.', 'val', '{}', {3}, ... + '.', 'val', '{}', {1}, ... + '.', 'val', '{}', {1}, ... 
+ '.', 'val', '{}', {1}), ... + substruct('.', 'cfiles')); expectedBatch{jobsToAdd}.spm.spatial.normalise.write.woptions.vox = voxDim; expectedBatch{jobsToAdd + 1}.spm.spatial.normalise.write.subj.resample(1) = ... cfg_dep('Segment: Bias Corrected (1)', ... - substruct( ... - '.', 'val', '{}', {4}, ... - '.', 'val', '{}', {1}, ... - '.', 'val', '{}', {1}), ... - substruct( ... - '.', 'channel', '()', {1}, ... - '.', 'biascorr', '()', {':'})); + substruct( ... + '.', 'val', '{}', {4}, ... + '.', 'val', '{}', {1}, ... + '.', 'val', '{}', {1}), ... + substruct( ... + '.', 'channel', '()', {1}, ... + '.', 'biascorr', '()', {':'})); expectedBatch{jobsToAdd + 1}.spm.spatial.normalise.write.woptions.vox = [1 1 1]; expectedBatch{jobsToAdd + 2}.spm.spatial.normalise.write.subj.resample(1) = ... cfg_dep('Segment: c1 Images', ... - substruct( ... - '.', 'val', '{}', {4}, ... - '.', 'val', '{}', {1}, ... - '.', 'val', '{}', {1}), ... - substruct( ... - '.', 'tiss', '()', {1}, ... - '.', 'c', '()', {':'})); + substruct( ... + '.', 'val', '{}', {4}, ... + '.', 'val', '{}', {1}, ... + '.', 'val', '{}', {1}), ... + substruct( ... + '.', 'tiss', '()', {1}, ... + '.', 'c', '()', {':'})); expectedBatch{jobsToAdd + 2}.spm.spatial.normalise.write.woptions.vox = voxDim; expectedBatch{jobsToAdd + 3}.spm.spatial.normalise.write.subj.resample(1) = ... cfg_dep('Segment: c2 Images', ... - substruct( ... - '.', 'val', '{}', {4}, ... - '.', 'val', '{}', {1}, ... - '.', 'val', '{}', {1}), ... - substruct( ... - '.', 'tiss', '()', {2}, ... - '.', 'c', '()', {':'})); + substruct( ... + '.', 'val', '{}', {4}, ... + '.', 'val', '{}', {1}, ... + '.', 'val', '{}', {1}), ... + substruct( ... + '.', 'tiss', '()', {2}, ... + '.', 'c', '()', {':'})); expectedBatch{jobsToAdd + 3}.spm.spatial.normalise.write.woptions.vox = voxDim; expectedBatch{jobsToAdd + 4}.spm.spatial.normalise.write.subj.resample(1) = ... cfg_dep('Segment: c3 Images', ... - substruct( ... - '.', 'val', '{}', {4}, ... 
- '.', 'val', '{}', {1}, ... - '.', 'val', '{}', {1}), ... - substruct( ... - '.', 'tiss', '()', {3}, ... - '.', 'c', '()', {':'})); + substruct( ... + '.', 'val', '{}', {4}, ... + '.', 'val', '{}', {1}, ... + '.', 'val', '{}', {1}), ... + substruct( ... + '.', 'tiss', '()', {3}, ... + '.', 'c', '()', {':'})); expectedBatch{jobsToAdd + 4}.spm.spatial.normalise.write.woptions.vox = voxDim; end diff --git a/tests/test_setBatchRealign.m b/tests/test_setBatchRealign.m index be122751..24e8e07e 100644 --- a/tests/test_setBatchRealign.m +++ b/tests/test_setBatchRealign.m @@ -20,10 +20,10 @@ function test_setBatchRealignBasic() runCounter = 1; for iSes = 1:2 fileName = spm_BIDS(BIDS, 'data', ... - 'sub', subID, ... - 'ses', sprintf('0%i', iSes), ... - 'task', opt.taskName, ... - 'type', 'bold'); + 'sub', subID, ... + 'ses', sprintf('0%i', iSes), ... + 'task', opt.taskName, ... + 'type', 'bold'); for iFile = 1:numel(fileName) [pth, nam, ext] = spm_fileparts(fileName{iFile}); diff --git a/tests/test_setBatchSTC.m b/tests/test_setBatchSTC.m index e39f0b7b..5f328891 100644 --- a/tests/test_setBatchSTC.m +++ b/tests/test_setBatchSTC.m @@ -38,10 +38,10 @@ function test_setBatchSTCForce() runCounter = 1; for iSes = 1:2 fileName = spm_BIDS(BIDS, 'data', ... - 'sub', subID, ... - 'ses', sprintf('0%i', iSes), ... - 'task', opt.taskName, ... - 'type', 'bold'); + 'sub', subID, ... + 'ses', sprintf('0%i', iSes), ... + 'task', opt.taskName, ... + 'type', 'bold'); expectedBatch{1}.spm.temporal.st.scans{runCounter} = {fileName{1}}; runCounter = runCounter + 1; end @@ -61,8 +61,9 @@ function test_setBatchSTCBasic() TR = 1.5; sliceOrder = repmat([ ... - 0.5475, 0, 0.3825, 0.055, 0.4375, 0.11, 0.4925, 0.22, 0.6025, 0.275, 0.6575, ... - 0.3275, 0.71, 0.165], 1, 3)'; + 0.5475, 0, 0.3825, 0.055, 0.4375, 0.11, 0.4925, 0.22, 0.6025, ... + 0.275, 0.6575, ... 
+ 0.3275, 0.71, 0.165], 1, 3)'; STC_referenceSlice = 0.355; expectedBatch = returnExpectedBatch(sliceOrder, STC_referenceSlice, TR); @@ -70,10 +71,10 @@ function test_setBatchSTCBasic() runCounter = 1; for iSes = 1:2 fileName = spm_BIDS(BIDS, 'data', ... - 'sub', subID, ... - 'ses', sprintf('0%i', iSes), ... - 'task', opt.taskName, ... - 'type', 'bold'); + 'sub', subID, ... + 'ses', sprintf('0%i', iSes), ... + 'task', opt.taskName, ... + 'type', 'bold'); expectedBatch{1}.spm.temporal.st.scans{runCounter} = ... {fileName{1}}; expectedBatch{1}.spm.temporal.st.scans{runCounter + 1} = ... diff --git a/tests/test_setBatchSegmentation.m b/tests/test_setBatchSegmentation.m index b568a949..101f1f2f 100644 --- a/tests/test_setBatchSegmentation.m +++ b/tests/test_setBatchSegmentation.m @@ -27,12 +27,12 @@ function test_setBatchSegmentationBasic() expectedBatch{end + 1}.spm.spatial.preproc.channel.vols(1) = ... cfg_dep('Named File Selector: Structural(1) - Files', ... - substruct( ... - '.', 'val', '{}', {1}, ... - '.', 'val', '{}', {1}, ... - '.', 'val', '{}', {1}, ... - '.', 'val', '{}', {1}), ... - substruct('.', 'files', '{}', {1})); + substruct( ... + '.', 'val', '{}', {1}, ... + '.', 'val', '{}', {1}, ... + '.', 'val', '{}', {1}, ... + '.', 'val', '{}', {1}), ... + substruct('.', 'files', '{}', {1})); expectedBatch{end}.spm.spatial.preproc.channel.biasreg = 0.001; expectedBatch{end}.spm.spatial.preproc.channel.biasfwhm = 60; expectedBatch{end}.spm.spatial.preproc.channel.write = [0 1]; diff --git a/tests/test_specifyContrasts.m b/tests/test_specifyContrasts.m index a707143f..e9e4fc94 100644 --- a/tests/test_specifyContrasts.m +++ b/tests/test_specifyContrasts.m @@ -13,7 +13,7 @@ function test_specifyContrastsBasic() opt.taskName = 'visMotion'; opt.model.univariate.file = ... fullfile(fileparts(mfilename('fullpath')), ... 
- 'dummyData', 'model', 'model-visMotionLoc_smdl.json'); + 'dummyData', 'model', 'model-visMotionLoc_smdl.json'); ffxDir = fullfile(opt.dataDir, 'SPM12_CPPL', 'sub-01', 'stats', 'ffx_visMotion', 'ffx_6'); From 47e95f63f4cd40a02314860f076b62d6c13a50e8 Mon Sep 17 00:00:00 2001 From: Remi Gau Date: Thu, 24 Sep 2020 17:05:24 +0200 Subject: [PATCH 3/6] fix CI --- .travis.yml | 2 ++ npm-requirements.txt | 1 - 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index ef870536..12193b07 100644 --- a/.travis.yml +++ b/.travis.yml @@ -72,4 +72,6 @@ jobs: before_script: - npm install `cat npm-requirements.txt` script: + - pwd + - ls -a - remark . --frail \ No newline at end of file diff --git a/npm-requirements.txt b/npm-requirements.txt index 0584fc50..061f9ade 100644 --- a/npm-requirements.txt +++ b/npm-requirements.txt @@ -1,4 +1,3 @@ -# list of javascript package to install for the remark markdonw linter remark-cli@5.0.0 remark-lint@6.0.2 remark-preset-lint-recommended@3.0.2 From b39779418dc204bf3bdbd30a2b207c0519f6b107 Mon Sep 17 00:00:00 2001 From: Remi Gau Date: Thu, 24 Sep 2020 17:16:50 +0200 Subject: [PATCH 4/6] fix CI --- .travis.yml | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/.travis.yml b/.travis.yml index 12193b07..0f6781b4 100644 --- a/.travis.yml +++ b/.travis.yml @@ -69,9 +69,10 @@ jobs: # naming the Linter stage - stage: Markdown linter name: "Check markdown" + language: node_js + node_js: + - "10" before_script: - npm install `cat npm-requirements.txt` script: - - pwd - - ls -a - remark . 
--frail \ No newline at end of file From 6a91352690420105539a057fb55a7df26bee39a3 Mon Sep 17 00:00:00 2001 From: Remi Gau Date: Thu, 24 Sep 2020 17:37:19 +0200 Subject: [PATCH 5/6] lint markdown --- CHANGELOG.md | 195 ++++++++++++------ README.md | 360 +++++++++++++++++++++------------- tests/README.md | 4 +- tests/dummyData/raw/README.md | 2 +- 4 files changed, 371 insertions(+), 190 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index e705c477..a63e8576 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,19 +6,37 @@ **Closed issues:** -- add code coverage [\#77](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/issues/77) -- getData :Accommidate scripts for structural analysis \(without funtional\). [\#68](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/issues/68) +- add code coverage + [\#77](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/issues/77) +- getData :Accommidate scripts for structural analysis \(without funtional\). + [\#68](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/issues/68) **Merged pull requests:** -- major refactoring reporting for duty [\#79](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/pull/79) ([Remi-Gau](https://github.com/Remi-Gau)) -- fix linter and use Mox unit for testing [\#76](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/pull/76) ([Remi-Gau](https://github.com/Remi-Gau)) -- reorganize and set up MH linter [\#75](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/pull/75) ([Remi-Gau](https://github.com/Remi-Gau)) -- add octave in dependencies [\#71](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/pull/71) ([Remi-Gau](https://github.com/Remi-Gau)) -- allow getData to query only anatomical data [\#69](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/pull/69) ([Remi-Gau](https://github.com/Remi-Gau)) -- update print credit [\#63](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/pull/63) ([Remi-Gau](https://github.com/Remi-Gau)) -- update DOI 
[\#62](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/pull/62) ([Remi-Gau](https://github.com/Remi-Gau)) -- update DOI badge [\#61](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/pull/61) ([Remi-Gau](https://github.com/Remi-Gau)) +- major refactoring reporting for duty + [\#79](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/pull/79) + ([Remi-Gau](https://github.com/Remi-Gau)) +- fix linter and use Mox unit for testing + [\#76](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/pull/76) + ([Remi-Gau](https://github.com/Remi-Gau)) +- reorganize and set up MH linter + [\#75](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/pull/75) + ([Remi-Gau](https://github.com/Remi-Gau)) +- add octave in dependencies + [\#71](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/pull/71) + ([Remi-Gau](https://github.com/Remi-Gau)) +- allow getData to query only anatomical data + [\#69](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/pull/69) + ([Remi-Gau](https://github.com/Remi-Gau)) +- update print credit + [\#63](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/pull/63) + ([Remi-Gau](https://github.com/Remi-Gau)) +- update DOI + [\#62](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/pull/62) + ([Remi-Gau](https://github.com/Remi-Gau)) +- update DOI badge + [\#61](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/pull/61) + ([Remi-Gau](https://github.com/Remi-Gau)) ## [v0.0.3](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/tree/v0.0.3) (2019-11-28) @@ -26,7 +44,9 @@ **Merged pull requests:** -- fix RFX issues and add credits [\#60](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/pull/60) ([Remi-Gau](https://github.com/Remi-Gau)) +- fix RFX issues and add credits + [\#60](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/pull/60) + ([Remi-Gau](https://github.com/Remi-Gau)) ## [v0.0.2](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/tree/v0.0.2) (2019-11-26) @@ -34,8 +54,12 @@ **Merged pull requests:** -- Create LICENSE 
[\#58](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/pull/58) ([Remi-Gau](https://github.com/Remi-Gau)) -- fix problem when task JSON file is missing from root folder [\#56](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/pull/56) ([Remi-Gau](https://github.com/Remi-Gau)) +- Create LICENSE + [\#58](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/pull/58) + ([Remi-Gau](https://github.com/Remi-Gau)) +- fix problem when task JSON file is missing from root folder + [\#56](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/pull/56) + ([Remi-Gau](https://github.com/Remi-Gau)) ## [v0.0.1](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/tree/v0.0.1) (2019-11-13) @@ -43,51 +67,112 @@ **Implemented enhancements:** -- refactor getRuns, getSessions, getFilenames [\#9](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/issues/9) -- find a way to specify the contrasts to compute [\#11](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/issues/11) -- Complete boiler plate methods section of the README [\#17](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/pull/17) ([Remi-Gau](https://github.com/Remi-Gau)) -- Big PR to fix previous PR, update FFX and RFX [\#2](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/pull/2) ([Remi-Gau](https://github.com/Remi-Gau)) +- refactor getRuns, getSessions, getFilenames + [\#9](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/issues/9) +- find a way to specify the contrasts to compute + [\#11](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/issues/11) +- Complete boiler plate methods section of the README + [\#17](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/pull/17) + ([Remi-Gau](https://github.com/Remi-Gau)) +- Big PR to fix previous PR, update FFX and RFX + [\#2](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/pull/2) + ([Remi-Gau](https://github.com/Remi-Gau)) **Closed issues:** -- spm\_jsonread issue [\#54](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/issues/54) -- \[WORKSHOP 
2019\] - to do list [\#48](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/issues/48) -- \[WORKSHOP 2019\] - MVPA [\#47](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/issues/47) -- \[WORKSHOP 2019\] - sharing data [\#46](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/issues/46) -- changes way subjects are indexed in getData [\#44](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/issues/44) -- Add Moh as contributors [\#37](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/issues/37) -- add Marco as contributors [\#35](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/issues/35) -- getData issue with 2 groups [\#22](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/issues/22) -- create a function to checks options and set some defaults if none are specified [\#15](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/issues/15) -- use SPM BIDS data set from SPM tuto to test and make a simple demo [\#7](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/issues/7) -- test with octave [\#5](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/issues/5) -- export final results as NIDM results to facilitate upload to neurovault [\#52](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/issues/52) -- Consider to unzip the bold files in a separate function from BIDS\_rmDumies.m [\#26](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/issues/26) -- create a function that copies the raw data into a derivatives/SPM12-CPP directory [\#25](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/issues/25) +- spm_jsonread issue + [\#54](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/issues/54) +- \[WORKSHOP 2019\] - to do list + [\#48](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/issues/48) +- \[WORKSHOP 2019\] - MVPA + [\#47](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/issues/47) +- \[WORKSHOP 2019\] - sharing data + [\#46](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/issues/46) +- changes way subjects are indexed 
in getData + [\#44](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/issues/44) +- Add Moh as contributors + [\#37](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/issues/37) +- add Marco as contributors + [\#35](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/issues/35) +- getData issue with 2 groups + [\#22](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/issues/22) +- create a function to checks options and set some defaults if none are + specified + [\#15](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/issues/15) +- use SPM BIDS data set from SPM tuto to test and make a simple demo + [\#7](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/issues/7) +- test with octave + [\#5](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/issues/5) +- export final results as NIDM results to facilitate upload to neurovault + [\#52](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/issues/52) +- Consider to unzip the bold files in a separate function from BIDS_rmDumies.m + [\#26](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/issues/26) +- create a function that copies the raw data into a derivatives/SPM12-CPP + directory + [\#25](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/issues/25) **Merged pull requests:** -- BIDS results [\#55](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/pull/55) ([Remi-Gau](https://github.com/Remi-Gau)) -- \[WIP\] constrast specification + uni and multivaraite issues + remove dummies [\#51](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/pull/51) ([Remi-Gau](https://github.com/Remi-Gau)) -- fix some details in getData to pass tests and make octave compatible [\#45](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/pull/45) ([Remi-Gau](https://github.com/Remi-Gau)) -- fix indexing issue in getData [\#43](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/pull/43) ([Remi-Gau](https://github.com/Remi-Gau)) -- docs: add OliColli as a contributor 
[\#40](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/pull/40) ([allcontributors[bot]](https://github.com/apps/allcontributors)) -- docs: add anege as a contributor [\#39](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/pull/39) ([allcontributors[bot]](https://github.com/apps/allcontributors)) -- docs: add mohmdrezk as a contributor [\#38](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/pull/38) ([allcontributors[bot]](https://github.com/apps/allcontributors)) -- docs: add marcobarilari as a contributor [\#36](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/pull/36) ([allcontributors[bot]](https://github.com/apps/allcontributors)) -- docs: add Remi-Gau as a contributor [\#30](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/pull/30) ([allcontributors[bot]](https://github.com/apps/allcontributors)) -- Add another test to getData and improve README [\#28](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/pull/28) ([Remi-Gau](https://github.com/Remi-Gau)) -- edit info about step1: 'Remove Dummy Scans' [\#27](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/pull/27) ([marcobarilari](https://github.com/marcobarilari)) -- update doc [\#24](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/pull/24) ([Remi-Gau](https://github.com/Remi-Gau)) -- - fix getData issue when there are multiple groups [\#23](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/pull/23) ([mohmdrezk](https://github.com/mohmdrezk)) -- Ane getsessions merge [\#19](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/pull/19) ([anege](https://github.com/anege)) -- Dockerfile and option defaults [\#16](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/pull/16) ([Remi-Gau](https://github.com/Remi-Gau)) -- Containerization of the pipeline with docker and octave [\#13](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/pull/13) ([Remi-Gau](https://github.com/Remi-Gau)) -- fix crash on batch\_download\_run 
[\#12](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/pull/12) ([Remi-Gau](https://github.com/Remi-Gau)) -- Initial preparartion for Workshop scripts for Motion dataset [\#10](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/pull/10) ([mohmdrezk](https://github.com/mohmdrezk)) -- delete extra getOptions [\#8](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/pull/8) ([Remi-Gau](https://github.com/Remi-Gau)) -- Split into 3 repos [\#1](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/pull/1) ([Remi-Gau](https://github.com/Remi-Gau)) - - - -\* *This Changelog was automatically generated by [github_changelog_generator](https://github.com/github-changelog-generator/github-changelog-generator)* +- BIDS results + [\#55](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/pull/55) + ([Remi-Gau](https://github.com/Remi-Gau)) +- \[WIP\] constrast specification + uni and multivaraite issues + remove + dummies [\#51](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/pull/51) + ([Remi-Gau](https://github.com/Remi-Gau)) +- fix some details in getData to pass tests and make octave compatible + [\#45](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/pull/45) + ([Remi-Gau](https://github.com/Remi-Gau)) +- fix indexing issue in getData + [\#43](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/pull/43) + ([Remi-Gau](https://github.com/Remi-Gau)) +- docs: add OliColli as a contributor + [\#40](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/pull/40) + ([allcontributors[bot]](https://github.com/apps/allcontributors)) +- docs: add anege as a contributor + [\#39](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/pull/39) + ([allcontributors[bot]](https://github.com/apps/allcontributors)) +- docs: add mohmdrezk as a contributor + [\#38](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/pull/38) + ([allcontributors[bot]](https://github.com/apps/allcontributors)) +- docs: add marcobarilari as a contributor + 
[\#36](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/pull/36) + ([allcontributors[bot]](https://github.com/apps/allcontributors)) +- docs: add Remi-Gau as a contributor + [\#30](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/pull/30) + ([allcontributors[bot]](https://github.com/apps/allcontributors)) +- Add another test to getData and improve README + [\#28](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/pull/28) + ([Remi-Gau](https://github.com/Remi-Gau)) +- edit info about step1: 'Remove Dummy Scans' + [\#27](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/pull/27) + ([marcobarilari](https://github.com/marcobarilari)) +- update doc + [\#24](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/pull/24) + ([Remi-Gau](https://github.com/Remi-Gau)) +- - fix getData issue when there are multiple groups + [\#23](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/pull/23) + ([mohmdrezk](https://github.com/mohmdrezk)) +- Ane getsessions merge + [\#19](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/pull/19) + ([anege](https://github.com/anege)) +- Dockerfile and option defaults + [\#16](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/pull/16) + ([Remi-Gau](https://github.com/Remi-Gau)) +- Containerization of the pipeline with docker and octave + [\#13](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/pull/13) + ([Remi-Gau](https://github.com/Remi-Gau)) +- fix crash on batch_download_run + [\#12](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/pull/12) + ([Remi-Gau](https://github.com/Remi-Gau)) +- Initial preparartion for Workshop scripts for Motion dataset + [\#10](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/pull/10) + ([mohmdrezk](https://github.com/mohmdrezk)) +- delete extra getOptions + [\#8](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/pull/8) + ([Remi-Gau](https://github.com/Remi-Gau)) +- Split into 3 repos + [\#1](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/pull/1) + 
([Remi-Gau](https://github.com/Remi-Gau)) + +\* _This Changelog was automatically generated by +[github_changelog_generator](https://github.com/github-changelog-generator/github-changelog-generator)_ diff --git a/README.md b/README.md index 96df16c9..420f4d77 100644 --- a/README.md +++ b/README.md @@ -1,7 +1,7 @@ **Code quality and style** [![](https://img.shields.io/badge/Octave-CI-blue?logo=Octave&logoColor=white)](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/actions) -![](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/workflows/CI/badge.svg) +![](https://github.com/cpp-lln-lab/CPP_BIDS_SPM_pipeline/workflows/CI/badge.svg) **Unit tests and coverage** @@ -13,144 +13,171 @@ [![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.3556173.svg)](https://doi.org/10.5281/zenodo.3556173) **Contributors** + + [![All Contributors](https://img.shields.io/badge/all_contributors-5-orange.svg?style=flat-square)](#contributors) + # Instructions for SPM12 Preprocessing Pipeline -- [Instructions for SPM12 Preprocessing Pipeline](#instructions-for-spm12-preprocessing-pipeline) - - [Dependencies](#dependencies) - - [General description](#general-description) - - [Assumption](#assumption) - - [Setting up](#setting-up) - - [getOptions](#getoptions) - - [spm_my_defaults](#spm_my_defaults) - - [model JSON files](#model-json-files) - - [Order of the analysis](#order-of-the-analysis) - - [Docker](#docker) - - [build docker image](#build-docker-image) - - [run docker image](#run-docker-image) - - [MRIQC](#mriqc) - - [Details about some steps](#details-about-some-steps) - - [Slice timing correction](#slice-timing-correction) - - [Boiler plate methods section](#boiler-plate-methods-section) - - [Preprocessing](#preprocessing) - - [fMRI data analysis](#fmri-data-analysis) - - [References](#references) - - [Unit testing](#unit-testing) - - [Changelog](#changelog) - - [Contributors ✨](#contributors-) +- [Instructions for SPM12 Preprocessing 
Pipeline](#instructions-for-spm12-preprocessing-pipeline) + - [Dependencies](#dependencies) + - [General description](#general-description) + - [Assumption](#assumption) + - [Setting up](#setting-up) + - [getOptions](#getoptions) + - [spm_my_defaults](#spm_my_defaults) + - [model JSON files](#model-json-files) + - [Order of the analysis](#order-of-the-analysis) + - [Docker](#docker) + - [build docker image](#build-docker-image) + - [run docker image](#run-docker-image) + - [MRIQC](#mriqc) + - [Details about some steps](#details-about-some-steps) + - [Slice timing correction](#slice-timing-correction) + - [Boiler plate methods section](#boiler-plate-methods-section) + - [Preprocessing](#preprocessing) + - [fMRI data analysis](#fmri-data-analysis) + - [References](#references) + - [Unit testing](#unit-testing) + - [Changelog](#changelog) + - [Contributors ✨](#contributors-) - ## Dependencies -Make sure that the following toolboxes are installed and added to the matlab path. +Make sure that the following toolboxes are installed and added to the matlab +path. For instructions see the following links: | Dependencies | Used version | -|-------------------------------------------------------------------------------------------|--------------| +| ----------------------------------------------------------------------------------------- | ------------ | | [Matlab](https://www.mathworks.com/products/matlab.html) | 20??? | | or [octave](https://www.gnu.org/software/octave/) | 4.? | | [SPM12](https://www.fil.ion.ucl.ac.uk/spm/software/spm12/) | v7487 | | [Tools for NIfTI and ANALYZE image toolbox](https://github.com/sergivalverde/nifti_tools) | NA | -For simplicity the NIfTI tools toolbox has been added to this repo in the `subfun` folder. - +For simplicity the NIfTI tools toolbox has been added to this repo in the +`subfun` folder. ## General description -This set of function will read and unzip the data from a [BIDS data set](https://bids.neuroimaging.io/). 
It will then perform: +This set of function will read and unzip the data from a +[BIDS data set](https://bids.neuroimaging.io/). It will then perform: + - slice timing correction, - spatial preprocessing (realignment, normalization to MNI space), - smoothing, - GLM at the subject level and - GLM at the group level a la SPM (i.e summary statistics approach). -This has to be run for each task independently. All parameters should preferably be changed in the `getOptions.m` file. +This has to be run for each task independently. All parameters should preferably +be changed in the `getOptions.m` file. -It can also prepare the data to run an MVPA analysis by running a GLM for each subject on non-normalized images and get one beta image for each condition to be used in the MVPA. +It can also prepare the data to run an MVPA analysis by running a GLM for each +subject on non-normalized images and get one beta image for each condition to be +used in the MVPA. The core functions are in the sub-function folder `subfun` - ## Assumption At the moment this pipeline makes some assumptions: -- it assumes that the dummy scans have been removed from the BIDS data set and it can jump straight into pre-processing, -- it assumes the metadata for a given task are the same as those the first run of the first subject this pipeline is being run on, -- it assumes that group are defined in the subject field (eg `sub-ctrl01`, `sub-blind01`, ...) and not in the `participants.tsv` file. +- it assumes that the dummy scans have been removed from the BIDS data set and + it can jump straight into pre-processing, +- it assumes the metadata for a given task are the same as those the first run + of the first subject this pipeline is being run on, +- it assumes that group are defined in the subject field (eg `sub-ctrl01`, + `sub-blind01`, ...) and not in the `participants.tsv` file. ## Setting up ### getOptions - -All the details specific to your analysis should be set in the `getOptions.m`. 
There is a getOption_template file that shows you would set up the getOption file if one wanted to analyse the [ds001 data set from OpenNeuro](https://openneuro.org/datasets/ds000001/versions/57fecb0ccce88d000ac17538).
+All the details specific to your analysis should be set in the `getOptions.m`.
+There is a getOption_template file that shows how you would set up the getOption
+file if one wanted to analyse the
+[ds001 data set from OpenNeuro](https://openneuro.org/datasets/ds000001/versions/57fecb0ccce88d000ac17538).

Set the group of subjects to analyze.
+
```
opt.groups = {'control', 'blind'};
```

-If there are no groups (i.e subjects names are of the form `sub-01` for example) or if you want to run all subjects of all groups then use:
+If there are no groups (i.e subjects names are of the form `sub-01` for example)
+or if you want to run all subjects of all groups then use:
+
```matlab
opt.groups = {''};
opt.subjects = {[]};
```

-If you have 2 groups (`cont` and `cat` for example) the following will run cont01, cont02, cat03, cat04..
+If you have 2 groups (`cont` and `cat` for example) the following will run
+cont01, cont02, cat03, cat04..
+
```matlab
opt.groups = {'cont', 'cat'};
opt.subjects = {[1 2], [3 4]};
```

-If you have more than 2 groups but want to only run the subjects of 2 groups then you can use.
+
+If you have more than 2 groups but want to only run the subjects of 2 groups
+then you can use.
+ ```matlab opt.groups = {'cont', 'cat'}; opt.subjects = {[], []}; ``` -You can also directly specify the subject label for the participants you want to run + +You can also directly specify the subject label for the participants you want to +run + ```matlab opt.groups = {''}; opt.subjects = {'01', 'cont01', 'cat02', 'ctrl02', 'blind01'}; ``` -Set the task to analyze in the BIDS data set -`opt.taskName = 'auditory'` +Set the task to analyze in the BIDS data set `opt.taskName = 'auditory'` -The directory where your files are located on your computer: make sure you have a copy of the data set as this pipeline will change it. +The directory where your files are located on your computer: make sure you have +a copy of the data set as this pipeline will change it. `opt.derivativesDir = '/Data/auditoryBIDS/derivatives'` #### spm_my_defaults - Some more SPM options can be set in the `spm_my_defaults.m`. -- Use of FAST and not AR1 for auto-correlation modelisation +- Use of FAST and not AR1 for auto-correlation modelisation -Using FAST does not seem to affect results on time series with "normal" TRs but -improves results when using sequences: it is therefore used by default in this +Using FAST does not seem to affect results on time series with "normal" TRs but +improves results when using sequences: it is therefore used by default in this pipeline. -> Olszowy, W., Aston, J., Rua, C. et al. Accurate autocorrelation modeling -> substantially improves fMRI reliability. Nat Commun 10, 1220 (2019). +> Olszowy, W., Aston, J., Rua, C. et al. Accurate autocorrelation modeling +> substantially improves fMRI reliability. Nat Commun 10, 1220 (2019). 
> https://doi.org/10.1038/s41467-019-09230-w ### model JSON files -This files allow you to specify which contrasts to run and follow the BIDS statistical model extension and as implement by [fitlins](https://fitlins.readthedocs.io/en/latest/model.html) + +This files allow you to specify which contrasts to run and follow the BIDS +statistical model extension and as implement by +[fitlins](https://fitlins.readthedocs.io/en/latest/model.html) The model json file that describes: + - out to prepare the regressors for the GLM: `Transformation` - the design matrix: `X` - the contrasts to compute: `contrasts` It also allows to specify those for different levels of the analysis: + - run - session - subject @@ -160,104 +187,121 @@ An example of json file could look something like that. ```json { - "Name": "Basic", - "Description": "", - "Input": { - "task": "motionloc" - }, - "Steps": [ - { - "Level": "subject", - "AutoContrasts": ["stim_type.motion", "stim_type.static"], - "Contrasts": [ + "Name": "Basic", + "Description": "", + "Input": { + "task": "motionloc" + }, + "Steps": [ { - "Name": "motion_vs_static", - "ConditionList": [ - "stim_type.motion", - "stim_type.static" - ], - "weights": [1, -1], - "type": "t" + "Level": "subject", + "AutoContrasts": ["stim_type.motion", "stim_type.static"], + "Contrasts": [ + { + "Name": "motion_vs_static", + "ConditionList": ["stim_type.motion", "stim_type.static"], + "weights": [1, -1], + "type": "t" + } + ] + }, + { + "Level": "dataset", + "AutoContrasts": [ + "stim_type.motion", + "stim_type.static", + "motion_vs_static" + ] } - ] - }, - { - "Level": "dataset", - "AutoContrasts": ["stim_type.motion", "stim_type.static", "motion_vs_static"] - } - ] + ] } ``` In brief this means: -- at the subject level automatically compute the t contrast against baseline for the condition `motion`and `static` and compute the t-contrats for motion VS static with these given weights. 
-- at the level of the data set (so RFX) do the t contrast of the `motion`, `static`, `motion VS static`.
-We are currently using this to run different subject level GLM models for our univariate and multivariate analysis where in the first one we compute a con image that averages the beta image of all the runs where as in the latter case we get one con image for each run.
+- at the subject level automatically compute the t contrast against baseline
+  for the condition `motion` and `static` and compute the t-contrasts for motion
+  VS static with these given weights.
+- at the level of the data set (so RFX) do the t contrast of the `motion`,
+  `static`, `motion VS static`.

+We are currently using this to run different subject level GLM models for our
+univariate and multivariate analysis where in the first one we compute a con
+image that averages the beta image of all the runs where as in the latter case
+we get one con image for each run.

## Order of the analysis

-1. __Remove Dummy Scans__:
-Unzip bold files and removes dummy scans by running the script (to be run even if `opt.numDummies` set to `0`): `BIDS_rmDummies.m`
+1. **Remove Dummy Scans**: Unzip bold files and removes dummy scans by running
+   the script (to be run even if `opt.numDummies` set to `0`):
+   `BIDS_rmDummies.m`

-2. __Slice Time Correction__: Performs Slice Time Correction (STC) of the functional volumes by running the script: `BIDS_STC.m`
+2. **Slice Time Correction**: Performs Slice Time Correction (STC) of the
+   functional volumes by running the script: `BIDS_STC.m`

-STC will be performed using the information provided in the BIDS data set. It will use the mid-volume acquisition time point as as reference.
+STC will be performed using the information provided in the BIDS data set. It
+will use the mid-volume acquisition time point as a reference.
-The `getOption.m` fields related to STC can still be used to do some slice timing correction even no information is can be found in the BIDS data set.
+The `getOption.m` fields related to STC can still be used to do some slice
+timing correction even when no information can be found in the BIDS data set.

-In general slice order and reference slice is entered in time unit (ms) (this is the BIDS way of doing things) instead of the slice index of the reference slice (the "SPM" way of doing things).
+In general slice order and reference slice is entered in time unit (ms) (this is
+the BIDS way of doing things) instead of the slice index of the reference slice
+(the "SPM" way of doing things).

-More info available on this page of the [SPM wikibook](https://en.wikibooks.org/wiki/SPM/Slice_Timing).
+More info available on this page of the
+[SPM wikibook](https://en.wikibooks.org/wiki/SPM/Slice_Timing).

-3. __Spatial Preprocessing__:
-Performs spatial preprocessing by running the script: `BIDS_SpatialPrepro.m`
+3. **Spatial Preprocessing**: Performs spatial preprocessing by running the
+   script: `BIDS_SpatialPrepro.m`

-4. __SMOOTHING__:
-Performs smoothing of the functional data by running the script: `BIDS_Smoothing.m`
+4. **SMOOTHING**: Performs smoothing of the functional data by running the
+   script: `BIDS_Smoothing.m`

-5. __FIXED EFFECTS ANALYSIS (FIRST-LEVEL ANALYSIS)__:
-Performs the fixed effects analysis by running the ffx script: `BIDS_FFX.m`
+5. **FIXED EFFECTS ANALYSIS (FIRST-LEVEL ANALYSIS)**: Performs the fixed
+   effects analysis by running the ffx script: `BIDS_FFX.m`

-This will run twice, once for model specification and another time for model estimation. See the function for more details.
+This will run twice, once for model specification and another time for model
+estimation. See the function for more details.

-This will take each condition present in the `events.tsv` file of each run and convolve it with a canonical HRF. 
It will also add the 6 realignment parameters of every run as confound regressors. +This will take each condition present in the `events.tsv` file of each run and +convolve it with a canonical HRF. It will also add the 6 realignment parameters +of every run as confound regressors. -6. __RANDOM EFFECTS ANALYSIS (SECOND-LEVEL ANALYSIS)__: -Performs the random effects analysis by running the RFX script: `BIDS_RFX.m` +6. **RANDOM EFFECTS ANALYSIS (SECOND-LEVEL ANALYSIS)**: Performs the random + effects analysis by running the RFX script: `BIDS_RFX.m` -7. __GET THE RESULTS FROM A SPECIFIC CONTRAST__: `BIDS_Results.m` - -- See __"batch.m"__ for examples and for the order of the scripts. -- See __"batch_dowload_run.m"__ for an example of how to download a data set and analyze it all in one go. +7. **GET THE RESULTS FROM A SPECIFIC CONTRAST**: `BIDS_Results.m` +- See **"batch.m"** for examples and for the order of the scripts. +- See **"batch_dowload_run.m"** for an example of how to download a data set + and analyze it all in one go. ## Docker The recipe to build the docker image is in the `Dockerfile` - ### build docker image To build the image with with octave and SPM the `Dockerfile` just type : -`docker build -t cpp_spm:0.0.1 .` +`docker build -t cpp_spm:0.0.1 .` This will create an image with the tag name `cpp_spm_octave:0.0.1` - ### run docker image -The following code would start the docker image and would map 2 folders one for `output` and one for `code` you want to run. +The following code would start the docker image and would map 2 folders one for +`output` and one for `code` you want to run. 
-``` bash +```bash docker run -it --rm \ -v [output_folder]:/output \ -v [code_folder]:/code cpp_spm:0.0.1 ``` -To test it you can copy the `batch_download_run.m` file in the `code` folder on your computer and then start running the docker and type: +To test it you can copy the `batch_download_run.m` file in the `code` folder on +your computer and then start running the docker and type: ```bash cd /code # to change to the code folder inside the container (running the command 'ls' should show only batch_download_run.m ) @@ -276,69 +320,116 @@ docker run -it --rm -v $data_dir/raw:/data:ro -v $data_dir:/out poldracklab/mriq ## Details about some steps - ### Slice timing correction -BELOW: some comments from [here](http://mindhive.mit.edu/node/109) on STC, when it should be applied +BELOW: some comments from [here](http://mindhive.mit.edu/node/109) on STC, when +it should be applied _At what point in the processing stream should you use it?_ -_This is the great open question about slice timing, and it's not super-answerable. Both SPM and AFNI recommend you do it before doing realignment/motion correction, but it's not entirely clear why. The issue is this:_ - -_If you do slice timing correction before realignment, you might look down your non-realigned time course for a given voxel on the border of gray matter and CSF, say, and see one TR where the head moved and the voxel sampled from CSF instead of gray. This would results in an interpolation error for that voxel, as it would attempt to interpolate part of that big giant signal into the previous voxel. 
On the other hand, if you do realignment before slice timing correction, you might shift a voxel or a set of voxels onto a different slice, and then you'd apply the wrong amount of slice timing correction to them when you corrected - you'd be shifting the signal as if it had come from slice 20, say, when it actually came from slice 19, and shouldn't be shifted as much._ - -_There's no way to avoid all the error (short of doing a four-dimensional realignment process combining spatial and temporal correction - Remi's note: fMRIprep does it), but I believe the current thinking is that doing slice timing first minimizes your possible error. The set of voxels subject to such an interpolation error is small, and the interpolation into another TR will also be small and will only affect a few TRs in the time course. By contrast, if one realigns first, many voxels in a slice could be affected at once, and their whole time courses will be affected. I think that's why it makes sense to do slice timing first. That said, here's some articles from the SPM e-mail list that comment helpfully on this subject both ways, and there are even more if you do a search for "slice timing AND before" in the archives of the list._ - +_This is the great open question about slice timing, and it's not +super-answerable. Both SPM and AFNI recommend you do it before doing +realignment/motion correction, but it's not entirely clear why. The issue is +this:_ + +_If you do slice timing correction before realignment, you might look down your +non-realigned time course for a given voxel on the border of gray matter and +CSF, say, and see one TR where the head moved and the voxel sampled from CSF +instead of gray. This would results in an interpolation error for that voxel, as +it would attempt to interpolate part of that big giant signal into the previous +voxel. 
On the other hand, if you do realignment before slice timing correction, +you might shift a voxel or a set of voxels onto a different slice, and then +you'd apply the wrong amount of slice timing correction to them when you +corrected - you'd be shifting the signal as if it had come from slice 20, say, +when it actually came from slice 19, and shouldn't be shifted as much._ + +_There's no way to avoid all the error (short of doing a four-dimensional +realignment process combining spatial and temporal correction - Remi's note: +fMRIprep does it), but I believe the current thinking is that doing slice timing +first minimizes your possible error. The set of voxels subject to such an +interpolation error is small, and the interpolation into another TR will also be +small and will only affect a few TRs in the time course. By contrast, if one +realigns first, many voxels in a slice could be affected at once, and their +whole time courses will be affected. I think that's why it makes sense to do +slice timing first. That said, here's some articles from the SPM e-mail list +that comment helpfully on this subject both ways, and there are even more if you +do a search for "slice timing AND before" in the archives of the list._ ## Boiler plate methods section - ### Preprocessing -The fMRI data were pre-processed and analyzed using statistical parametric mapping (SPM12 – v7487; Wellcome Center for Neuroimaging, London, UK; www.fil.ion.ucl.ac.uk/spm) running on {octave 4.{??} / matlab 20{XX} (Mathworks)}. +The fMRI data were pre-processed and analyzed using statistical parametric +mapping (SPM12 – v7487; Wellcome Center for Neuroimaging, London, UK; +www.fil.ion.ucl.ac.uk/spm) running on {octave 4.{??} / matlab 20{XX} +(Mathworks)}. -The preprocessing of the functional images was performed in the following order: removing of dummy scans, {slice timing correction}, realignment, normalization to MNI, smoothing. 
+The preprocessing of the functional images was performed in the following order: +removing of dummy scans, {slice timing correction}, realignment, normalization +to MNI, smoothing. {XX} dummy scans were removed to allow signal stabilization. -{Slice timing correction was then performed taking the {XX} th slice as a reference (interpolation: sinc interpolation).} - -Functional scans from each participant were realigned using the mean image as a reference (SPM 2 passes ; number of degrees of freedom: 6 ; cost function: least square) (Friston et al, 1995). +{Slice timing correction was then performed taking the {XX} th slice as a +reference (interpolation: sinc interpolation).} -The mean image obtained from realignement was then co-registered to the anatomical T1 image (number of degrees of freedom: 6 ; cost function: normalized mutual information) (Friston et al, 1995). The transformation matrix from this coregistration was then applied to all the functional images. +Functional scans from each participant were realigned using the mean image as a +reference (SPM 2 passes ; number of degrees of freedom: 6 ; cost function: least +square) (Friston et al, 1995). -The anatomical T1 image was bias field corrected, segmented and normalized to MNI space (target space resolution: 1 mm ; interpolation: 4th degree b-spline) using a unified segmentation. The deformation field obtained from this step was then applied to all the functional images (target space resolution equal that used at acquisition ; interpolation: 4th degree b-spline) +The mean image obtained from realignement was then co-registered to the +anatomical T1 image (number of degrees of freedom: 6 ; cost function: normalized +mutual information) (Friston et al, 1995). The transformation matrix from this +coregistration was then applied to all the functional images. -Functional MNI normalized images were then spatially smoothed using a 3D gaussian kernel (FWHM = {XX} mm). 
+The anatomical T1 image was bias field corrected, segmented and normalized to +MNI space (target space resolution: 1 mm ; interpolation: 4th degree b-spline) +using a unified segmentation. The deformation field obtained from this step was +then applied to all the functional images (target space resolution equal that +used at acquisition ; interpolation: 4th degree b-spline) +Functional MNI normalized images were then spatially smoothed using a 3D +gaussian kernel (FWHM = {XX} mm). ### fMRI data analysis -At the subject level, we performed a mass univariate analysis with a linear regression at each voxel of the brain, using generalized least squares with a global FAST model to account for temporal auto-correlation (Corbin et al, 2018) and a drift fit with discrete cosine transform basis (128 seconds cut-off). Image intensity scaling was done run-wide before statistical modeling such that the mean image will have mean intracerebral intensity of 100. +At the subject level, we performed a mass univariate analysis with a linear +regression at each voxel of the brain, using generalized least squares with a +global FAST model to account for temporal auto-correlation (Corbin et al, 2018) +and a drift fit with discrete cosine transform basis (128 seconds cut-off). +Image intensity scaling was done run-wide before statistical modeling such that +the mean image will have mean intracerebral intensity of 100. -We modeled the fMRI experiment in an event related design with regressors entered into the run-specific design matrix after convolving the onsets of each event with a canonical hemodynamic response function (HRF). +We modeled the fMRI experiment in an event related design with regressors +entered into the run-specific design matrix after convolving the onsets of each +event with a canonical hemodynamic response function (HRF). 
Table of conditions with duration of each event: WIP -Nuisance covariates included the 6 realignment parameters to account for residual motion artefacts. +Nuisance covariates included the 6 realignment parameters to account for +residual motion artefacts. -Contrast images were computed for the following condition and spatially smoothed using a 3D gaussian kernel (FWHM = {XX} mm). +Contrast images were computed for the following condition and spatially smoothed +using a 3D gaussian kernel (FWHM = {XX} mm). Table of constrast with weight: WIP Group level: WIP - ### References -Friston KJ, Ashburner J, Frith CD, Poline J-B, Heather JD & Frackowiak RSJ (1995) Spatial registration and normalization of images Hum. Brain Map. 2:165-189 - -Corbin, N., Todd, N., Friston, K. J. & Callaghan, M. F. Accurate modeling of temporal correlations in rapidly sampled fMRI time series. Hum. Brain Mapp. 39, 3884–3897 (2018). +Friston KJ, Ashburner J, Frith CD, Poline J-B, Heather JD & Frackowiak RSJ +(1995) Spatial registration and normalization of images Hum. Brain Map. +2:165-189 +Corbin, N., Todd, N., Friston, K. J. & Callaghan, M. F. Accurate modeling of +temporal correlations in rapidly sampled fMRI time series. Hum. Brain Mapp. 39, +3884–3897 (2018). ## Unit testing -All tests are in the test folder. There is also an empty dummy BIDS dataset that is partly created using the bash script `createDummyDataSet.sh`. +All tests are in the test folder. There is also an empty dummy BIDS dataset that +is partly created using the bash script `createDummyDataSet.sh`. ## Changelog @@ -346,7 +437,8 @@ All tests are in the test folder. 
There is also an empty dummy BIDS dataset that ## Contributors ✨ -Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/docs/en/emoji-key)): +Thanks goes to these wonderful people +([emoji key](https://allcontributors.org/docs/en/emoji-key)): @@ -362,4 +454,6 @@ Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/d -This project follows the [all-contributors](https://github.com/all-contributors/all-contributors) specification. Contributions of any kind welcome! +This project follows the +[all-contributors](https://github.com/all-contributors/all-contributors) +specification. Contributions of any kind welcome! diff --git a/tests/README.md b/tests/README.md index 603db339..f884994b 100644 --- a/tests/README.md +++ b/tests/README.md @@ -1,8 +1,10 @@ # README +```matlab coverage = mocov( ... '-expression', 'moxunit_runtests()', ... '-verbose', ... '-cover', fullfile(pwd, '..', 'subfun'), .... '-cover_xml_file', 'coverage.xml', ... - '-cover_html_dir', 'coverage_html') \ No newline at end of file + '-cover_html_dir', 'coverage_html') +``` \ No newline at end of file diff --git a/tests/dummyData/raw/README.md b/tests/dummyData/raw/README.md index 70a0110b..49892820 100644 --- a/tests/dummyData/raw/README.md +++ b/tests/dummyData/raw/README.md @@ -1,3 +1,3 @@ # README -placeholder \ No newline at end of file +placeholder From 348fc13a5ac674c92e163de7b9e20cd79ce257d9 Mon Sep 17 00:00:00 2001 From: Remi Gau Date: Thu, 24 Sep 2020 17:55:10 +0200 Subject: [PATCH 6/6] change all contributors cfg --- .all-contributorsrc | 1 + 1 file changed, 1 insertion(+) diff --git a/.all-contributorsrc b/.all-contributorsrc index e5082f64..22dbbab4 100644 --- a/.all-contributorsrc +++ b/.all-contributorsrc @@ -2,6 +2,7 @@ "files": [ "README.md" ], + "contributorsSortAlphabetically": true, "imageSize": 100, "commit": false, "contributors": [