How to use the nipype.pipeline.engine.Workflow class in nipype

To help you get started, we've selected a few nipype examples based on common ways Workflow is used in public projects.

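Before the project excerpts, here is a minimal, self-contained sketch of the basic pattern: wrap each step in a Node, register the nodes with a Workflow, and connect outputs to inputs. The add_one function, node names, and base_dir are illustrative, not taken from any of the projects below.

import nipype.pipeline.engine as pe
import nipype.interfaces.utility as niu

def add_one(x):
    return x + 1

# Wrap a plain Python function as a nipype interface.
node_a = pe.Node(niu.Function(input_names=['x'], output_names=['out'],
                              function=add_one),
                 name='node_a')
node_a.inputs.x = 1
node_b = pe.Node(niu.Function(input_names=['x'], output_names=['out'],
                              function=add_one),
                 name='node_b')

# A Workflow is a directed graph of nodes; base_dir is where intermediate
# results are cached between runs.
wf = pe.Workflow(name='toy_wf', base_dir='/tmp/toy_wf')
wf.connect(node_a, 'out', node_b, 'x')  # feed node_a's output into node_b
wf.run()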

github nbraingroup / fmridenoise / fmridenoise / workflows / base.py
def init_fmridenoise_wf(bids_dir,
                        derivatives='fmriprep',
                        task=[],
                        session=[],
                        subject=[],
                        pipelines_paths=get_pipelines_paths(),
                        smoothing=True,
                        ica_aroma=False,
                        high_pass=0.008,
                        low_pass=0.08,
                        # desc=None,
                        # ignore=None, force_index=None,
                        base_dir='/tmp/fmridenoise', name='fmridenoise_wf'
                        ):
    workflow = pe.Workflow(name=name, base_dir=base_dir)
    temps.base_dir = base_dir

    # 1) --- Selecting pipeline

    # Inputs: fulfilled
    pipelineselector = pe.Node(
       PipelineSelector(),
       name="PipelineSelector")
    pipelineselector.iterables = ('pipeline_path', pipelines_paths)
    # Outputs: pipeline, pipeline_name, low_pass, high_pass

    # 2) --- Loading BIDS structure

    # Inputs: directory, task, derivatives
    grabbing_bids = pe.Node(
        BIDSGrab(
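The pipelineselector.iterables assignment above is nipype's parameter-sweep mechanism: at execution time the downstream graph is expanded into one branch per value. A minimal sketch of the same idea, with illustrative names:

import nipype.pipeline.engine as pe
import nipype.interfaces.utility as niu

infosource = pe.Node(niu.IdentityInterface(fields=['subject_id']),
                     name='infosource')
# Everything downstream of this node is duplicated once per subject_id.
infosource.iterables = ('subject_id', ['sub-01', 'sub-02', 'sub-03'])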
github mwaskom / lyman / lyman / workflows / archive / restingstate.py
iterfield=["in_file"],
                             name="convertnormsurf")

    # Rename the timeseries
    rename = pe.MapNode(util.Rename(format_string="%(hemi)s.timeseries.fsaverage",
                                    keep_ext=True),
                        iterfield=["in_file"],
                        name="rename")

    # Define the outputs
    outputnode = pe.Node(util.IdentityInterface(fields=["timeseries"]),
                         name="outputs")

    
    # Define and connect the workflow
    tosurf = pe.Workflow(name=name)
    tosurf.connect([
        (inputnode,       surfproject,    [("timeseries", "source_file"),
                                           ("subject_id", "subject_id"),
                                           ("tkreg_affine", "reg_file")]),
        (hemisource,      surfproject,    [("hemi", "hemi")]),
        (surfproject,     surftransform,  [("out_file", "source_file")]),
        (inputnode,       surftransform,  [("subject_id", "source_subject")]),
        (hemisource,      surftransform,  [("hemi", "hemi")]),
        (surftransform,   smoothnormsurf, [("out_file", "in_file")]),
        (hemisource,      smoothnormsurf, [("hemi", "hemi")]),
        (inputnode,       smoothnormsurf, [("smooth_fwhm", "fwhm")]),
        (smoothnormsurf,  cvtnormsurf,    [("out_file", "in_file")]),
        (cvtnormsurf,     rename,         [("out_file", "in_file")]),
        (hemisource,      rename,         [("hemi", "hemi")]),
        (rename,          outputnode,     [("out_file", "timeseries")]),
        ])
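This excerpt uses the list form of connect(), where each entry is (source_node, dest_node, [(source_port, dest_port), ...]), so an entire workflow can be wired in one call. A self-contained sketch of the same syntax; the node names are illustrative and FSL's Smooth interface is just a convenient example (it needs FSL installed to actually run):

import nipype.pipeline.engine as pe
import nipype.interfaces.utility as niu
from nipype.interfaces import fsl

inputnode = pe.Node(niu.IdentityInterface(fields=['in_file', 'fwhm']),
                    name='inputnode')
smooth = pe.Node(fsl.Smooth(), name='smooth')
outputnode = pe.Node(niu.IdentityInterface(fields=['out_file']),
                     name='outputnode')

wf = pe.Workflow(name='smooth_wf')
# Each tuple wires one pair of nodes; each inner pair is (out_port, in_port).
wf.connect([
    (inputnode, smooth, [('in_file', 'in_file'), ('fwhm', 'fwhm')]),
    (smooth, outputnode, [('smoothed_file', 'out_file')]),
])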
github nipy / nipype / nipype / workflows / dmri / connectivity / group_connectivity.py
l3inputnode = pe.Node(
        interface=util.IdentityInterface(fields=[
            'Group_CFFs', 'Group_CSVnodemetrics', 'Group_CSVglobalmetrics',
            'Group_CSVmatrices'
        ]),
        name='l3inputnode')

    MergeCNetworks_grp = pe.Node(
        interface=cmtk.MergeCNetworks(), name="MergeCNetworks_grp")
    MergeCNetworks_grp.inputs.out_file = title

    l3datasink = pe.Node(interface=nio.DataSink(), name="l3datasink")
    l3datasink.inputs.base_directory = output_dir

    l3pipeline = pe.Workflow(name="l3output")
    l3pipeline.base_dir = output_dir
    l3pipeline.connect([
        (l3infosource, l3source, [('group_id', 'group_id')]),
        (l3source, l3inputnode, [('CFFfiles', 'Group_CFFs')]),
        (l3source, l3inputnode, [('CSVnodemetrics', 'Group_CSVnodemetrics')]),
        (l3source, l3inputnode, [('CSVglobalmetrics',
                                  'Group_CSVglobalmetrics')]),
        (l3source, l3inputnode, [('CSVmatrices', 'Group_CSVmatrices')]),
    ])

    l3pipeline.connect([(l3inputnode, MergeCNetworks_grp, [('Group_CFFs',
                                                            'in_files')])])
    l3pipeline.connect([(MergeCNetworks_grp, l3datasink, [('connectome_file',
                                                           '@l3output')])])

    concat_csv_interface = Function(
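The l3datasink node above is the standard way to collect final outputs: a DataSink copies whatever is connected to it under base_directory, and destination names control the folder layout (a leading '@' suppresses an extra subfolder, as in the '@l3output' connection). A generic sketch with illustrative names and paths:

import nipype.pipeline.engine as pe
import nipype.interfaces.io as nio
import nipype.interfaces.utility as niu

sink = pe.Node(nio.DataSink(), name='sink')
sink.inputs.base_directory = '/tmp/results'

src = pe.Node(niu.IdentityInterface(fields=['out_file']), name='src')
src.inputs.out_file = '/tmp/example.txt'  # assumed to exist at run time

wf = pe.Workflow(name='sink_wf')
# 'group.@file' stores the file under <base_directory>/group/.
wf.connect(src, 'out_file', sink, 'group.@file')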
github poldracklab / niworkflows / niworkflows / anat / skullstrip.py
in_file : str
        input T1w image.

    Outputs
    -------
    bias_corrected : str
        path to the bias corrected input MRI.
    out_file : str
        path to the skull-stripped image.
    out_mask : str
        path to the generated brain mask.
    bias_image : str
        path to the B1 inhomogeneity field.

    """
    workflow = pe.Workflow(name=name)
    inputnode = pe.Node(niu.IdentityInterface(fields=["in_file"]), name="inputnode")
    outputnode = pe.Node(
        niu.IdentityInterface(
            fields=["bias_corrected", "out_file", "out_mask", "bias_image"]
        ),
        name="outputnode",
    )

    inu_n4 = pe.Node(
        N4BiasFieldCorrection(
            dimension=3,
            save_bias=True,
            num_threads=n4_nthreads,
            rescale_intensities=True,
            copy_header=True,
        ),
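The inputnode/outputnode pair built from IdentityInterface is the conventional way to give a workflow a stable public interface: callers connect to 'inputnode.<field>' without needing to know the internals. A minimal sketch of nesting one workflow inside another this way (all names illustrative):

import nipype.pipeline.engine as pe
import nipype.interfaces.utility as niu

inner = pe.Workflow(name='inner')
inputnode = pe.Node(niu.IdentityInterface(fields=['x']), name='inputnode')
outputnode = pe.Node(niu.IdentityInterface(fields=['y']), name='outputnode')
inner.connect(inputnode, 'x', outputnode, 'y')

src = pe.Node(niu.IdentityInterface(fields=['x']), name='src')
src.inputs.x = 42

outer = pe.Workflow(name='outer')
# Dotted 'node.port' names address nodes inside a nested workflow.
outer.connect(src, 'x', inner, 'inputnode.x')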
github nipy / nipype / examples / rsfmri_fsl.py
"""
Band pass filter the data to remove frequencies below .1 Hz
"""

bandPassFilterData = pe.Node(interface=fsl.ImageMaths(op_string=' -bptf 128 12.5 '),
                             name='bandpassfiltermcdata_fslmaths')


"""
Set up complete workflow
========================
"""

l1pipeline = pe.Workflow(name="resting")
l1pipeline.base_dir = os.path.abspath('./fslresting/workingdir')
l1pipeline.connect([(infosource, datasource, [('subject_id', 'subject_id')]),
                    (datasource, csffilter, [('struct','nosestrip.in_file'),
                                             ('func', 'realign.in_file'),
                                             #(('func', pickfirst), 'extractref.in_file'),
                                             ('func', 'extractref.in_file'),
                                              ]),
                    (csffilter, modelfit, [('stripfunc.out_file', 'modelspec.functional_runs'),
                                           ('realign.par_file', 'modelspec.realignment_parameters'),
                                           (('extractcsfts.out_file', subjectinfo),'modelspec.subject_info'),
                                           ('stripfunc.out_file', 'modelestimate.in_file')
                                           ]),
                    (modelfit, bandPassFilterData, [('modelestimate.residual4d', 'in_file')]),
                    ])

if __name__ == '__main__':
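Note the connection (('extractcsfts.out_file', subjectinfo), 'modelspec.subject_info') above: when a source port is written as a (port, function) tuple, nipype applies the function to the value before passing it downstream. A self-contained sketch of this modifier pattern; pickfirst and the node names are illustrative:

import nipype.pipeline.engine as pe
import nipype.interfaces.utility as niu

def pickfirst(files):
    return files[0]

src = pe.Node(niu.IdentityInterface(fields=['files']), name='src')
src.inputs.files = ['a.nii', 'b.nii']
dst = pe.Node(niu.IdentityInterface(fields=['first']), name='dst')

wf = pe.Workflow(name='modifier_wf')
# The (port, function) tuple applies pickfirst to 'files' in transit.
wf.connect([(src, dst, [(('files', pickfirst), 'first')])])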
github IBT-FMI / SAMRI / samri / pipelines / glm.py
workflow_connections.extend([
				(varcopes, varcopemerge, [('selection', 'in_files')]),
				])

	crashdump_dir = path.join(out_base,'crashdump')
	workflow_config = {'execution': {'crashdump_dir': crashdump_dir}}
	if debug:
		workflow_config['logging'] = {
			'workflow_level':'DEBUG',
			'utils_level':'DEBUG',
			'interface_level':'DEBUG',
			'filemanip_level':'DEBUG',
			'log_to_file':'true',
			}

	workflow = pe.Workflow(name=workdir_name)
	workflow.connect(workflow_connections)
	workflow.base_dir = out_base
	workflow.config = workflow_config
	try:
		workflow.write_graph(dotfilename=path.join(workflow.base_dir,workdir_name,"graph.dot"), graph2use="hierarchical", format="png")
	except OSError:
		print('Could not write the DOT graph for visualization; this requires the `dot` command from the graphviz package. It is non-critical to processing, but worth fixing.')

	n_jobs = max(int(round(mp.cpu_count()*n_jobs_percentage)),2)
	workflow.run(plugin="MultiProc", plugin_args={'n_procs' : n_jobs})
	if not keep_crashdump:
		try:
			shutil.rmtree(crashdump_dir)
		except (FileNotFoundError, OSError):
			pass
	if not keep_work:
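The run call above uses the MultiProc plugin to parallelize across local cores; with no plugin argument, Workflow.run() executes serially via the Linear plugin. A trivially runnable sketch of both modes (names and the base_dir are illustrative):

import nipype.pipeline.engine as pe
import nipype.interfaces.utility as niu

wf = pe.Workflow(name='plugin_demo', base_dir='/tmp/plugin_demo')
node = pe.Node(niu.IdentityInterface(fields=['x']), name='node')
node.inputs.x = 1
wf.add_nodes([node])

wf.run()  # serial (Linear) execution, the default
wf.run(plugin='MultiProc', plugin_args={'n_procs': 2})  # local parallelism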
github nipy / nipype / old-docs / 0.10.0 / _downloads / fmri_freesurfer_smooth.py
that visually represents the workflow.
"""

if __name__ == '__main__':
    level1.run()
    level1.write_graph(graph2use='flat')


"""
Level2 surface-based pipeline
-----------------------------

Create a level2 workflow
"""

l2flow = pe.Workflow(name='l2out')
l2flow.base_dir = os.path.abspath('volsurf_tutorial')

"""
Setup a dummy node to iterate over contrasts and hemispheres
"""

l2inputnode = pe.Node(interface=util.IdentityInterface(fields=['contrasts',
                                                               'hemi']),
                      name='inputnode')
l2inputnode.iterables = [('contrasts', range(1,len(contrasts)+1)),
                         ('hemi', ['lh','rh'])]

"""
Use a datagrabber node to collect contrast images and registration files
"""
github miykael / nipype-beginner-s-guide / scripts / example_fMRI_1_first_level.py
# Volume Transformation - transform contrasts into anatomical space
applyVolReg = MapNode(ApplyVolTransform(fs_target=True),
                      name='applyVolReg',
                      iterfield=['source_file'])

# MRIConvert - to gzip output files
mriconvert = MapNode(MRIConvert(out_type='niigz'),
                     name='mriconvert',
                     iterfield=['in_file'])


###
# Specify 1st-Level Analysis Workflow & Connect Nodes

# Initiation of the 1st-level analysis workflow
l1analysis = Workflow(name='l1analysis')

# Connect up the 1st-level analysis components
l1analysis.connect([(modelspec, level1design, [('session_info',
                                                'session_info')]),
                    (level1design, level1estimate, [('spm_mat_file',
                                                     'spm_mat_file')]),
                    (level1estimate, conestimate, [('spm_mat_file',
                                                    'spm_mat_file'),
                                                   ('beta_images',
                                                    'beta_images'),
                                                   ('residual_image',
                                                    'residual_image')]),
                    (conestimate, applyVolReg, [('con_images',
                                                 'source_file')]),
                    (applyVolReg, mriconvert, [('transformed_file',
                                                'in_file')]),
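applyVolReg and mriconvert above are MapNodes: the wrapped interface runs once per element of each iterfield input, and the results are collected into lists. A minimal sketch (the double function and names are illustrative):

import nipype.pipeline.engine as pe
import nipype.interfaces.utility as niu

def double(x):
    return x * 2

# Runs the wrapped interface once per element of 'x'.
mapnode = pe.MapNode(niu.Function(input_names=['x'], output_names=['out'],
                                  function=double),
                     iterfield=['x'],
                     name='double_each')
mapnode.inputs.x = [1, 2, 3]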
github APPIAN-PET / APPIAN / Quantification / quantification.py
    [truncated ASCII diagram: workflow inputs fan out to ROI-wise and
    voxel-wise quantification branches, whose outputs reconverge]

    :returns: workflow
    '''
    workflow = pe.Workflow(name=name)
    #Define input node that will receive input from outside of workflow
    inputnode = pe.Node(niu.IdentityInterface(
        fields=["stereo", "tfm_mri_stx", "tfm_pet_mri", "like_file",
                "in_file", "header", "reference", "mask"]),
        name='inputnode')

    out_files = ["out_file", "out_file_stereo"]
    #Define empty node for output
    outputnode = pe.Node(niu.IdentityInterface(fields=out_files), name='outputnode')

    ### Quantification module
    sys.path.insert(0, os.path.dirname(os.path.abspath(__file__))+"/methods/" )
    print(os.path.dirname(os.path.abspath(__file__))+"/methods/") 
    quant_module_fn="quant_method_"+opts.quant_method #+".py"

    #quant_module = importlib.import_module(quant_module_fn)
    try:
        quant_module = importlib.import_module(quant_module_fn)
    except ImportError:
github FCP-INDI / C-PAC / CPAC / pipeline / cpac_group_analysis_pipeline.py
model, subject_list = model_sub
        
        print "running for model %s and resource %s..." % (os.path.basename(model), resource)
        

        if not os.path.exists(model):
            raise Exception("path to model %s doesn't exit"%model)
        
        if not os.path.exists(subject_list):
            raise Exception("path to input subject list %s is invalid" % subject_list)
        
        #if c.mixedScanAnalysis == True:
        #    wf = pe.Workflow(name = 'group_analysis/%s/grp_model_%s'%(resource, os.path.basename(model)))
        #else:

        wf = pe.Workflow(name='group_analysis__%s__grp_model_%s__%s' % (resource, os.path.basename(model), scan_ids[0]))

        wf.base_dir = c.workingDirectory
        wf.config['execution'] = {'hash_method': 'timestamp', 'crashdump_dir': os.path.abspath(c.crashLogDirectory)}
        log_dir = os.path.join(c.outputDirectory, 'logs', 'group_analysis', resource, 'model_%s' % (os.path.basename(model)))
        try:
            os.makedirs(log_dir)
        except OSError:
            print("log_dir already exists")

        # enable logging
        from nipype import config
        from nipype import logging
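The excerpt ends at the config and logging imports. One typical way to finish that thought, sketched here rather than taken from C-PAC itself, is to point nipype's file logging at the log_dir created above:

from nipype import config, logging

# Assumes the log_dir variable from the snippet above; the keys are
# nipype's own configuration names.
config.update_config({'logging': {'log_directory': log_dir,
                                  'log_to_file': True}})
logging.update_logging(config)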