    only_texinfo=only_texinfo,
    options=opts,
    images=images,
    source_code=source_code,
    html_show_formats=config.wf_html_show_formats and len(images),
    caption=caption)

total_lines.extend(result.split("\n"))
total_lines.extend("\n")
if total_lines:
    state_machine.insert_input(total_lines, source=source_file_name)

# copy image files to builder's output directory, if necessary
if not os.path.exists(dest_dir):
    mkdirs(dest_dir)
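# Hedged sketch: `mkdirs` is not defined in this excerpt; it is assumed to be
# an "exist_ok"-style wrapper around os.makedirs that tolerates a directory
# that already exists (e.g. when several builders race). A minimal stand-in:
import errno
import os

def mkdirs(path):
    """Create *path* and its parents, ignoring the error if it already exists."""
    try:
        os.makedirs(path)
    except OSError as err:
        if err.errno != errno.EEXIST:
            raise
    return path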
for code_piece, images in results:
    for img in images:
        for fn in img.filenames():
            destimg = os.path.join(dest_dir, os.path.basename(fn))
            if fn != destimg:
                shutil.copyfile(fn, destimg)
# copy script (if necessary)
target_name = os.path.join(dest_dir, output_base + source_ext)
with io.open(target_name, 'w', encoding="utf-8") as f:
    if source_file_name == rst_file:
        code_escaped = unescape_doctest(code)
    else:
        code_escaped = code
    f.write(code_escaped)
continue
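# Hedged sketch: `unescape_doctest` is assumed to turn doctest-formatted text
# back into plain source by stripping the ">>> " / "... " prompts and blanking
# the expected-output lines, so the copied script is runnable. This mirrors the
# matplotlib plot_directive helper of the same name, but is an assumption here,
# not code taken from this repository.
import re

def unescape_doctest(text):
    """Strip doctest prompts so the remaining text is plain source code."""
    if not re.search(r'^\s*>>>', text, re.MULTILINE):
        return text  # not doctest-formatted: nothing to do
    lines = []
    for line in text.split("\n"):
        stripped = line.strip()
        if stripped.startswith('>>> ') or stripped.startswith('... '):
            lines.append(line.replace('>>> ', '').replace('... ', ''))
        else:
            lines.append('')  # expected output: keep the line count, drop the text
    return "\n".join(lines) + "\n"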
workflow.base_dir = settings['work_dir']
if settings.get('write_graph', False):
    workflow.write_graph()

if not opts.dry_run:
    workflow.run(**plugin_settings)

# Set up group level
if opts.analysis_level == 'group' or opts.participant_label is None:
    from glob import glob
    from mriqc.reports import group_html
    from mriqc.utils.misc import generate_csv

    reports_dir = check_folder(op.join(settings['output_dir'], 'reports'))
    derivatives_dir = op.join(settings['output_dir'], 'derivatives')
    for qctype in opts.data_type:
        qcjson = op.join(derivatives_dir, '{}*.json'.format(qctype[:4]))
        json_files = glob(qcjson)

        # If there are no iqm.json files, nothing to do.
        if not json_files:
            MRIQC_LOG.warning(
                'Generating group-level report for the "%s" data type - '
                'no IQM-JSON files were found in "%s"', qctype, derivatives_dir)
            continue

        # If some were found, generate the CSV file and group report
        out_csv = op.join(settings['output_dir'], qctype[:4] + 'MRIQC.csv')
        out_html = op.join(reports_dir, qctype[:4] + '_group.html')
        generate_csv(json_files, out_csv)
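# Hedged sketch of the kind of work generate_csv does at this point: flatten a
# set of per-run IQM JSON files into one CSV summary table. This is a generic
# stand-in written for illustration, not mriqc's implementation, and
# summarize_iqms is a hypothetical name.
import json
import pandas as pd

def summarize_iqms(json_files, out_csv):
    """Collect per-run IQM dictionaries into a single CSV table."""
    records = []
    for path in json_files:
        with open(path) as json_file:
            records.append(json.load(json_file))
    dataframe = pd.DataFrame(records)
    dataframe.to_csv(out_csv, index=False)
    return dataframe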
analysis_levels = opts.analysis_level
if opts.participant_label is None:
    analysis_levels.append('group')
analysis_levels = list(set(analysis_levels))
if len(analysis_levels) > 2:
    raise RuntimeError('Error parsing analysis levels, got "%s"' %
                       ', '.join(analysis_levels))
settings['report_dir'] = opts.report_dir
if not settings['report_dir']:
    settings['report_dir'] = op.join(settings['output_dir'], 'reports')

check_folder(settings['output_dir'])
if 'participant' in analysis_levels:
    check_folder(settings['work_dir'])
check_folder(log_dir)
check_folder(settings['report_dir'])
# Set nipype config
ncfg.update_config({
    'logging': {'log_directory': log_dir, 'log_to_file': True},
    'execution': {'crashdump_dir': log_dir, 'crashfile_format': 'txt'},
})
callback_log_path = None
plugin_settings = {'plugin': 'Linear'}
if opts.use_plugin is not None:
    from yaml import safe_load as loadyml
    with open(opts.use_plugin) as pfile:
        plugin_settings = loadyml(pfile)
else:
    # Setup multiprocessing
    settings['start_idx'] = opts.start_idx

if opts.stop_idx:
    settings['stop_idx'] = opts.stop_idx
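# Hedged example of what a --use-plugin YAML file can look like. The keys
# mirror what nipype's workflow.run(**plugin_settings) expects - a plugin name
# plus optional plugin_args - and the values below are illustrative only.
from yaml import safe_load

example_plugin_yaml = """\
plugin: MultiProc
plugin_args:
  n_procs: 4
"""

assert safe_load(example_plugin_yaml) == {
    'plugin': 'MultiProc',
    'plugin_args': {'n_procs': 4},
}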
if opts.ants_settings:
    settings['ants_settings'] = opts.ants_settings

log_dir = op.join(settings['output_dir'], 'logs')

settings['report_dir'] = opts.report_dir
if not settings['report_dir']:
    settings['report_dir'] = op.join(settings['output_dir'], 'reports')

check_folder(settings['output_dir'])
check_folder(settings['work_dir'])
check_folder(log_dir)
check_folder(settings['report_dir'])
# Set nipype config
ncfg.update_config({
    'logging': {'log_directory': log_dir, 'log_to_file': True},
    'execution': {'crashdump_dir': log_dir}
})
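# Hedged sketch of the callback-log wiring that this excerpt references further
# down (callback_log_path, logger.addHandler(handler), and the gantt chart) but
# does not show. The import location of log_nodes_cb and the log file name are
# assumptions based on common nipype usage, not taken from this repository;
# `op` and `log_dir` come from the surrounding code.
import logging

from nipype.utils.profiler import log_nodes_cb  # assumed import location

callback_log_path = op.join(log_dir, 'run_stats.log')
logger = logging.getLogger('callback')
logger.setLevel(logging.DEBUG)
handler = logging.FileHandler(callback_log_path)
# The execution plugin additionally needs plugin_args={'status_callback': log_nodes_cb}
# so that node start/finish events are written to this file, which
# generate_gantt_chart() reads later. logger.addHandler(handler) happens below.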
plugin_settings = {'plugin': 'Linear'}
if opts.use_plugin is not None:
    from yaml import safe_load as loadyml
    with open(opts.use_plugin) as pfile:
        plugin_settings = loadyml(pfile)
else:
    # Setup multiprocessing
    if settings['n_procs'] == 0:
        settings['n_procs'] = 1

# `logger` and `handler` come from the callback-log wiring sketched above,
# which the excerpt itself omits.
logger.addHandler(handler)
workflow.run(**plugin_settings)

if callback_log_path is not None:
    from nipype.utils.draw_gantt_chart import generate_gantt_chart
    generate_gantt_chart(callback_log_path, cores=settings['n_procs'])

# This `else` appears to pair with an outer check (dataset found / workflow
# built) that is not part of this excerpt, not with the callback_log_path test.
else:
    raise RuntimeError('Error reading BIDS directory (%s), or the dataset is not '
                       'BIDS-compliant.' % settings['bids_dir'])
# Set up group level
if 'group' in analysis_levels:
    from mriqc.reports import group_html
    from mriqc.utils.misc import generate_csv, generate_pred

    reports_dir = check_folder(op.join(settings['output_dir'], 'reports'))
    derivatives_dir = op.join(settings['output_dir'], 'derivatives')

    n_group_reports = 0
    for mod in modalities:
        dataframe, out_csv = generate_csv(derivatives_dir,
                                          settings['output_dir'], mod)

        # If there are no iqm.json files, nothing to do.
        if dataframe is None:
            MRIQC_LOG.warning(
                'No IQM-JSON files were found for the %s data type in %s. The group-level '
                'report was not generated.', mod, derivatives_dir)
            continue

        MRIQC_LOG.info('Summary CSV table for the %s data generated (%s)', mod, out_csv)