@@ -29,11 +29,11 @@
 from CPAC.utils.configuration import check_pname, Configuration, set_subject
 from CPAC.utils.configuration.yaml_template import upgrade_pipeline_to_1_8
 from CPAC.utils.ga import track_run
-from CPAC.utils.monitoring import failed_to_start, log_nodes_cb, WFLOGGER
+from CPAC.utils.monitoring import failed_to_start, init_loggers, log_nodes_cb, WFLOGGER
 
 
-# Run condor jobs
 def run_condor_jobs(c, config_file, subject_list_file, p_name):
+    """Run condor jobs."""
     # Import packages
     import subprocess
     from time import strftime
@@ -249,6 +249,8 @@ def run_T1w_longitudinal(sublist, cfg: Configuration, dry_run: bool = False):
     # sessions for each participant as value
     for subject_id, sub_list in subject_id_dict.items():
         if len(sub_list) > 1:
+            _, _, log_dir = set_subject(subject_id_dict, cfg)
+            init_loggers(subject_id, cfg, log_dir, mock=True, longitudinal=True)
             anat_longitudinal_wf(subject_id, sub_list, cfg, dry_run=dry_run)
         elif len(sub_list) == 1:
             warnings.warn(
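Note on the two added lines above: set_subject resolves a per-subject log directory and init_loggers stands up the loggers (mocked here, flagged longitudinal) before anat_longitudinal_wf runs. As a rough illustration of the underlying per-subject file-logger pattern, here is a minimal standard-library sketch; init_subject_logger and the log file name are hypothetical, not C-PAC's actual implementation:

    import logging
    import os

    def init_subject_logger(subject_id: str, log_root: str) -> logging.Logger:
        # Hypothetical helper: one file-backed logger per subject under log_root.
        log_dir = os.path.join(log_root, subject_id)
        os.makedirs(log_dir, exist_ok=True)
        logger = logging.getLogger("longitudinal." + subject_id)
        if not logger.handlers:  # avoid stacking handlers on repeated calls
            handler = logging.FileHandler(os.path.join(log_dir, "pipeline.log"))
            handler.setFormatter(
                logging.Formatter("%(asctime)s %(levelname)s %(message)s")
            )
            logger.addHandler(handler)
            logger.setLevel(logging.INFO)
        return logger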
@@ -491,161 +493,10 @@ def run(
     """
 
     # BEGIN LONGITUDINAL TEMPLATE PIPELINE
-    if (
-        hasattr(c, "longitudinal_template_generation")
-        and c.longitudinal_template_generation["run"]
-    ):
+    if c["longitudinal_template_generation", "run"]:
         run_T1w_longitudinal(sublist, c, dry_run=test_config)
     # TODO functional longitudinal pipeline
 
-    """
-    if valid_longitudinal_data:
-        rsc_file_list = []
-        for dirpath, dirnames, filenames in os.walk(c.pipeline_setup[
-                'output_directory']['path']):
-            for f in filenames:
-                # TODO is there a better way to check output folder name?
-                if f != '.DS_Store' and 'T1w_longitudinal_pipeline' in dirpath:
-                    rsc_file_list.append(os.path.join(dirpath, f))
-
-        subject_specific_dict = {subj: [] for subj in subject_id_dict.keys()}
-        session_specific_dict = {os.path.join(session['subject_id'], session['unique_id']): [] for session in sublist}
-        for rsc_path in rsc_file_list:
-            key = [s for s in session_specific_dict.keys() if s in rsc_path]
-            if key:
-                session_specific_dict[key[0]].append(rsc_path)
-            else:
-                subj = [s for s in subject_specific_dict.keys() if s in rsc_path]
-                if subj:
-                    subject_specific_dict[subj[0]].append(rsc_path)
-
-        # update individual-specific outputs:
-        # anatomical_brain, anatomical_brain_mask and anatomical_reorient
-        for key in session_specific_dict.keys():
-            for f in session_specific_dict[key]:
-                sub, ses = key.split('/')
-                ses_list = [subj for subj in sublist if sub in subj['subject_id'] and ses in subj['unique_id']]
-                if len(ses_list) > 1:
-                    raise Exception("There are several files containing " + f)
-                if len(ses_list) == 1:
-                    ses = ses_list[0]
-                    subj_id = ses['subject_id']
-                    tmp = f.split(c.pipeline_setup['output_directory']['path'])[-1]
-                    keys = tmp.split(os.sep)
-                    if keys[0] == '':
-                        keys = keys[1:]
-                    if len(keys) > 1:
-                        if ses.get('resource_pool') is None:
-                            ses['resource_pool'] = {
-                                keys[0].split(c.pipeline_setup['pipeline_name'] + '_')[-1]: {
-                                    keys[-2]: f
-                                }
-                            }
-                        else:
-                            strat_key = keys[0].split(c.pipeline_setup['pipeline_name'] + '_')[-1]
-                            if ses['resource_pool'].get(strat_key) is None:
-                                ses['resource_pool'].update({
-                                    strat_key: {
-                                        keys[-2]: f
-                                    }
-                                })
-                            else:
-                                ses['resource_pool'][strat_key].update({
-                                    keys[-2]: f
-                                })
-
-        for key in subject_specific_dict:
-            for f in subject_specific_dict[key]:
-                ses_list = [subj for subj in sublist if key in subj['anat']]
-                for ses in ses_list:
-                    tmp = f.split(c.pipeline_setup['output_directory']['path'])[-1]
-                    keys = tmp.split(os.sep)
-                    if keys[0] == '':
-                        keys = keys[1:]
-                    if len(keys) > 1:
-                        if ses.get('resource_pool') is None:
-                            ses['resource_pool'] = {
-                                keys[0].split(c.pipeline_setup['pipeline_name'] + '_')[-1]: {
-                                    keys[-2]: f
-                                }
-                            }
-                        else:
-                            strat_key = keys[0].split(c.pipeline_setup['pipeline_name'] + '_')[-1]
-                            if ses['resource_pool'].get(strat_key) is None:
-                                ses['resource_pool'].update({
-                                    strat_key: {
-                                        keys[-2]: f
-                                    }
-                                })
-                            else:
-                                if keys[-2] == 'anatomical_brain' or keys[-2] == 'anatomical_brain_mask' or keys[-2] == 'anatomical_skull_leaf':
-                                    pass
-                                elif 'apply_warp_anat_longitudinal_to_standard' in keys[-2] or 'fsl_apply_xfm_longitudinal' in keys[-2]:
-                                    # TODO update!!!
-                                    # it assumes session id == last key (ordered by session count instead of session id) + 1
-                                    # might cause problem if session id is not continuous
-                                    def replace_index(target1, target2, file_path):
-                                        index1 = file_path.index(target1)+len(target1)
-                                        index2 = file_path.index(target2)+len(target2)
-                                        file_str_list = list(file_path)
-                                        file_str_list[index1] = "*"
-                                        file_str_list[index2] = "*"
-                                        file_path_updated = "".join(file_str_list)
-                                        file_list = glob.glob(file_path_updated)
-                                        file_list.sort()
-                                        return file_list
-                                    if ses['unique_id'] == str(int(keys[-2][-1])+1):
-                                        if keys[-3] == 'seg_probability_maps':
-                                            f_list = replace_index('seg_probability_maps_', 'segment_prob_', f)
-                                            ses['resource_pool'][strat_key].update({
-                                                keys[-3]: f_list
-                                            })
-                                        elif keys[-3] == 'seg_partial_volume_files':
-                                            f_list = replace_index('seg_partial_volume_files_', 'segment_pve_', f)
-                                            ses['resource_pool'][strat_key].update({
-                                                keys[-3]: f_list
-                                            })
-                                        else:
-                                            ses['resource_pool'][strat_key].update({
-                                                keys[-3]: f  # keys[-3]: 'anatomical_to_standard'
-                                            })
-                                elif keys[-2] != 'warp_list':
-                                    ses['resource_pool'][strat_key].update({
-                                        keys[-2]: f
-                                    })
-                                elif keys[-2] == 'warp_list':
-                                    if 'ses-'+ses['unique_id'] in tmp:
-                                        ses['resource_pool'][strat_key].update({
-                                            keys[-2]: f
-                                        })
-        for key in subject_specific_dict:
-            ses_list = [subj for subj in sublist if key in subj['anat']]
-            for ses in ses_list:
-                for reg_strat in strat_list:
-                    try:
-                        ss_strat_list = list(ses['resource_pool'])
-                        for strat_key in ss_strat_list:
-                            try:
-                                ses['resource_pool'][strat_key].update({
-                                    'registration_method': reg_strat['registration_method']
-                                })
-                            except KeyError:
-                                pass
-                    except KeyError:
-                        pass
-
-        yaml.dump(sublist, open(os.path.join(c.pipeline_setup['working_directory']['path'],'data_config_longitudinal.yml'), 'w'), default_flow_style=False)
-        WFLOGGER.info("\n\nLongitudinal pipeline completed.\n\n")
-
-        # skip main preprocessing
-        if (
-            not c.anatomical_preproc['run'] and
-            not c.functional_preproc['run']
-        ):
-            sys.exit()
-    """
-    # END LONGITUDINAL TEMPLATE PIPELINE
-
     # If it only allows one, run it linearly
     if c.pipeline_setup["system_config"]["num_participants_at_once"] == 1:
         for sub in sublist:
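The rewritten condition above relies on Configuration accepting a tuple subscript for nested keys, which is why the hasattr guard could be dropped: a missing key now surfaces as a KeyError instead of being silently skipped. A minimal sketch of that lookup pattern, assuming (as the new line suggests) that tuple keys simply walk nested dictionaries; NestedConfig is an illustrative stand-in, not the real Configuration class:

    class NestedConfig:
        # Illustrative stand-in: tuple subscripts walk nested dicts,
        # so cfg["a", "b"] is equivalent to cfg["a"]["b"].
        def __init__(self, data):
            self._data = data

        def __getitem__(self, key):
            if isinstance(key, tuple):
                value = self._data
                for part in key:
                    value = value[part]
                return value
            return self._data[key]

    cfg = NestedConfig({"longitudinal_template_generation": {"run": True}})
    assert cfg["longitudinal_template_generation", "run"] is True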