From b0b6f0e0bbedd98dd65c18d18b978e2661f240f4 Mon Sep 17 00:00:00 2001 From: henrikstranneheim Date: Sun, 10 Jan 2021 06:52:41 +0100 Subject: [PATCH 1/7] feat(controller): Added sub parse_recipe_prerequisites in Recipe.pm --- lib/MIP/Get/Parameter.pm | 9 ++-- lib/MIP/Recipe.pm | 84 ++++++++++++++++++++++++++++++++++ t/get_recipe_attributes.t | 6 +-- t/get_recipe_resources.t | 8 ++-- t/parse_recipe_prerequisites.t | 82 +++++++++++++++++++++++++++++++++ templates/code/test.t | 6 +-- 6 files changed, 176 insertions(+), 19 deletions(-) create mode 100644 lib/MIP/Recipe.pm create mode 100644 t/parse_recipe_prerequisites.t diff --git a/lib/MIP/Get/Parameter.pm b/lib/MIP/Get/Parameter.pm index e4ee4b6cb..0dc03baca 100644 --- a/lib/MIP/Get/Parameter.pm +++ b/lib/MIP/Get/Parameter.pm @@ -50,20 +50,17 @@ sub get_recipe_attributes { strict_type => 1, }, recipe_name => { - required => 1, defined => 1, - store => \$recipe_name, + required => 1, strict_type => 1, + store => \$recipe_name, }, }; check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; ## Get attribute value - if ( defined $attribute && $attribute ) { - - return $parameter_href->{$recipe_name}{$attribute}; - } + return $parameter_href->{$recipe_name}{$attribute} if ( defined $attribute and $attribute ); ## Get recipe attribute hash return %{ $parameter_href->{$recipe_name} }; diff --git a/lib/MIP/Recipe.pm b/lib/MIP/Recipe.pm new file mode 100644 index 000000000..f07afb818 --- /dev/null +++ b/lib/MIP/Recipe.pm @@ -0,0 +1,84 @@ +package MIP::Recipe; + +use 5.026; +use Carp; +use charnames qw{ :full :short }; +use English qw{ -no_match_vars }; +use open qw{ :encoding(UTF-8) :std }; +use Params::Check qw{ allow check last_error }; +use utf8; +use warnings; +use warnings qw{ FATAL utf8 }; + +## CPANM +use autodie qw{ :all }; + +BEGIN { + require Exporter; + use base qw{ Exporter }; + + # Set the version for version checking + # Functions and variables which can be optionally exported + our @EXPORT_OK = qw{ 
parse_recipe_prerequisites }; +} + +sub parse_recipe_prerequisites { + +## Function : Parse recipe prerequisites and return return them +## Returns : %recipe +## Arguments: $active_parameter_href => The active parameters for this analysis hash {REF} +## : $parameter_href => Holds all parameters +## : $recipe_name => Recipe name + + my ($arg_href) = @_; + + ## Flatten argument(s) + my $active_parameter_href; + my $parameter_href; + my $recipe_name; + + my $tmpl = { + active_parameter_href => { + default => {}, + defined => 1, + required => 1, + store => \$active_parameter_href, + strict_type => 1, + }, + parameter_href => { + default => {}, + defined => 1, + required => 1, + store => \$parameter_href, + strict_type => 1, + }, + recipe_name => { + defined => 1, + required => 1, + strict_type => 1, + store => \$recipe_name, + }, + }; + + check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; + + use MIP::Get::Parameter qw{ get_recipe_attributes get_recipe_resources }; + + my %recipe_resource = get_recipe_resources( + { + active_parameter_href => $active_parameter_href, + recipe_name => $recipe_name, + } + ); + $recipe_resource{job_id_chain} = get_recipe_attributes( + { + parameter_href => $parameter_href, + recipe_name => $recipe_name, + attribute => q{chain}, + } + ); + + return %recipe_resource; +} + +1; diff --git a/t/get_recipe_attributes.t b/t/get_recipe_attributes.t index 52916a010..4222fd640 100644 --- a/t/get_recipe_attributes.t +++ b/t/get_recipe_attributes.t @@ -16,16 +16,12 @@ use warnings qw{ FATAL utf8 }; ## CPANM use autodie qw { :all }; use Modern::Perl qw{ 2018 }; -use Readonly; ## MIPs lib/ use lib catdir( dirname($Bin), q{lib} ); +use MIP::Constants qw{ $COMMA $SPACE }; use MIP::Test::Fixtures qw{ test_mip_hashes }; -## Constants -Readonly my $COMMA => q{,}; -Readonly my $SPACE => q{ }; - BEGIN { use MIP::Test::Fixtures qw{ test_import }; diff --git a/t/get_recipe_resources.t b/t/get_recipe_resources.t index 4c859cc77..cefafa0c8 100644 
--- a/t/get_recipe_resources.t +++ b/t/get_recipe_resources.t @@ -50,8 +50,9 @@ diag( q{Test get_recipe_resources from Parameter.pm} test_log( {} ); ## Given a recipe name and active parameter hash -my %active_parameter = test_mip_hashes( { mip_hash_name => q{active_parameter}, recipe_name => q{deepvariant}, } ); -my $recipe_name = q{deepvariant}; +my %active_parameter = + test_mip_hashes( { mip_hash_name => q{active_parameter}, recipe_name => q{deepvariant}, } ); +my $recipe_name = q{deepvariant}; my %recipe_resource = get_recipe_resources( { @@ -64,9 +65,9 @@ my %recipe_resource = get_recipe_resources( my %expected = ( core_number => 35, gpu_number => 1, + load_env_ref => [qw{ conda activate test }], memory => 175, time => 10, - load_env_ref => [qw{conda activate test }], ); is_deeply( \%recipe_resource, \%expected, q{Got recipe resource hash} ); @@ -129,4 +130,5 @@ $recipe_memory = get_recipe_resources( ## Then return the recipe ram memory Readonly my $CORE_MEMORY_1 => 1; is( $recipe_memory, $CORE_MEMORY_1, q{Got core memory} ); + done_testing(); diff --git a/t/parse_recipe_prerequisites.t b/t/parse_recipe_prerequisites.t new file mode 100644 index 000000000..42491ab30 --- /dev/null +++ b/t/parse_recipe_prerequisites.t @@ -0,0 +1,82 @@ +#! 
/usr/bin/env perl + +use 5.026; +use Carp; +use charnames qw{ :full :short }; +use English qw{ -no_match_vars }; +use File::Basename qw{ dirname }; +use File::Spec::Functions qw{ catdir }; +use FindBin qw{ $Bin }; +use open qw{ :encoding(UTF-8) :std }; +use Params::Check qw{ allow check last_error }; +use Test::More; +use utf8; +use warnings qw{ FATAL utf8 }; + +## CPANM +use autodie qw { :all }; +use Modern::Perl qw{ 2018 }; + +## MIPs lib/ +use lib catdir( dirname($Bin), q{lib} ); +use MIP::Constants qw{ $COMMA $SPACE }; +use MIP::Test::Fixtures qw{ test_log test_mip_hashes }; + +BEGIN { + + use MIP::Test::Fixtures qw{ test_import }; + +### Check all internal dependency modules and imports +## Modules with import + my %perl_module = ( + q{MIP::Recipe} => [qw{ parse_recipe_prerequisites }], + q{MIP::Test::Fixtures} => [qw{ test_log test_mip_hashes }], + ); + + test_import( { perl_module_href => \%perl_module, } ); +} + +use MIP::Recipe qw{ parse_recipe_prerequisites }; + +diag( q{Test parse_recipe_prerequisites from Recipe.pm} + . $COMMA + . $SPACE . q{Perl} + . $SPACE + . $PERL_VERSION + . $SPACE + . 
$EXECUTABLE_NAME ); + +test_log( {} ); + +## Given a recipe name and active parameter hash +my $recipe_name = q{deepvariant}; +my %active_parameter = + test_mip_hashes( { mip_hash_name => q{active_parameter}, recipe_name => $recipe_name, } ); + +## Given a parameter hash +my %parameter = + test_mip_hashes( { mip_hash_name => q{define_parameter}, recipe_name => $recipe_name, } ); +$parameter{$recipe_name}{chain} = q{TEST}; + +## When parsing recipe prerequisites +my %recipe = parse_recipe_prerequisites( + { + active_parameter_href => \%active_parameter, + parameter_href => \%parameter, + recipe_name => $recipe_name, + } +); + +## Then return recipe prerequisites hash +my %expected_recipe = ( + core_number => 35, + gpu_number => 1, + job_id_chain => q{TEST}, + load_env_ref => [qw{ conda activate test }], + memory => 175, + time => 10, +); + +is_deeply( \%recipe, \%expected_recipe, q{Got recipe prerequisites hash} ); + +done_testing(); diff --git a/templates/code/test.t b/templates/code/test.t index bfbb1338b..6aa123094 100644 --- a/templates/code/test.t +++ b/templates/code/test.t @@ -21,17 +21,13 @@ use Modern::Perl qw{ 2018 }; use lib catdir( dirname($Bin), q{lib} ); use MIP::Constants qw{ $COMMA $SPACE }; - BEGIN { use MIP::Test::Fixtures qw{ test_import }; ### Check all internal dependency modules and imports ## Modules with import - my %perl_module = ( - q{MIP::PATH::TO::MODULE} => [qw{ SUB_ROUTINE }], - - ); + my %perl_module = ( q{MIP::PATH::TO::MODULE} => [qw{ SUB_ROUTINE }], ); test_import( { perl_module_href => \%perl_module, } ); } From b7014882cd697f06c89332077a87925307461b67 Mon Sep 17 00:00:00 2001 From: henrikstranneheim Date: Sun, 10 Jan 2021 07:37:56 +0100 Subject: [PATCH 2/7] feat(refactor): Moved sub get_recipe_attributes and get_recipe_resources - Added cascading ternary operator - Refactor test --- lib/MIP/Active_parameter.pm | 81 +++++++++++++++++++++ lib/MIP/Get/Parameter.pm | 138 ------------------------------------ lib/MIP/Parameter.pm | 49 
+++++++++++++ lib/MIP/Recipe.pm | 3 +- t/get_recipe_attributes.t | 30 +++++++- t/get_recipe_resources.t | 21 +++--- 6 files changed, 171 insertions(+), 151 deletions(-) diff --git a/lib/MIP/Active_parameter.pm b/lib/MIP/Active_parameter.pm index 253a733cc..9b7346894 100644 --- a/lib/MIP/Active_parameter.pm +++ b/lib/MIP/Active_parameter.pm @@ -42,6 +42,7 @@ BEGIN { get_not_allowed_temp_dirs get_package_env_attributes get_package_env_cmds + get_recipe_resources get_user_supplied_pedigree_parameter parse_infiles parse_recipe_resources @@ -1055,6 +1056,86 @@ sub get_package_env_cmds { return @env_method_cmds; } +sub get_recipe_resources { + +## Function : Return recipe resources +## Returns : $recipe_resource | %recipe_resource +## Arguments: $active_parameter_href => The active parameters for this analysis hash {REF} +## : $recipe_name => Recipe name +## : $recipe_resource => Recipe parameter key + + my ($arg_href) = @_; + + ## Flatten argument(s) + my $active_parameter_href; + my $recipe_name; + my $resource; + + my $tmpl = { + active_parameter_href => { + default => {}, + defined => 1, + required => 1, + store => \$active_parameter_href, + strict_type => 1, + }, + recipe_name => { + defined => 1, + required => 1, + store => \$recipe_name, + strict_type => 1, + }, + resource => { + allow => [qw{ core_number gpu_number load_env_ref memory time }], + store => \$resource, + strict_type => 1, + }, + }; + + check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; + + use MIP::Active_parameter qw{ get_package_env_cmds }; + use MIP::Environment::Cluster qw{ check_recipe_memory_allocation }; + + ## Initilize variable + my @environment_cmds = get_package_env_cmds( + { + active_parameter_href => $active_parameter_href, + package_name => $recipe_name, + } + ); + + my $core_number = $active_parameter_href->{recipe_core_number}{$recipe_name}; + my $process_memory = $active_parameter_href->{recipe_memory}{$recipe_name}; + my $core_ram_memory = 
$active_parameter_href->{core_ram_memory}; + + my $memory = + ( $process_memory and $core_number ) ? $process_memory * $core_number + : ( not $process_memory and $core_number ) ? $core_number * $core_ram_memory + : ( not $process_memory and not $core_number ) ? $core_ram_memory + : $process_memory; + + check_recipe_memory_allocation( + { + node_ram_memory => $core_ram_memory, + recipe_memory_allocation => $memory, + } + ); + + my %recipe_resource = ( + core_number => $core_number, + gpu_number => $active_parameter_href->{recipe_gpu_number}{$recipe_name}, + load_env_ref => \@environment_cmds, + memory => $memory, + time => $active_parameter_href->{recipe_time}{$recipe_name}, + ); + + return $recipe_resource{$resource} if ( defined $resource && $resource ); + + return %recipe_resource; + +} + sub get_user_supplied_pedigree_parameter { ## Function : Detect if user supplied info on parameters otherwise collected from pedigree diff --git a/lib/MIP/Get/Parameter.pm b/lib/MIP/Get/Parameter.pm index 0dc03baca..f36dfad00 100644 --- a/lib/MIP/Get/Parameter.pm +++ b/lib/MIP/Get/Parameter.pm @@ -17,146 +17,8 @@ BEGIN { # Functions and variables which can be optionally exported our @EXPORT_OK = qw{ - get_recipe_resources - get_recipe_attributes - }; -} - -sub get_recipe_attributes { - -## Function : Return recipe attributes -## Returns : $attribute | %attribute -## Arguments: $attribute => Attribute key -## : $parameter_href => Holds all parameters -## : $recipe_name => Recipe name - - my ($arg_href) = @_; - - ## Flatten argument(s) - my $attribute; - my $parameter_href; - my $recipe_name; - - my $tmpl = { - attribute => { - store => \$attribute, - strict_type => 1, - }, - parameter_href => { - default => {}, - defined => 1, - required => 1, - store => \$parameter_href, - strict_type => 1, - }, - recipe_name => { - defined => 1, - required => 1, - strict_type => 1, - store => \$recipe_name, - }, - }; - - check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; - - 
## Get attribute value - return $parameter_href->{$recipe_name}{$attribute} if ( defined $attribute and $attribute ); - - ## Get recipe attribute hash - return %{ $parameter_href->{$recipe_name} }; -} - -sub get_recipe_resources { -## Function : Return recipe resources -## Returns : $recipe_resource | %recipe_resource -## Arguments: $active_parameter_href => The active parameters for this analysis hash {REF} -## : $recipe_name => Recipe name -## : $recipe_resource => Recipe parameter key - - my ($arg_href) = @_; - - ## Flatten argument(s) - my $active_parameter_href; - my $recipe_name; - my $resource; - - my $tmpl = { - active_parameter_href => { - default => {}, - defined => 1, - required => 1, - store => \$active_parameter_href, - strict_type => 1, - }, - recipe_name => { - defined => 1, - required => 1, - store => \$recipe_name, - strict_type => 1, - }, - resource => { - allow => [qw{ core_number gpu_number load_env_ref memory time }], - store => \$resource, - strict_type => 1, - }, }; - - check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; - - use MIP::Active_parameter qw{ get_package_env_cmds }; - use MIP::Environment::Cluster qw{ check_recipe_memory_allocation }; - - ## Initilize variable - my @source_environment_cmds = get_package_env_cmds( - { - active_parameter_href => $active_parameter_href, - package_name => $recipe_name, - } - ); - - my $core_number = $active_parameter_href->{recipe_core_number}{$recipe_name}; - my $process_memory = $active_parameter_href->{recipe_memory}{$recipe_name}; - my $memory; - - ## Multiply memory with processes that are to be launched in the recipe - if ( $process_memory and $core_number ) { - $memory = $process_memory * $core_number; - } - ## Set default recipe memory allocation if it hasn't been specified - elsif ( not $process_memory and $core_number ) { - $memory = $core_number * $active_parameter_href->{core_ram_memory}; - } - elsif ( not $process_memory and not $core_number ) { - $memory = 
$active_parameter_href->{core_ram_memory}; - } - else { - $memory = $process_memory; - } - - check_recipe_memory_allocation( - { - node_ram_memory => $active_parameter_href->{node_ram_memory}, - recipe_memory_allocation => $memory, - } - ); - - my %recipe_resource = ( - core_number => $core_number, - gpu_number => $active_parameter_href->{recipe_gpu_number}{$recipe_name}, - load_env_ref => \@source_environment_cmds, - memory => $memory, - time => $active_parameter_href->{recipe_time}{$recipe_name}, - ); - - ## Return specified recipe resource - if ( defined $resource && $resource ) { - return $recipe_resource{$resource}; - } - - ## Return recipe resource hash - return %recipe_resource; - } 1; diff --git a/lib/MIP/Parameter.pm b/lib/MIP/Parameter.pm index 441d8cc6c..fb1fc3e74 100644 --- a/lib/MIP/Parameter.pm +++ b/lib/MIP/Parameter.pm @@ -32,6 +32,7 @@ BEGIN { get_order_of_parameters get_parameter_attribute get_program_executables + get_recipe_attributes parse_reference_path parse_parameter_files parse_parameter_recipe_names @@ -422,6 +423,54 @@ sub get_program_executables { return uniq(@program_executables); } +sub get_recipe_attributes { + +## Function : Return recipe attributes +## Returns : $attribute | %attribute +## Arguments: $attribute => Attribute key +## : $parameter_href => Holds all parameters +## : $recipe_name => Recipe name + + my ($arg_href) = @_; + + ## Flatten argument(s) + my $attribute; + my $parameter_href; + my $recipe_name; + + my $tmpl = { + attribute => { + store => \$attribute, + strict_type => 1, + }, + parameter_href => { + default => {}, + defined => 1, + required => 1, + store => \$parameter_href, + strict_type => 1, + }, + recipe_name => { + defined => 1, + required => 1, + strict_type => 1, + store => \$recipe_name, + }, + }; + + check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; + + if ( not exists $parameter_href->{$recipe_name} ) { + croak(qq{Recipe name: $recipe_name. 
Does not exists in parameter hash}); + } + + ## Get attribute value + return $parameter_href->{$recipe_name}{$attribute} if ( defined $attribute and $attribute ); + + ## Get recipe attribute hash + return %{ $parameter_href->{$recipe_name} }; +} + sub parse_parameter_files { ## Function : Parse parameter file objects and checks that their paths exist diff --git a/lib/MIP/Recipe.pm b/lib/MIP/Recipe.pm index f07afb818..77c339ce2 100644 --- a/lib/MIP/Recipe.pm +++ b/lib/MIP/Recipe.pm @@ -62,7 +62,8 @@ sub parse_recipe_prerequisites { check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; - use MIP::Get::Parameter qw{ get_recipe_attributes get_recipe_resources }; + use MIP::Get::Parameter qw{ get_recipe_resources }; + use MIP::Parameter qw{ get_recipe_attributes }; my %recipe_resource = get_recipe_resources( { diff --git a/t/get_recipe_attributes.t b/t/get_recipe_attributes.t index 4222fd640..7f8a29f88 100644 --- a/t/get_recipe_attributes.t +++ b/t/get_recipe_attributes.t @@ -16,6 +16,7 @@ use warnings qw{ FATAL utf8 }; ## CPANM use autodie qw { :all }; use Modern::Perl qw{ 2018 }; +use Test::Trap; ## MIPs lib/ use lib catdir( dirname($Bin), q{lib} ); @@ -29,14 +30,14 @@ BEGIN { ### Check all internal dependency modules and imports ## Modules with import my %perl_module = ( - q{MIP::Get::Parameter} => [qw{ get_recipe_attributes }], + q{MIP::Parameter} => [qw{ get_recipe_attributes }], q{MIP::Test::Fixtures} => [qw{ test_mip_hashes }], ); test_import( { perl_module_href => \%perl_module, } ); } -use MIP::Get::Parameter qw{ get_recipe_attributes }; +use MIP::Parameter qw{ get_recipe_attributes }; diag( q{Test get_recipe_attributes from Parameter.pm} . $COMMA @@ -47,7 +48,30 @@ diag( q{Test get_recipe_attributes from Parameter.pm} . 
$EXECUTABLE_NAME ); ## Given a program parameter -my %parameter = test_mip_hashes( { mip_hash_name => q{define_parameter}, } ); +my %parameter = test_mip_hashes( { mip_hash_name => q{define_parameter}, } ); + +## Given a recipe name that does not exist +my $faulty_recipe_name = q{recipe_name_does_not_exist}; + +## When getting the recipe attributes +trap { + get_recipe_attributes( + { + parameter_href => \%parameter, + recipe_name => $faulty_recipe_name, + } + ) +}; + +## Then exit and throw FATAL log message +is( $trap->leaveby, q{die}, q{Exit if the recipe name cannot be found} ); +like( + $trap->die, + qr/Does \s+ not \s+ exists \s+ in \s+ parameter \s+ hash/xms, + q{Throw error msg if the recipe name cannot be found} +); + +## Given a recipe name my $recipe_name = q{bwa_mem}; my %rec_atr = get_recipe_attributes( diff --git a/t/get_recipe_resources.t b/t/get_recipe_resources.t index cefafa0c8..6567b8a91 100644 --- a/t/get_recipe_resources.t +++ b/t/get_recipe_resources.t @@ -30,16 +30,16 @@ BEGIN { ### Check all internal dependency modules and imports ## Modules with import my %perl_module = ( - q{MIP::Get::Parameter} => [qw{ get_recipe_resources }], - q{MIP::Test::Fixtures} => [qw{ test_log test_mip_hashes }], + q{MIP::Active_parameter} => [qw{ get_recipe_resources }], + q{MIP::Test::Fixtures} => [qw{ test_log test_mip_hashes }], ); test_import( { perl_module_href => \%perl_module, } ); } -use MIP::Get::Parameter qw{ get_recipe_resources }; +use MIP::Active_parameter qw{ get_recipe_resources }; -diag( q{Test get_recipe_resources from Parameter.pm} +diag( q{Test get_recipe_resources from Active_parameter.pm} . $COMMA . $SPACE . q{Perl} . $SPACE @@ -47,7 +47,12 @@ diag( q{Test get_recipe_resources from Parameter.pm} . $SPACE . 
$EXECUTABLE_NAME ); -test_log( {} ); +## Constants +Readonly my $CORE_MEMORY => 5; +Readonly my $DEFAULT_MEMORY => 175; +Readonly my $RECIPE_CORE_MEMORY => 1; + +test_log( { no_screen => 1, } ); ## Given a recipe name and active parameter hash my %active_parameter = @@ -74,6 +79,7 @@ is_deeply( \%recipe_resource, \%expected, q{Got recipe resource hash} ); ## Given a request for a specific resource RESOURCE: foreach my $resource ( keys %expected ) { + my $recipe_resource = get_recipe_resources( { active_parameter_href => \%active_parameter, @@ -98,7 +104,6 @@ my $recipe_memory = get_recipe_resources( ); ## Then return 5 gigs times the number of cores -Readonly my $DEFAULT_MEMORY => 175; is( $recipe_memory, $DEFAULT_MEMORY, q{Got default memory} ); ## Given a recipe that lacks memory and core specification @@ -113,7 +118,6 @@ $recipe_memory = get_recipe_resources( ); ## Then return the core ram memory -Readonly my $CORE_MEMORY => 5; is( $recipe_memory, $CORE_MEMORY, q{Got core memory} ); ## Given a recipe that lacks core specification but has a memory specification @@ -128,7 +132,6 @@ $recipe_memory = get_recipe_resources( ); ## Then return the recipe ram memory -Readonly my $CORE_MEMORY_1 => 1; -is( $recipe_memory, $CORE_MEMORY_1, q{Got core memory} ); +is( $recipe_memory, $RECIPE_CORE_MEMORY, q{Got core memory} ); done_testing(); From 09fcb9371c455413bd881f5f7d3d743bd5efded9 Mon Sep 17 00:00:00 2001 From: henrikstranneheim Date: Sun, 10 Jan 2021 07:41:26 +0100 Subject: [PATCH 3/7] fix(test): --- lib/MIP/Recipe.pm | 2 +- t/parse_recipe_prerequisites.t | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/MIP/Recipe.pm b/lib/MIP/Recipe.pm index 77c339ce2..baf2a8c4d 100644 --- a/lib/MIP/Recipe.pm +++ b/lib/MIP/Recipe.pm @@ -62,7 +62,7 @@ sub parse_recipe_prerequisites { check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; - use MIP::Get::Parameter qw{ get_recipe_resources }; + use MIP::Active_parameter qw{ get_recipe_resources }; use 
MIP::Parameter qw{ get_recipe_attributes }; my %recipe_resource = get_recipe_resources( diff --git a/t/parse_recipe_prerequisites.t b/t/parse_recipe_prerequisites.t index 42491ab30..90dc03764 100644 --- a/t/parse_recipe_prerequisites.t +++ b/t/parse_recipe_prerequisites.t @@ -46,7 +46,7 @@ diag( q{Test parse_recipe_prerequisites from Recipe.pm} . $SPACE . $EXECUTABLE_NAME ); -test_log( {} ); +test_log( { no_screen => 1, } ); ## Given a recipe name and active parameter hash my $recipe_name = q{deepvariant}; From 116478749bd608f4c273803a7ff77f2eeb77ee39 Mon Sep 17 00:00:00 2001 From: henrikstranneheim Date: Sun, 10 Jan 2021 07:46:59 +0100 Subject: [PATCH 4/7] feat(recipe_mode): Added recipe_mode --- lib/MIP/Active_parameter.pm | 3 ++- t/get_recipe_resources.t | 1 + t/parse_recipe_prerequisites.t | 1 + 3 files changed, 4 insertions(+), 1 deletion(-) diff --git a/lib/MIP/Active_parameter.pm b/lib/MIP/Active_parameter.pm index 9b7346894..59acf64a1 100644 --- a/lib/MIP/Active_parameter.pm +++ b/lib/MIP/Active_parameter.pm @@ -1086,7 +1086,7 @@ sub get_recipe_resources { strict_type => 1, }, resource => { - allow => [qw{ core_number gpu_number load_env_ref memory time }], + allow => [qw{ core_number gpu_number load_env_ref memory mode time }], store => \$resource, strict_type => 1, }, @@ -1127,6 +1127,7 @@ sub get_recipe_resources { gpu_number => $active_parameter_href->{recipe_gpu_number}{$recipe_name}, load_env_ref => \@environment_cmds, memory => $memory, + mode => $active_parameter_href->{$recipe_name}, time => $active_parameter_href->{recipe_time}{$recipe_name}, ); diff --git a/t/get_recipe_resources.t b/t/get_recipe_resources.t index 6567b8a91..60b2963db 100644 --- a/t/get_recipe_resources.t +++ b/t/get_recipe_resources.t @@ -72,6 +72,7 @@ my %expected = ( gpu_number => 1, load_env_ref => [qw{ conda activate test }], memory => 175, + mode => 2, time => 10, ); is_deeply( \%recipe_resource, \%expected, q{Got recipe resource hash} ); diff --git 
a/t/parse_recipe_prerequisites.t b/t/parse_recipe_prerequisites.t index 90dc03764..cbe138d8f 100644 --- a/t/parse_recipe_prerequisites.t +++ b/t/parse_recipe_prerequisites.t @@ -74,6 +74,7 @@ my %expected_recipe = ( job_id_chain => q{TEST}, load_env_ref => [qw{ conda activate test }], memory => 175, + mode => 2, time => 10, ); From d6d1b472c899624b6442d8c854acd71ead8a8a41 Mon Sep 17 00:00:00 2001 From: henrikstranneheim Date: Sun, 10 Jan 2021 12:15:09 +0100 Subject: [PATCH 5/7] feat(recipe): Use parse_recipe_prerequisites to collect recipe prereqs and get recipe hash as single point of entry. --- lib/MIP/Get/Parameter.pm | 24 -- lib/MIP/Parse/File.pm | 2 +- lib/MIP/Recipe.pm | 24 +- lib/MIP/Recipes/Analysis/Analysisrunstatus.pm | 23 +- lib/MIP/Recipes/Analysis/Arriba.pm | 78 ++--- lib/MIP/Recipes/Analysis/Bcftools_merge.pm | 52 ++- lib/MIP/Recipes/Analysis/Blobfish.pm | 69 ++-- lib/MIP/Recipes/Analysis/BootstrapAnn.pm | 40 +-- lib/MIP/Recipes/Analysis/Bwa_mem.pm | 119 +++---- lib/MIP/Recipes/Analysis/Cadd.pm | 125 +++----- lib/MIP/Recipes/Analysis/Chanjo_sex_check.pm | 37 +-- lib/MIP/Recipes/Analysis/Chromograph.pm | 133 ++++---- lib/MIP/Recipes/Analysis/Cnvnator.pm | 70 ++--- lib/MIP/Recipes/Analysis/Deepvariant.pm | 47 ++- lib/MIP/Recipes/Analysis/Delly_call.pm | 37 +-- lib/MIP/Recipes/Analysis/Delly_reformat.pm | 70 ++--- lib/MIP/Recipes/Analysis/Dragen_dna.pm | 101 +++--- .../Analysis/Endvariantannotationblock.pm | 83 ++--- lib/MIP/Recipes/Analysis/Expansionhunter.pm | 90 +++--- lib/MIP/Recipes/Analysis/Fastqc.pm | 44 ++- lib/MIP/Recipes/Analysis/Frequency_filter.pm | 89 +++--- .../Recipes/Analysis/Gatk_asereadcounter.pm | 37 +-- .../Analysis/Gatk_baserecalibration.pm | 91 +++--- .../Recipes/Analysis/Gatk_cnnscorevariants.pm | 45 +-- .../Analysis/Gatk_combinevariantcallsets.pm | 46 ++- lib/MIP/Recipes/Analysis/Gatk_gathervcfs.pm | 48 ++- .../Recipes/Analysis/Gatk_genotypegvcfs.pm | 92 +++--- .../Recipes/Analysis/Gatk_haplotypecaller.pm | 52 ++- 
.../Recipes/Analysis/Gatk_splitncigarreads.pm | 47 ++- .../Recipes/Analysis/Gatk_variantevalall.pm | 61 ++-- .../Recipes/Analysis/Gatk_variantevalexome.pm | 61 ++-- .../Analysis/Gatk_variantfiltration.pm | 47 ++- .../Analysis/Gatk_variantrecalibration.pm | 161 ++++------ lib/MIP/Recipes/Analysis/Genebody_coverage.pm | 46 ++- lib/MIP/Recipes/Analysis/Gffcompare.pm | 49 ++- lib/MIP/Recipes/Analysis/Glnexus.pm | 53 ++-- lib/MIP/Recipes/Analysis/Gzip_fastq.pm | 25 +- lib/MIP/Recipes/Analysis/Manta.pm | 57 ++-- lib/MIP/Recipes/Analysis/Markduplicates.pm | 83 ++--- lib/MIP/Recipes/Analysis/Mip_qccollect.pm | 25 +- lib/MIP/Recipes/Analysis/Mip_vcfparser.pm | 296 +++++++----------- lib/MIP/Recipes/Analysis/Mip_vercollect.pm | 25 +- lib/MIP/Recipes/Analysis/Multiqc.pm | 28 +- lib/MIP/Recipes/Analysis/Peddy.pm | 37 +-- .../Analysis/Picardtools_collecthsmetrics.pm | 27 +- .../Picardtools_collectmultiplemetrics.pm | 44 +-- .../Picardtools_collectrnaseqmetrics.pm | 42 +-- .../Analysis/Picardtools_mergesamfiles.pm | 27 +- lib/MIP/Recipes/Analysis/Plink.pm | 29 +- .../Prepareforvariantannotationblock.pm | 37 +-- lib/MIP/Recipes/Analysis/Preseq.pm | 37 +-- lib/MIP/Recipes/Analysis/Rankvariant.pm | 245 ++++++--------- lib/MIP/Recipes/Analysis/Rhocall.pm | 98 +++--- lib/MIP/Recipes/Analysis/Rseqc.pm | 58 ++-- lib/MIP/Recipes/Analysis/Rtg_vcfeval.pm | 27 +- lib/MIP/Recipes/Analysis/Sacct.pm | 23 +- lib/MIP/Recipes/Analysis/Salmon_quant.pm | 43 ++- lib/MIP/Recipes/Analysis/Sambamba_depth.pm | 37 +-- lib/MIP/Recipes/Analysis/Samtools_merge.pm | 54 ++-- .../Recipes/Analysis/Samtools_subsample_mt.pm | 40 +-- .../Recipes/Analysis/Smncopynumbercaller.pm | 47 ++- lib/MIP/Recipes/Analysis/Split_fastq_file.pm | 28 +- lib/MIP/Recipes/Analysis/Star_aln.pm | 152 ++++----- lib/MIP/Recipes/Analysis/Star_caller.pm | 39 +-- lib/MIP/Recipes/Analysis/Star_fusion.pm | 48 ++- lib/MIP/Recipes/Analysis/Stringtie.pm | 51 ++- lib/MIP/Recipes/Analysis/Sv_annotate.pm | 83 +++-- 
.../Analysis/Sv_combinevariantcallsets.pm | 60 ++-- lib/MIP/Recipes/Analysis/Sv_reformat.pm | 56 ++-- lib/MIP/Recipes/Analysis/Telomerecat.pm | 40 +-- lib/MIP/Recipes/Analysis/Tiddit.pm | 55 ++-- lib/MIP/Recipes/Analysis/Tiddit_coverage.pm | 45 ++- lib/MIP/Recipes/Analysis/Trim_galore.pm | 59 ++-- lib/MIP/Recipes/Analysis/Upd.pm | 48 ++- lib/MIP/Recipes/Analysis/Varg.pm | 37 +-- .../Recipes/Analysis/Variant_annotation.pm | 57 ++-- lib/MIP/Recipes/Analysis/Vcf2cytosure.pm | 76 ++--- lib/MIP/Recipes/Analysis/Vcf_ase_reformat.pm | 37 +-- .../Recipes/Analysis/Vcf_rerun_reformat.pm | 78 ++--- lib/MIP/Recipes/Analysis/Vep.pm | 100 +++--- lib/MIP/Recipes/Analysis/Vt.pm | 85 +++-- lib/MIP/Recipes/Analysis/Vt_core.pm | 52 ++- lib/MIP/Recipes/Build/Bwa_prerequisites.pm | 28 +- .../Build/Capture_file_prerequisites.pm | 12 +- .../Build/Human_genome_prerequisites.pm | 12 +- lib/MIP/Recipes/Build/Rtg_prerequisites.pm | 13 +- .../Build/Salmon_quant_prerequisites.pm | 19 +- .../Build/Star_fusion_prerequisites.pm | 15 +- lib/MIP/Recipes/Build/Star_prerequisites.pm | 19 +- .../Transcript_annotation_prerequisites.pm | 13 +- lib/MIP/Recipes/Download/1000g_indels.pm | 19 +- lib/MIP/Recipes/Download/1000g_omni.pm | 19 +- lib/MIP/Recipes/Download/1000g_sites.pm | 21 +- lib/MIP/Recipes/Download/1000g_snps.pm | 19 +- .../Download/Cadd_offline_annotations.pm | 21 +- .../Recipes/Download/Cadd_to_vcf_header.pm | 21 +- .../Download/Cadd_whole_genome_snvs.pm | 19 +- .../Recipes/Download/Chromograph_cytoband.pm | 21 +- lib/MIP/Recipes/Download/Clinvar.pm | 21 +- lib/MIP/Recipes/Download/Ctat_resource_lib.pm | 21 +- lib/MIP/Recipes/Download/Dbnsfp.pm | 21 +- lib/MIP/Recipes/Download/Dbsnp.pm | 19 +- lib/MIP/Recipes/Download/Delly_exclude.pm | 21 +- lib/MIP/Recipes/Download/Expansionhunter.pm | 21 +- .../Download/Gatk_mitochondrial_ref.pm | 21 +- .../Recipes/Download/Genbank_haplogroup.pm | 21 +- .../Recipes/Download/Gencode_annotation.pm | 21 +- lib/MIP/Recipes/Download/Genomic_superdups.pm | 44 
++- lib/MIP/Recipes/Download/Giab.pm | 21 +- lib/MIP/Recipes/Download/Gnomad.pm | 58 ++-- .../Recipes/Download/Gnomad_pli_per_gene.pm | 21 +- lib/MIP/Recipes/Download/Hapmap.pm | 19 +- lib/MIP/Recipes/Download/Human_reference.pm | 48 ++- .../Recipes/Download/Manta_call_regions.pm | 21 +- .../Download/Mills_and_1000g_indels.pm | 23 +- lib/MIP/Recipes/Download/Pfam.pm | 40 ++- lib/MIP/Recipes/Download/Rank_model.pm | 19 +- .../Recipes/Download/Reduced_penetrance.pm | 21 +- lib/MIP/Recipes/Download/Runstatus.pm | 19 +- lib/MIP/Recipes/Download/Scout_exons.pm | 21 +- lib/MIP/Recipes/Download/Sv_vcfanno_config.pm | 21 +- lib/MIP/Recipes/Download/Svrank_model.pm | 19 +- .../Vcf2cytosure_blacklist_regions.pm | 21 +- lib/MIP/Recipes/Download/Vcfanno_config.pm | 21 +- lib/MIP/Recipes/Download/Vcfanno_functions.pm | 21 +- t/build_capture_file_prerequisites.t | 4 +- t/build_transcript_annotation_prerequisites.t | 6 +- t/data/test_data/recipe_active_parameter.yaml | 1 + t/data/test_data/recipe_parameter.yaml | 2 + t/parse_recipe_prerequisites.t | 22 +- 130 files changed, 2465 insertions(+), 3596 deletions(-) delete mode 100644 lib/MIP/Get/Parameter.pm diff --git a/lib/MIP/Get/Parameter.pm b/lib/MIP/Get/Parameter.pm deleted file mode 100644 index f36dfad00..000000000 --- a/lib/MIP/Get/Parameter.pm +++ /dev/null @@ -1,24 +0,0 @@ -package MIP::Get::Parameter; - -use 5.026; -use Carp; -use charnames qw{ :full :short }; -use English qw{ -no_match_vars }; -use File::Spec::Functions qw{ catdir }; -use open qw{ :encoding(UTF-8) :std }; -use Params::Check qw{ allow check last_error }; -use utf8; -use warnings; -use warnings qw{ FATAL utf8 }; - -BEGIN { - require Exporter; - use base qw{ Exporter }; - - # Functions and variables which can be optionally exported - our @EXPORT_OK = qw{ - - }; -} - -1; diff --git a/lib/MIP/Parse/File.pm b/lib/MIP/Parse/File.pm index 45297b0ad..d9b91a3b4 100644 --- a/lib/MIP/Parse/File.pm +++ b/lib/MIP/Parse/File.pm @@ -134,7 +134,7 @@ sub parse_io_outfiles 
{ check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; use MIP::Get::File qw{ get_io_files }; - use MIP::Get::Parameter qw{ get_recipe_attributes }; + use MIP::Parameter qw{ get_recipe_attributes }; use MIP::Set::File qw{ set_io_files }; my @file_paths = @{$file_paths_ref}; diff --git a/lib/MIP/Recipe.pm b/lib/MIP/Recipe.pm index baf2a8c4d..74019ec18 100644 --- a/lib/MIP/Recipe.pm +++ b/lib/MIP/Recipe.pm @@ -48,7 +48,6 @@ sub parse_recipe_prerequisites { parameter_href => { default => {}, defined => 1, - required => 1, store => \$parameter_href, strict_type => 1, }, @@ -71,13 +70,24 @@ sub parse_recipe_prerequisites { recipe_name => $recipe_name, } ); - $recipe_resource{job_id_chain} = get_recipe_attributes( - { - parameter_href => $parameter_href, - recipe_name => $recipe_name, - attribute => q{chain}, - } + + return %recipe_resource if ( not %{$parameter_href} ); + + my %attribute_map = ( + chain => q{job_id_chain}, + file_tag => q{file_tag}, + outfile_suffix => q{outfile_suffix}, ); + while ( my ( $attribute, $resource ) = each %attribute_map ) { + + $recipe_resource{$resource} = get_recipe_attributes( + { + parameter_href => $parameter_href, + recipe_name => $recipe_name, + attribute => $attribute, + } + ); + } return %recipe_resource; } diff --git a/lib/MIP/Recipes/Analysis/Analysisrunstatus.pm b/lib/MIP/Recipes/Analysis/Analysisrunstatus.pm index 767a2e3dc..6768f6405 100644 --- a/lib/MIP/Recipes/Analysis/Analysisrunstatus.pm +++ b/lib/MIP/Recipes/Analysis/Analysisrunstatus.pm @@ -112,9 +112,9 @@ sub analysis_analysisrunstatus { check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; - use MIP::Get::Parameter qw{ get_recipe_resources }; use MIP::Language::Shell qw{ check_mip_process_paths }; use MIP::Processmanagement::Processes qw{ submit_recipe }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Sample_info qw{ get_path_entries }; use MIP::Script::Setup_script qw{ setup_script }; @@ -124,14 +124,11 @@ sub 
analysis_analysisrunstatus { ## Retrieve logger object my $log = Log::Log4perl->get_logger($LOG_NAME); - ## Set recipe mode - my $recipe_mode = $active_parameter_href->{$recipe_name}; - - ## Unpack parameters - my $job_id_chain = $parameter_href->{$recipe_name}{chain}; - my %recipe_resource = get_recipe_resources( +## Unpack parameters + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); @@ -144,15 +141,15 @@ sub analysis_analysisrunstatus { my ($recipe_file_path) = setup_script( { active_parameter_href => $active_parameter_href, - core_number => $recipe_resource{core_number}, + core_number => $recipe{core_number}, directory_id => $case_id, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory_allocation}, - process_time => $recipe_resource{time}, + memory_allocation => $recipe{memory_allocation}, + process_time => $recipe{time}, recipe_directory => $recipe_name, recipe_name => $recipe_name, - source_environment_commands_ref => $recipe_resource{load_env_ref}, + source_environment_commands_ref => $recipe{load_env_ref}, } ); @@ -235,14 +232,14 @@ sub analysis_analysisrunstatus { close $filehandle or $log->logcroak(q{Could not close filehandle}); - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { submit_recipe( { base_command => $profile_base_command, dependency_method => q{add_to_all}, job_dependency_type => q{afterok}, - job_id_chain => $job_id_chain, + job_id_chain => $recipe{job_id_chain}, job_id_href => $job_id_href, job_reservation_name => $active_parameter_href->{job_reservation_name}, log => $log, diff --git a/lib/MIP/Recipes/Analysis/Arriba.pm b/lib/MIP/Recipes/Analysis/Arriba.pm index 8ec9262d0..21c6d67d4 100644 --- a/lib/MIP/Recipes/Analysis/Arriba.pm +++ b/lib/MIP/Recipes/Analysis/Arriba.pm @@ -17,8 +17,7 @@ use autodie qw{ :all }; use Readonly; ## MIPs lib/ -use MIP::Constants - qw{ $COMMA 
$DOT $EMPTY_STR $LOG_NAME $NEWLINE $PIPE $SPACE $UNDERSCORE }; +use MIP::Constants qw{ $COMMA $DOT $EMPTY_STR $LOG_NAME $NEWLINE $PIPE $SPACE $UNDERSCORE }; BEGIN { @@ -135,13 +134,13 @@ sub analysis_arriba { use MIP::File_info qw{ get_sample_file_attribute }; use MIP::Get::File qw{ get_io_files }; - use MIP::Get::Parameter qw{ get_recipe_attributes get_recipe_resources }; use MIP::Program::Gnu::Coreutils qw{ gnu_rm gnu_tee }; use MIP::Parse::File qw{ parse_io_outfiles }; use MIP::Processmanagement::Processes qw{ submit_recipe }; use MIP::Program::Arriba qw{ arriba draw_fusions }; use MIP::Program::Sambamba qw{ sambamba_index sambamba_sort }; use MIP::Program::Star qw{ star_aln }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Sample_info qw{ get_rg_header_line set_file_path_to_store @@ -167,21 +166,20 @@ sub analysis_arriba { ); my @infile_paths = @{ $io{in}{file_paths} }; - my $recipe_mode = $active_parameter_href->{$recipe_name}; - - ## Build outfile_paths - my %rec_atr = get_recipe_attributes( +## Build outfile_paths + my %recipe = parse_recipe_prerequisites( { - parameter_href => $parameter_href, - recipe_name => $recipe_name, + active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, + recipe_name => $recipe_name, } ); - my $job_id_chain = $rec_atr{chain}; + my $outsample_directory = catdir( $active_parameter_href->{outdata_dir}, $sample_id, $recipe_name ); my $lanes_id = join $EMPTY_STR, @{ $file_info_href->{$sample_id}{lanes} }; my $outfile_tag = $file_info_href->{$sample_id}{$recipe_name}{file_tag}; - my $outfile_suffix = $rec_atr{outfile_suffix}; + my $outfile_suffix = $recipe{outfile_suffix}; my $outfile_path_prefix = catfile( $outsample_directory, $sample_id . $UNDERSCORE . q{lanes} . $UNDERSCORE . $lanes_id . 
$outfile_tag ); @@ -189,7 +187,7 @@ sub analysis_arriba { %io, parse_io_outfiles( { - chain_id => $job_id_chain, + chain_id => $recipe{job_id_chain}, id => $sample_id, file_info_href => $file_info_href, file_paths_ref => [ $outfile_path_prefix . $outfile_suffix ], @@ -201,13 +199,6 @@ sub analysis_arriba { my $outfile_name = ${ $io{out}{file_names} }[0]; my $outfile_path = $io{out}{file_path}; - my %recipe_resource = get_recipe_resources( - { - active_parameter_href => $active_parameter_href, - recipe_name => $recipe_name, - } - ); - ## Filehandles # Create anonymous filehandle my $filehandle = IO::Handle->new(); @@ -216,12 +207,12 @@ sub analysis_arriba { my ( $recipe_file_path, $recipe_info_path ) = setup_script( { active_parameter_href => $active_parameter_href, - core_number => $recipe_resource{core_number}, + core_number => $recipe{core_number}, directory_id => $sample_id, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, - process_time => $recipe_resource{time}, + memory_allocation => $recipe{memory}, + process_time => $recipe{time}, recipe_directory => $recipe_name, recipe_name => $recipe_name, temp_directory => $temp_directory, @@ -315,11 +306,11 @@ sub analysis_arriba { out_sam_attr_rgline => $out_sam_attr_rgline, out_sam_type => q{BAM Unsorted}, out_sam_unmapped => q{Within}, - pe_overlap_nbases_min => $active_parameter_href->{pe_overlap_nbases_min}, - quant_mode => q{-}, - stdout_data_type => q{BAM_Unsorted}, - thread_number => $recipe_resource{core_number}, - two_pass_mode => q{None}, + pe_overlap_nbases_min => $active_parameter_href->{pe_overlap_nbases_min}, + quant_mode => q{-}, + stdout_data_type => q{BAM_Unsorted}, + thread_number => $recipe{core_number}, + two_pass_mode => q{None}, }, ); push @arriba_commands, $PIPE; @@ -366,7 +357,7 @@ sub analysis_arriba { { filehandle => $filehandle, infile_path => $star_outfile_path, - memory_limit => $recipe_resource{memory} . 
q{G}, + memory_limit => $recipe{memory} . q{G}, outfile_path => $sorted_bam_file, temp_directory => $temp_directory, } @@ -394,14 +385,13 @@ sub analysis_arriba { my $report_path = $outfile_path_prefix . $DOT . q{pdf}; draw_fusions( { - alignment_file_path => $sorted_bam_file, - annotation_file_path => $active_parameter_href->{transcript_annotation}, - cytoband_file_path => $active_parameter_href->{arriba_cytoband_path}, - filehandle => $filehandle, - fusion_file_path => $outfile_path, - outfile_path => $report_path, - protein_domain_file_path => - $active_parameter_href->{arriba_protein_domain_path}, + alignment_file_path => $sorted_bam_file, + annotation_file_path => $active_parameter_href->{transcript_annotation}, + cytoband_file_path => $active_parameter_href->{arriba_cytoband_path}, + filehandle => $filehandle, + fusion_file_path => $outfile_path, + outfile_path => $report_path, + protein_domain_file_path => $active_parameter_href->{arriba_protein_domain_path}, } ); say {$filehandle} $NEWLINE; @@ -409,7 +399,7 @@ sub analysis_arriba { ## Close filehandle close $filehandle or $log->logcroak(q{Could not close filehandle}); - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { ## Collect QC metadata info for later use set_recipe_outfile_in_sample_info( @@ -454,13 +444,13 @@ sub analysis_arriba { submit_recipe( { - base_command => $profile_base_command, - case_id => $case_id, - dependency_method => q{sample_to_island}, - job_id_chain => $job_id_chain, - job_id_href => $job_id_href, - job_reservation_name => $active_parameter_href->{job_reservation_name}, - log => $log, + base_command => $profile_base_command, + case_id => $case_id, + dependency_method => q{sample_to_island}, + job_id_chain => $recipe{job_id_chain}, + job_id_href => $job_id_href, + job_reservation_name => $active_parameter_href->{job_reservation_name}, + log => $log, max_parallel_processes_count_href => $file_info_href->{max_parallel_processes_count}, recipe_file_path => $recipe_file_path, 
diff --git a/lib/MIP/Recipes/Analysis/Bcftools_merge.pm b/lib/MIP/Recipes/Analysis/Bcftools_merge.pm index 09dcceb7f..a10f05dc8 100644 --- a/lib/MIP/Recipes/Analysis/Bcftools_merge.pm +++ b/lib/MIP/Recipes/Analysis/Bcftools_merge.pm @@ -114,11 +114,11 @@ sub analysis_bcftools_merge { check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; use MIP::Get::File qw{ get_io_files }; - use MIP::Get::Parameter qw{ get_recipe_attributes get_recipe_resources }; use MIP::Program::Gnu::Coreutils qw{ gnu_cp }; use MIP::Parse::File qw{ parse_io_outfiles }; use MIP::Program::Bcftools qw{ bcftools_merge bcftools_view_and_index_vcf }; use MIP::Processmanagement::Processes qw{ submit_recipe }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Sample_info qw{ set_file_path_to_store set_recipe_outfile_in_sample_info }; use MIP::Script::Setup_script qw{ setup_script }; @@ -156,17 +156,10 @@ sub analysis_bcftools_merge { push @infile_path_prefixes, $io{in}{file_path_prefix}; } - my $job_id_chain = get_recipe_attributes( - { - parameter_href => $parameter_href, - recipe_name => $recipe_name, - attribute => q{chain}, - } - ); - my $recipe_mode = $active_parameter_href->{$recipe_name}; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); @@ -174,7 +167,7 @@ sub analysis_bcftools_merge { ## Set and get the io files per chain, id and stream my %io = parse_io_outfiles( { - chain_id => $job_id_chain, + chain_id => $recipe{job_id_chain}, id => $case_id, file_info_href => $file_info_href, file_name_prefixes_ref => [$case_id], @@ -192,15 +185,15 @@ sub analysis_bcftools_merge { ## Creates recipe directories (info & data & script), recipe script filenames and writes sbatch header my ( $recipe_file_path, $recipe_info_path ) = setup_script( { - active_parameter_href => $active_parameter_href, - core_number => 
$recipe_resource{core_number}, - directory_id => $case_id, - filehandle => $filehandle, - job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, - process_time => $recipe_resource{time}, - recipe_directory => $recipe_name, - recipe_name => $recipe_name, + active_parameter_href => $active_parameter_href, + core_number => $recipe{core_number}, + directory_id => $case_id, + filehandle => $filehandle, + job_id_href => $job_id_href, + memory_allocation => $recipe{memory}, + process_time => $recipe{time}, + recipe_directory => $recipe_name, + recipe_name => $recipe_name, } ); @@ -242,8 +235,7 @@ sub analysis_bcftools_merge { } ## Otherwise just rename the sample else { - say {$filehandle} - q{## Renaming single sample case to facilitate downstream processing}; + say {$filehandle} q{## Renaming single sample case to facilitate downstream processing}; gnu_cp( { @@ -257,7 +249,7 @@ sub analysis_bcftools_merge { ## Close filehandleS close $filehandle or $log->logcroak(q{Could not close filehandle}); - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { ## Collect QC metadata info for later use set_recipe_outfile_in_sample_info( @@ -280,13 +272,13 @@ sub analysis_bcftools_merge { submit_recipe( { - base_command => $profile_base_command, - dependency_method => q{sample_to_case}, - case_id => $case_id, - job_id_chain => $job_id_chain, - job_id_href => $job_id_href, - job_reservation_name => $active_parameter_href->{job_reservation_name}, - log => $log, + base_command => $profile_base_command, + dependency_method => q{sample_to_case}, + case_id => $case_id, + job_id_chain => $recipe{job_id_chain}, + job_id_href => $job_id_href, + job_reservation_name => $active_parameter_href->{job_reservation_name}, + log => $log, max_parallel_processes_count_href => $file_info_href->{max_parallel_processes_count}, recipe_file_path => $recipe_file_path, diff --git a/lib/MIP/Recipes/Analysis/Blobfish.pm b/lib/MIP/Recipes/Analysis/Blobfish.pm index 360f00b58..955b2540e 
100644 --- a/lib/MIP/Recipes/Analysis/Blobfish.pm +++ b/lib/MIP/Recipes/Analysis/Blobfish.pm @@ -17,8 +17,7 @@ use List::MoreUtils qw { uniq }; use Readonly; ## MIPs lib/ -use MIP::Constants - qw{ $ASTERISK $AMPERSAND $COLON $DOT $LOG_NAME $NEWLINE $SPACE $UNDERSCORE }; +use MIP::Constants qw{ $ASTERISK $AMPERSAND $COLON $DOT $LOG_NAME $NEWLINE $SPACE $UNDERSCORE }; BEGIN { @@ -121,10 +120,10 @@ sub analysis_blobfish { check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; use MIP::Get::File qw{ get_io_files }; - use MIP::Get::Parameter qw{ get_recipe_attributes get_recipe_resources }; use MIP::Parse::File qw{ parse_io_outfiles }; use MIP::Processmanagement::Processes qw{ submit_recipe }; use MIP::Program::Blobfish qw{ blobfish_allvsall }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Sample_info qw{ set_file_path_to_store set_recipe_outfile_in_sample_info }; use MIP::Script::Setup_script qw{ setup_script }; @@ -157,21 +156,13 @@ sub analysis_blobfish { } ## Get outdir_path - my $outdir_path = - catdir( $active_parameter_href->{outdata_dir}, $case_id, $recipe_name ); + my $outdir_path = catdir( $active_parameter_href->{outdata_dir}, $case_id, $recipe_name ); ## Get recipe attributes and parameters - my $job_id_chain = get_recipe_attributes( - { - attribute => q{chain}, - parameter_href => $parameter_href, - recipe_name => $recipe_name, - } - ); - my $recipe_mode = $active_parameter_href->{$recipe_name}; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); @@ -183,16 +174,16 @@ sub analysis_blobfish { ## Creates recipe directories (info & data & script), recipe script filenames and writes sbatch header my ( $recipe_file_path, $recipe_info_path ) = setup_script( { - active_parameter_href => $active_parameter_href, - core_number => $recipe_resource{core_number}, - directory_id => 
$case_id, - filehandle => $filehandle, - job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, - process_time => $recipe_resource{time}, - recipe_directory => $recipe_name, - recipe_name => $recipe_name, - temp_directory => $temp_directory, + active_parameter_href => $active_parameter_href, + core_number => $recipe{core_number}, + directory_id => $case_id, + filehandle => $filehandle, + job_id_href => $job_id_href, + memory_allocation => $recipe{memory}, + process_time => $recipe{time}, + recipe_directory => $recipe_name, + recipe_name => $recipe_name, + temp_directory => $temp_directory, } ); @@ -222,16 +213,12 @@ sub analysis_blobfish { close $filehandle; - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { ## Collect QC metadata info for later use my @conditions = uniq @sample_phenotypes; my $de_outfile_name = - $conditions[0] - . $UNDERSCORE . q{vs} - . $UNDERSCORE - . $conditions[1] - . q{.results.tsv}; + $conditions[0] . $UNDERSCORE . q{vs} . $UNDERSCORE . $conditions[1] . 
q{.results.tsv}; set_recipe_outfile_in_sample_info( { path => catfile( $outdir_path, $de_outfile_name ), @@ -252,13 +239,13 @@ sub analysis_blobfish { submit_recipe( { - base_command => $profile_base_command, - dependency_method => q{case_to_island}, - case_id => $case_id, - job_id_chain => $job_id_chain, - job_id_href => $job_id_href, - job_reservation_name => $active_parameter_href->{job_reservation_name}, - log => $log, + base_command => $profile_base_command, + dependency_method => q{case_to_island}, + case_id => $case_id, + job_id_chain => $recipe{job_id_chain}, + job_id_href => $job_id_href, + job_reservation_name => $active_parameter_href->{job_reservation_name}, + log => $log, max_parallel_processes_count_href => $file_info_href->{max_parallel_processes_count}, recipe_file_path => $recipe_file_path, @@ -306,15 +293,14 @@ sub _generate_tx2gene_file { check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; - use MIP::Environment::Executable qw{ get_executable_base_command }; + use MIP::Environment::Executable qw{ get_executable_base_command }; my @commands = ( get_executable_base_command( { base_command => q{perl}, } ), ); # Execute perl my $tx2gene_generator = join $SPACE, @commands; ## Print header and initiate hash - $tx2gene_generator .= - q? -nae 'BEGIN {print q{TXNAME,GENEID} . qq{\n}; %txgene;}?; + $tx2gene_generator .= q? -nae 'BEGIN {print q{TXNAME,GENEID} . qq{\n}; %txgene;}?; ## When the file has been processed; print hash $tx2gene_generator .= q? END {foreach $tx (keys %txgene){print $tx . q{,} . $txgene{$tx} .qq{\n}; } }?; @@ -323,8 +309,7 @@ sub _generate_tx2gene_file { ## Check for keywords in attribute field $tx2gene_generator .= q? if (($F[8] =~ /gene_id/) and ($F[10] =~ /transcript_id/))?; ## Capture gene, transcript id and remove [;"] from the names - $tx2gene_generator .= - q? {$gene = $F[9]; $gene =~ tr/[;"]//d; $tx = $F[11]; $tx =~tr/[;"]//d;?; + $tx2gene_generator .= q? 
{$gene = $F[9]; $gene =~ tr/[;"]//d; $tx = $F[11]; $tx =~tr/[;"]//d;?; ## Store in hash $tx2gene_generator .= q? $txgene{$tx} = $gene;} else{next;}'?; diff --git a/lib/MIP/Recipes/Analysis/BootstrapAnn.pm b/lib/MIP/Recipes/Analysis/BootstrapAnn.pm index 60a3b59ef..fe89e451d 100644 --- a/lib/MIP/Recipes/Analysis/BootstrapAnn.pm +++ b/lib/MIP/Recipes/Analysis/BootstrapAnn.pm @@ -127,7 +127,7 @@ sub analysis_bootstrapann { check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; use MIP::Get::File qw{ get_io_files }; - use MIP::Get::Parameter qw{ get_recipe_attributes get_recipe_resources }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Parse::File qw{ parse_io_outfiles }; use MIP::Processmanagement::Processes qw{ submit_recipe }; use MIP::Program::BootstrapAnn qw{ bootstrapann }; @@ -179,19 +179,11 @@ sub analysis_bootstrapann { ); my $variant_infile_path = $variant_io{out}{file_path}; - my $job_id_chain = get_recipe_attributes( - { - attribute => q{chain}, - parameter_href => $parameter_href, - recipe_name => $recipe_name, - } - ); - my $recipe_mode = $active_parameter_href->{$recipe_name}; - - ## Get module parameters - my %recipe_resource = get_recipe_resources( +## Get module parameters + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); @@ -202,7 +194,7 @@ sub analysis_bootstrapann { %io, parse_io_outfiles( { - chain_id => $job_id_chain, + chain_id => $recipe{job_id_chain}, id => $sample_id, file_info_href => $file_info_href, file_name_prefixes_ref => \@ase_infile_name_prefixes, @@ -224,12 +216,12 @@ sub analysis_bootstrapann { my ( $recipe_file_path, $recipe_info_path ) = setup_script( { active_parameter_href => $active_parameter_href, - core_number => $recipe_resource{core_number}, + core_number => $recipe{core_number}, directory_id => $sample_id, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation 
=> $recipe_resource{memory}, - process_time => $recipe_resource{time}, + memory_allocation => $recipe{memory}, + process_time => $recipe{time}, recipe_directory => $recipe_name, recipe_name => $recipe_name, temp_directory => $temp_directory, @@ -252,7 +244,7 @@ sub analysis_bootstrapann { close $filehandle; - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { ## Collect QC metadata info for later use set_recipe_outfile_in_sample_info( @@ -277,13 +269,13 @@ sub analysis_bootstrapann { submit_recipe( { - base_command => $profile_base_command, - dependency_method => q{sample_to_sample}, - case_id => $case_id, - job_id_chain => $job_id_chain, - job_id_href => $job_id_href, - job_reservation_name => $active_parameter_href->{job_reservation_name}, - log => $log, + base_command => $profile_base_command, + dependency_method => q{sample_to_sample}, + case_id => $case_id, + job_id_chain => $recipe{job_id_chain}, + job_id_href => $job_id_href, + job_reservation_name => $active_parameter_href->{job_reservation_name}, + log => $log, max_parallel_processes_count_href => $file_info_href->{max_parallel_processes_count}, recipe_file_path => $recipe_file_path, diff --git a/lib/MIP/Recipes/Analysis/Bwa_mem.pm b/lib/MIP/Recipes/Analysis/Bwa_mem.pm index 5f8b09da9..b36be5fe4 100644 --- a/lib/MIP/Recipes/Analysis/Bwa_mem.pm +++ b/lib/MIP/Recipes/Analysis/Bwa_mem.pm @@ -127,28 +127,25 @@ sub analysis_bwa_mem { check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; - use MIP::File_info qw{get_sample_file_attribute}; - use MIP::Get::File qw{get_io_files}; - use MIP::Get::Parameter qw{get_recipe_attributes get_recipe_resources}; - use MIP::Parse::File qw{parse_io_outfiles}; - use MIP::Processmanagement::Processes qw{submit_recipe}; - use MIP::Program::Bwa qw{bwa_mem}; - use MIP::Program::Samtools qw{ samtools_index samtools_stats samtools_sort samtools_view}; + use MIP::File_info qw{ get_sample_file_attribute }; + use MIP::Get::File qw{ get_io_files }; + use 
MIP::Parse::File qw{ parse_io_outfiles }; + use MIP::Processmanagement::Processes qw{ submit_recipe }; + use MIP::Program::Bwa qw{ bwa_mem }; + use MIP::Program::Samtools qw{ samtools_index samtools_stats samtools_sort samtools_view }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Sample_info qw{ get_rg_header_line set_recipe_metafile_in_sample_info - set_recipe_outfile_in_sample_info}; - use MIP::Script::Setup_script qw{setup_script}; + set_recipe_outfile_in_sample_info }; + use MIP::Script::Setup_script qw{ setup_script }; ### PREPROCESSING: ## Retrieve logger object my $log = Log::Log4perl->get_logger($LOG_NAME); - ## Set MIP recipe name - my $recipe_mode = $active_parameter_href->{$recipe_name}; - - ## Unpack parameters +## Unpack parameters ## Get the io infiles per chain and id my %io = get_io_files( { @@ -161,17 +158,11 @@ sub analysis_bwa_mem { ); my @infile_paths = @{ $io{in}{file_paths} }; - my $job_id_chain = get_recipe_attributes( - { - parameter_href => $parameter_href, - recipe_name => $recipe_name, - attribute => q{chain}, - } - ); my $referencefile_path = $active_parameter_href->{human_genome_reference}; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); @@ -187,7 +178,7 @@ sub analysis_bwa_mem { %io, parse_io_outfiles( { - chain_id => $job_id_chain, + chain_id => $recipe{job_id_chain}, id => $sample_id, file_info_href => $file_info_href, file_name_prefixes_ref => $file_info_sample{no_direction_infile_prefixes}, @@ -245,14 +236,14 @@ sub analysis_bwa_mem { my ( $recipe_file_path, $recipe_info_path ) = setup_script( { active_parameter_href => $active_parameter_href, - core_number => $recipe_resource{core_number}, + core_number => $recipe{core_number}, directory_id => $sample_id, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => 
$recipe_resource{memory}, + memory_allocation => $recipe{memory}, recipe_directory => $recipe_name, recipe_name => $recipe_name, - process_time => $recipe_resource{time}, + process_time => $recipe{time}, temp_directory => $temp_directory, } ); @@ -300,7 +291,7 @@ sub analysis_bwa_mem { read_group_header => $rg_header_line, soft_clip_sup_align => $active_parameter_href->{bwa_soft_clip_sup_align}, second_infile_path => $second_fastq_file_path, - thread_number => $recipe_resource{core_number}, + thread_number => $recipe{core_number}, } ); @@ -312,7 +303,7 @@ sub analysis_bwa_mem { auto_detect_input_format => 1, filehandle => $filehandle, infile_path => q{-}, - thread_number => $recipe_resource{core_number}, + thread_number => $recipe{core_number}, uncompressed_bam_output => $uncompressed_bam_output, with_header => 1, } @@ -335,7 +326,7 @@ sub analysis_bwa_mem { outfile_path => $outfile_path, output_format => $output_format, temp_file_path_prefix => catfile( $temp_directory, q{samtools_sort_temp} ), - thread_number => $recipe_resource{core_number}, + thread_number => $recipe{core_number}, } ); say {$filehandle} $NEWLINE; @@ -391,7 +382,7 @@ sub analysis_bwa_mem { close $filehandle; - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { if ( $active_parameter_href->{bwa_mem_cram} and $outfile_suffix ne q{.cram} ) @@ -441,7 +432,7 @@ sub analysis_bwa_mem { base_command => $profile_base_command, case_id => $case_id, dependency_method => q{sample_to_sample_parallel}, - job_id_chain => $job_id_chain, + job_id_chain => $recipe{job_id_chain}, job_id_href => $job_id_href, job_reservation_name => $active_parameter_href->{job_reservation_name}, log => $log, @@ -558,11 +549,11 @@ sub analysis_bwa_mem2 { use MIP::File_info qw{ get_sample_file_attribute }; use MIP::Get::File qw{ get_io_files }; - use MIP::Get::Parameter qw{ get_recipe_attributes get_recipe_resources }; use MIP::Parse::File qw{ parse_io_outfiles }; use MIP::Processmanagement::Processes qw{ submit_recipe }; use 
MIP::Program::Bwa qw{ bwa_mem2_mem }; use MIP::Program::Samtools qw{ samtools_index samtools_stats samtools_sort samtools_view }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Sample_info qw{ get_rg_header_line set_recipe_metafile_in_sample_info @@ -574,10 +565,7 @@ sub analysis_bwa_mem2 { ## Retrieve logger object my $log = Log::Log4perl->get_logger($LOG_NAME); - ## Set MIP recipe name - my $recipe_mode = $active_parameter_href->{$recipe_name}; - - ## Unpack parameters +## Unpack parameters ## Get the io infiles per chain and id my %io = get_io_files( { @@ -590,17 +578,11 @@ sub analysis_bwa_mem2 { ); my @infile_paths = @{ $io{in}{file_paths} }; - my $job_id_chain = get_recipe_attributes( - { - parameter_href => $parameter_href, - recipe_name => $recipe_name, - attribute => q{chain}, - } - ); my $referencefile_path = $active_parameter_href->{human_genome_reference}; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); @@ -616,7 +598,7 @@ sub analysis_bwa_mem2 { %io, parse_io_outfiles( { - chain_id => $job_id_chain, + chain_id => $recipe{job_id_chain}, id => $sample_id, file_info_href => $file_info_href, file_name_prefixes_ref => $file_info_sample{no_direction_infile_prefixes}, @@ -674,14 +656,14 @@ sub analysis_bwa_mem2 { my ( $recipe_file_path, $recipe_info_path ) = setup_script( { active_parameter_href => $active_parameter_href, - core_number => $recipe_resource{core_number}, + core_number => $recipe{core_number}, directory_id => $sample_id, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, + memory_allocation => $recipe{memory}, recipe_directory => $recipe_name, recipe_name => $recipe_name, - process_time => $recipe_resource{time}, + process_time => $recipe{time}, temp_directory => $temp_directory, } ); @@ -731,7 +713,7 @@ sub 
analysis_bwa_mem2 { read_group_header => $rg_header_line, soft_clip_sup_align => $active_parameter_href->{bwa_soft_clip_sup_align}, second_infile_path => $second_fastq_file_path, - thread_number => $recipe_resource{core_number}, + thread_number => $recipe{core_number}, } ); @@ -744,7 +726,7 @@ sub analysis_bwa_mem2 { filehandle => $filehandle, infile_path => q{-}, outfile_path => $samtools_view_outfile_path, - thread_number => $recipe_resource{core_number}, + thread_number => $recipe{core_number}, uncompressed_bam_output => $uncompressed_bam_output, with_header => 1, } @@ -766,7 +748,7 @@ sub analysis_bwa_mem2 { outfile_path => $outfile_path, output_format => $output_format, temp_file_path_prefix => catfile( $temp_directory, q{samtools_sort_temp} ), - thread_number => $recipe_resource{core_number}, + thread_number => $recipe{core_number}, } ); say {$filehandle} $NEWLINE; @@ -822,7 +804,7 @@ sub analysis_bwa_mem2 { close $filehandle; - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { if ( $active_parameter_href->{bwa_mem_cram} and $outfile_suffix ne q{.cram} ) @@ -872,7 +854,7 @@ sub analysis_bwa_mem2 { base_command => $profile_base_command, case_id => $case_id, dependency_method => q{sample_to_sample_parallel}, - job_id_chain => $job_id_chain, + job_id_chain => $recipe{job_id_chain}, job_id_href => $job_id_href, job_reservation_name => $active_parameter_href->{job_reservation_name}, log => $log, @@ -990,11 +972,11 @@ sub analysis_run_bwa_mem { use MIP::Environment::Executable qw{ get_executable_base_command }; use MIP::File_info qw{get_sample_file_attribute}; use MIP::Get::File qw{ get_io_files }; - use MIP::Get::Parameter qw{ get_recipe_attributes get_recipe_resources }; use MIP::Parse::File qw{ parse_io_outfiles }; use MIP::Processmanagement::Processes qw{ submit_recipe }; use MIP::Program::Bwa qw{ bwa_mem run_bwamem }; use MIP::Program::Samtools qw{ samtools_index samtools_stats samtools_sort samtools_view }; + use MIP::Recipe qw{ 
parse_recipe_prerequisites }; use MIP::Sample_info qw{ get_rg_header_line set_recipe_metafile_in_sample_info @@ -1006,10 +988,7 @@ sub analysis_run_bwa_mem { ## Retrieve logger object my $log = Log::Log4perl->get_logger($LOG_NAME); - ## Set MIP recipe name - my $recipe_mode = $active_parameter_href->{$recipe_name}; - - ## Unpack parameters +## Unpack parameters ## Get the io infiles per chain and id my %io = get_io_files( { @@ -1023,17 +1002,11 @@ sub analysis_run_bwa_mem { ); my @infile_paths = @{ $io{in}{file_paths} }; - my $job_id_chain = get_recipe_attributes( - { - parameter_href => $parameter_href, - recipe_name => $recipe_name, - attribute => q{chain}, - } - ); my $referencefile_path = $active_parameter_href->{human_genome_reference}; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); @@ -1049,7 +1022,7 @@ sub analysis_run_bwa_mem { %io, parse_io_outfiles( { - chain_id => $job_id_chain, + chain_id => $recipe{job_id_chain}, id => $sample_id, file_info_href => $file_info_href, file_name_prefixes_ref => $file_info_sample{no_direction_infile_prefixes}, @@ -1106,14 +1079,14 @@ sub analysis_run_bwa_mem { my ( $recipe_file_path, $recipe_info_path ) = setup_script( { active_parameter_href => $active_parameter_href, - core_number => $recipe_resource{core_number}, + core_number => $recipe{core_number}, directory_id => $sample_id, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, + memory_allocation => $recipe{memory}, recipe_directory => $recipe_name, recipe_name => $recipe_name, - process_time => $recipe_resource{time}, + process_time => $recipe{time}, temp_directory => $temp_directory, } ); @@ -1160,7 +1133,7 @@ sub analysis_run_bwa_mem { outfiles_prefix_path => $outfile_path_prefix, read_group_header => $rg_header_line, second_infile_path => 
$second_fastq_file_path, - thread_number => $recipe_resource{core_number}, + thread_number => $recipe{core_number}, } ); print {$filehandle} $PIPE . $SPACE; @@ -1183,7 +1156,7 @@ sub analysis_run_bwa_mem { outfile_path => $outfile_path, output_format => $output_format, temp_file_path_prefix => catfile( $temp_directory, q{samtools_sort_temp} ), - thread_number => $recipe_resource{core_number}, + thread_number => $recipe{core_number}, write_index => 1, } ); @@ -1240,7 +1213,7 @@ sub analysis_run_bwa_mem { close $filehandle; - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { if ( $active_parameter_href->{bwa_mem_cram} and $outfile_suffix ne q{.cram} ) @@ -1290,7 +1263,7 @@ sub analysis_run_bwa_mem { base_command => $profile_base_command, case_id => $case_id, dependency_method => q{sample_to_sample_parallel}, - job_id_chain => $job_id_chain, + job_id_chain => $recipe{job_id_chain}, job_id_href => $job_id_href, job_reservation_name => $active_parameter_href->{job_reservation_name}, log => $log, diff --git a/lib/MIP/Recipes/Analysis/Cadd.pm b/lib/MIP/Recipes/Analysis/Cadd.pm index 929122dfd..46b88fa1c 100644 --- a/lib/MIP/Recipes/Analysis/Cadd.pm +++ b/lib/MIP/Recipes/Analysis/Cadd.pm @@ -119,7 +119,7 @@ sub analysis_cadd { check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; use MIP::Get::File qw{ get_io_files }; - use MIP::Get::Parameter qw{ get_recipe_attributes get_recipe_resources }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Program::Gnu::Bash qw{ gnu_cd gnu_export gnu_unset }; use MIP::Program::Gnu::Coreutils qw{ gnu_mkdir }; use MIP::Parse::File qw{ parse_io_outfiles }; @@ -150,28 +150,20 @@ sub analysis_cadd { my $infile_name_prefix = $io{in}{file_name_prefix}; my %infile_path = %{ $io{in}{file_path_href} }; - my $human_genome_reference_version = - $file_info_href->{human_genome_reference_version}; - my $assembly_version = _get_cadd_reference_param( + my $human_genome_reference_version = 
$file_info_href->{human_genome_reference_version}; + my $assembly_version = _get_cadd_reference_param( { reference_source => $file_info_href->{human_genome_reference_source}, reference_version => $human_genome_reference_version, } ); - my $cadd_columns_name = join $COMMA, @{ $active_parameter_href->{cadd_column_names} }; + my $cadd_columns_name = join $COMMA, @{ $active_parameter_href->{cadd_column_names} }; my @contigs_size_ordered = @{ $file_info_href->{contigs_size_ordered} }; - my $job_id_chain = get_recipe_attributes( - { - parameter_href => $parameter_href, - recipe_name => $recipe_name, - attribute => q{chain}, - } - ); - my $recipe_mode = $active_parameter_href->{$recipe_name}; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); @@ -181,7 +173,7 @@ sub analysis_cadd { %io, parse_io_outfiles( { - chain_id => $job_id_chain, + chain_id => $recipe{job_id_chain}, id => $case_id, file_info_href => $file_info_href, file_name_prefix => $infile_name_prefix, @@ -199,8 +191,7 @@ sub analysis_cadd { my %outfile_path = %{ $io{out}{file_path_href} }; my @outfile_paths = @{ $io{out}{file_paths} }; my %temp_outdir_path = - map { $_ => catdir( $active_parameter_href->{temp_directory}, $_ ) } - @contigs_size_ordered; + map { $_ => catdir( $active_parameter_href->{temp_directory}, $_ ) } @contigs_size_ordered; ## Filehandles # Create anonymous filehandle @@ -210,15 +201,15 @@ sub analysis_cadd { ## Creates recipe directories (info & data & script), recipe script filenames and writes sbatch header my ( $recipe_file_path, $recipe_info_path ) = setup_script( { - active_parameter_href => $active_parameter_href, - core_number => $recipe_resource{core_number}, - directory_id => $case_id, - filehandle => $filehandle, - job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, - process_time => 
$recipe_resource{time}, - recipe_directory => $recipe_name, - recipe_name => $recipe_name, + active_parameter_href => $active_parameter_href, + core_number => $recipe{core_number}, + directory_id => $case_id, + filehandle => $filehandle, + job_id_href => $job_id_href, + memory_allocation => $recipe{memory}, + process_time => $recipe{time}, + recipe_directory => $recipe_name, + recipe_name => $recipe_name, } ); @@ -229,8 +220,7 @@ sub analysis_cadd { my $bash_variable = q{MIP_BIND} . $EQUALS - . catdir( $active_parameter_href->{reference_dir}, - qw{ CADD-scripts data annotations} ); + . catdir( $active_parameter_href->{reference_dir}, qw{ CADD-scripts data annotations} ); gnu_export( { bash_variable => $bash_variable, @@ -259,7 +249,7 @@ sub analysis_cadd { ## Create file commands for xargs my ( $xargs_file_counter, $xargs_file_path_prefix ) = xargs_command( { - core_number => $recipe_resource{core_number}, + core_number => $recipe{core_number}, filehandle => $filehandle, file_path => $recipe_file_path, recipe_info_path => $recipe_info_path, @@ -275,8 +265,7 @@ sub analysis_cadd { ## Get parameters $cadd_outfile_path{$contig} = catfile( $temp_outdir_path{$contig}, $outfile_name_prefix . $DOT . $contig . $DOT . q{tsv.gz} ); - my $stderrfile_path = - $xargs_file_path_prefix . $DOT . $contig . $DOT . q{stderr.txt}; + my $stderrfile_path = $xargs_file_path_prefix . $DOT . $contig . $DOT . q{stderr.txt}; my $view_outfile_path = $outfile_path_prefix . $UNDERSCORE . q{view} . $DOT . $contig . 
$outfile_suffix; my $view_infile_path = $infile_path{$contig}; @@ -288,8 +277,7 @@ sub analysis_cadd { { filehandle => $xargsfilehandle, infile_paths_ref => [ - $infile_path{ $contigs_size_ordered[ $index - 1 ] }, - $infile_path{$contig} + $infile_path{ $contigs_size_ordered[ $index - 1 ] }, $infile_path{$contig} ], outfile_path => $view_infile_path, output_type => q{v}, @@ -350,7 +338,7 @@ sub analysis_cadd { ## Create file commands for xargs ( $xargs_file_counter, $xargs_file_path_prefix ) = xargs_command( { - core_number => $recipe_resource{core_number}, + core_number => $recipe{core_number}, filehandle => $filehandle, file_path => $recipe_file_path, recipe_info_path => $recipe_info_path, @@ -364,8 +352,7 @@ sub analysis_cadd { foreach my $contig (@contigs_size_ordered) { ## Get parameters - my $stderrfile_path = - $xargs_file_path_prefix . $DOT . $contig . $DOT . q{stderr.txt}; + my $stderrfile_path = $xargs_file_path_prefix . $DOT . $contig . $DOT . q{stderr.txt}; ## Parse outfile in case of grch38 my $cadd_outfile_path = _parse_cadd_outfile( @@ -420,7 +407,7 @@ sub analysis_cadd { close $xargsfilehandle or $log->logcroak(q{Could not close xargsfilehandle}); - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { ## Collect QC metadata info for later use set_recipe_outfile_in_sample_info( @@ -433,13 +420,13 @@ sub analysis_cadd { submit_recipe( { - base_command => $profile_base_command, - dependency_method => q{sample_to_case}, - case_id => $case_id, - job_id_chain => $job_id_chain, - job_id_href => $job_id_href, - job_reservation_name => $active_parameter_href->{job_reservation_name}, - log => $log, + base_command => $profile_base_command, + dependency_method => q{sample_to_case}, + case_id => $case_id, + job_id_chain => $recipe{job_id_chain}, + job_id_href => $job_id_href, + job_reservation_name => $active_parameter_href->{job_reservation_name}, + log => $log, max_parallel_processes_count_href => $file_info_href->{max_parallel_processes_count}, 
recipe_file_path => $recipe_file_path, @@ -535,7 +522,7 @@ sub analysis_cadd_panel { check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; use MIP::Get::File qw{ get_io_files }; - use MIP::Get::Parameter qw{ get_recipe_attributes get_recipe_resources }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Program::Gnu::Bash qw{ gnu_export gnu_unset }; use MIP::Language::Perl qw{ perl_nae_oneliners }; use MIP::Parse::File qw{ parse_io_outfiles }; @@ -565,9 +552,8 @@ sub analysis_cadd_panel { my $infile_name_prefix = $io{in}{file_name_prefix}; my $infile_path = $io{in}{file_path}; - my $human_genome_reference_version = - $file_info_href->{human_genome_reference_version}; - my $assembly_version = _get_cadd_reference_param( + my $human_genome_reference_version = $file_info_href->{human_genome_reference_version}; + my $assembly_version = _get_cadd_reference_param( { reference_source => $file_info_href->{human_genome_reference_source}, reference_version => $human_genome_reference_version, @@ -575,17 +561,10 @@ sub analysis_cadd_panel { ); my $cadd_columns_name = join $COMMA, @{ $active_parameter_href->{cadd_column_names} }; - my $job_id_chain = get_recipe_attributes( - { - parameter_href => $parameter_href, - recipe_name => $recipe_name, - attribute => q{chain}, - } - ); - my $recipe_mode = $active_parameter_href->{$recipe_name}; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); @@ -595,7 +574,7 @@ sub analysis_cadd_panel { %io, parse_io_outfiles( { - chain_id => $job_id_chain, + chain_id => $recipe{job_id_chain}, id => $case_id, file_info_href => $file_info_href, file_name_prefixes_ref => [$infile_name_prefix], @@ -618,15 +597,15 @@ sub analysis_cadd_panel { my ( $recipe_file_path, $recipe_info_path ) = setup_script( { active_parameter_href => $active_parameter_href, - core_number => 
$recipe_resource{core_number}, + core_number => $recipe{core_number}, directory_id => $case_id, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, - process_time => $recipe_resource{time}, + memory_allocation => $recipe{memory}, + process_time => $recipe{time}, recipe_directory => $recipe_name, recipe_name => $recipe_name, - source_environment_commands_ref => $recipe_resource{load_env_ref}, + source_environment_commands_ref => $recipe{load_env_ref}, } ); @@ -638,8 +617,7 @@ sub analysis_cadd_panel { my $bash_variable = q{MIP_BIND} . $EQUALS - . catdir( $active_parameter_href->{reference_dir}, - qw{ CADD-scripts data annotations} ); + . catdir( $active_parameter_href->{reference_dir}, qw{ CADD-scripts data annotations} ); gnu_export( { bash_variable => $bash_variable, @@ -656,8 +634,7 @@ sub analysis_cadd_panel { reference_version => $file_info_href->{human_genome_reference_version}, } ); - my $view_outfile_path = - $outfile_path_prefix . $UNDERSCORE . q{view} . $outfile_suffix; + my $view_outfile_path = $outfile_path_prefix . $UNDERSCORE . q{view} . 
$outfile_suffix; bcftools_view( { filehandle => $filehandle, @@ -731,7 +708,7 @@ sub analysis_cadd_panel { ## Close filehandles close $filehandle or $log->logcroak(q{Could not close filehandle}); - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { ## Collect QC metadata info for later use set_recipe_outfile_in_sample_info( @@ -744,13 +721,13 @@ sub analysis_cadd_panel { submit_recipe( { - base_command => $profile_base_command, - dependency_method => q{sample_to_case}, - case_id => $case_id, - job_id_chain => $job_id_chain, - job_id_href => $job_id_href, - job_reservation_name => $active_parameter_href->{job_reservation_name}, - log => $log, + base_command => $profile_base_command, + dependency_method => q{sample_to_case}, + case_id => $case_id, + job_id_chain => $recipe{job_id_chain}, + job_id_href => $job_id_href, + job_reservation_name => $active_parameter_href->{job_reservation_name}, + log => $log, max_parallel_processes_count_href => $file_info_href->{max_parallel_processes_count}, recipe_file_path => $recipe_file_path, diff --git a/lib/MIP/Recipes/Analysis/Chanjo_sex_check.pm b/lib/MIP/Recipes/Analysis/Chanjo_sex_check.pm index 4c1a0a8d8..7e9933dba 100644 --- a/lib/MIP/Recipes/Analysis/Chanjo_sex_check.pm +++ b/lib/MIP/Recipes/Analysis/Chanjo_sex_check.pm @@ -121,10 +121,10 @@ sub analysis_chanjo_sex_check { check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; use MIP::Get::File qw{ get_io_files }; - use MIP::Get::Parameter qw{ get_recipe_attributes get_recipe_resources }; use MIP::Parse::File qw{ parse_io_outfiles }; use MIP::Processmanagement::Processes qw{ submit_recipe }; use MIP::Program::Chanjo qw{ chanjo_sex }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Sample_info qw{ set_file_path_to_store set_recipe_metafile_in_sample_info set_recipe_outfile_in_sample_info }; @@ -152,17 +152,10 @@ sub analysis_chanjo_sex_check { my $infile_suffix = $io{in}{file_suffix}; my $infile_path = $infile_path_prefix . 
$infile_suffix; - my $job_id_chain = get_recipe_attributes( - { - parameter_href => $parameter_href, - recipe_name => $recipe_name, - attribute => q{chain}, - } - ); - my $recipe_mode = $active_parameter_href->{$recipe_name}; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); @@ -171,7 +164,7 @@ sub analysis_chanjo_sex_check { %io, parse_io_outfiles( { - chain_id => $job_id_chain, + chain_id => $recipe{job_id_chain}, id => $sample_id, file_info_href => $file_info_href, file_name_prefixes_ref => [$infile_name_prefix], @@ -197,12 +190,12 @@ sub analysis_chanjo_sex_check { my ( $recipe_file_path, $recipe_info_path ) = setup_script( { active_parameter_href => $active_parameter_href, - core_number => $recipe_resource{core_number}, + core_number => $recipe{core_number}, directory_id => $sample_id, filehandle => $filehandle, - memory_allocation => $recipe_resource{memory}, + memory_allocation => $recipe{memory}, job_id_href => $job_id_href, - process_time => $recipe_resource{time}, + process_time => $recipe{time}, recipe_directory => $recipe_name, recipe_name => $recipe_name, } @@ -234,7 +227,7 @@ sub analysis_chanjo_sex_check { say {$filehandle} $NEWLINE; close $filehandle; - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { ## Collect QC metadata info for later use set_recipe_outfile_in_sample_info( @@ -268,13 +261,13 @@ sub analysis_chanjo_sex_check { submit_recipe( { - base_command => $profile_base_command, - dependency_method => q{sample_to_island}, - case_id => $case_id, - job_id_chain => $job_id_chain, - job_id_href => $job_id_href, - job_reservation_name => $active_parameter_href->{job_reservation_name}, - log => $log, + base_command => $profile_base_command, + dependency_method => q{sample_to_island}, + case_id => $case_id, + job_id_chain => $recipe{job_id_chain}, + job_id_href => $job_id_href, + 
job_reservation_name => $active_parameter_href->{job_reservation_name}, + log => $log, max_parallel_processes_count_href => $file_info_href->{max_parallel_processes_count}, recipe_file_path => $recipe_file_path, diff --git a/lib/MIP/Recipes/Analysis/Chromograph.pm b/lib/MIP/Recipes/Analysis/Chromograph.pm index d22bb196c..df55eba2d 100644 --- a/lib/MIP/Recipes/Analysis/Chromograph.pm +++ b/lib/MIP/Recipes/Analysis/Chromograph.pm @@ -125,7 +125,7 @@ sub analysis_chromograph_cov { use MIP::Contigs qw{ delete_contig_elements }; use MIP::Get::File qw{ get_io_files }; - use MIP::Get::Parameter qw{ get_recipe_attributes get_recipe_resources }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Program::Chromograph qw{ chromograph }; use MIP::Parse::File qw{ parse_io_outfiles }; use MIP::Processmanagement::Processes qw{ submit_recipe }; @@ -151,17 +151,10 @@ sub analysis_chromograph_cov { my $infile_name_prefix = $io{in}{file_name_prefix}; my $infile_path = $io{in}{file_path}; - my $job_id_chain = get_recipe_attributes( - { - attribute => q{chain}, - parameter_href => $parameter_href, - recipe_name => $recipe_name, - } - ); - my $recipe_mode = $active_parameter_href->{$recipe_name}; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); @@ -178,7 +171,7 @@ sub analysis_chromograph_cov { %io, parse_io_outfiles( { - chain_id => $job_id_chain, + chain_id => $recipe{job_id_chain}, id => $sample_id, file_info_href => $file_info_href, file_name_prefixes_ref => \@outfile_name_prefixes, @@ -199,12 +192,12 @@ sub analysis_chromograph_cov { my ( $recipe_file_path, $recipe_info_path ) = setup_script( { active_parameter_href => $active_parameter_href, - core_number => $recipe_resource{core_number}, + core_number => $recipe{core_number}, directory_id => $sample_id, filehandle => $filehandle, job_id_href => $job_id_href, - 
memory_allocation => $recipe_resource{memory}, - process_time => $recipe_resource{time}, + memory_allocation => $recipe{memory}, + process_time => $recipe{time}, recipe_directory => $recipe_name, recipe_name => $recipe_name, } @@ -226,7 +219,7 @@ sub analysis_chromograph_cov { ## Close filehandle close $filehandle or $log->logcroak(q{Could not close filehandle}); - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { ## Collect QC metadata info for later use set_recipe_outfile_in_sample_info( @@ -255,13 +248,13 @@ sub analysis_chromograph_cov { submit_recipe( { - base_command => $profile_base_command, - case_id => $case_id, - dependency_method => q{sample_to_island}, - job_id_chain => $job_id_chain, - job_id_href => $job_id_href, - job_reservation_name => $active_parameter_href->{job_reservation_name}, - log => $log, + base_command => $profile_base_command, + case_id => $case_id, + dependency_method => q{sample_to_island}, + job_id_chain => $recipe{job_id_chain}, + job_id_href => $job_id_href, + job_reservation_name => $active_parameter_href->{job_reservation_name}, + log => $log, max_parallel_processes_count_href => $file_info_href->{max_parallel_processes_count}, recipe_file_path => $recipe_file_path, @@ -366,7 +359,7 @@ sub analysis_chromograph_rhoviz { use MIP::File::Path qw{ remove_file_path_suffix }; use MIP::Get::File qw{ get_io_files }; - use MIP::Get::Parameter qw{ get_recipe_attributes get_recipe_resources }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Parse::File qw{ parse_io_outfiles }; use MIP::Processmanagement::Processes qw{ submit_recipe }; use MIP::Program::Chromograph qw{ chromograph }; @@ -392,8 +385,8 @@ sub analysis_chromograph_rhoviz { ); my $infile_name_prefix = remove_file_path_suffix( { - file_path => $io{in}{file_names}[0], - file_suffixes_ref => [$io{in}{file_suffix}], + file_path => $io{in}{file_names}[0], + file_suffixes_ref => [ $io{in}{file_suffix} ], } ); my %infile_path = _build_infile_path_hash( @@ -403,17 
+396,10 @@ sub analysis_chromograph_rhoviz { } ); - my $job_id_chain = get_recipe_attributes( - { - attribute => q{chain}, - parameter_href => $parameter_href, - recipe_name => $recipe_name, - } - ); - my $recipe_mode = $active_parameter_href->{$recipe_name}; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); @@ -429,7 +415,7 @@ sub analysis_chromograph_rhoviz { %io, parse_io_outfiles( { - chain_id => $job_id_chain, + chain_id => $recipe{job_id_chain}, id => $sample_id, file_info_href => $file_info_href, file_name_prefixes_ref => \@outfile_name_prefixes, @@ -455,15 +441,15 @@ sub analysis_chromograph_rhoviz { my ( $recipe_file_path, $recipe_info_path ) = setup_script( { active_parameter_href => $active_parameter_href, - core_number => $recipe_resource{core_number}, + core_number => $recipe{core_number}, directory_id => $sample_id, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, - process_time => $recipe_resource{time}, + memory_allocation => $recipe{memory}, + process_time => $recipe{time}, recipe_directory => $recipe_name, recipe_name => $recipe_name, - source_environment_commands_ref => $recipe_resource{load_env_ref}, + source_environment_commands_ref => $recipe{load_env_ref}, } ); @@ -516,7 +502,7 @@ sub analysis_chromograph_rhoviz { # Close filehandle close $filehandle or $log->logcroak(q{Could not close filehandle}); - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { ## Collect QC metadata info for later use set_recipe_outfile_in_sample_info( @@ -539,12 +525,12 @@ sub analysis_chromograph_rhoviz { submit_recipe( { - base_command => $profile_base_command, - case_id => $case_id, - dependency_method => q{sample_to_island}, - job_id_chain => $job_id_chain, - job_id_href => $job_id_href, - log => $log, + base_command => $profile_base_command, + 
case_id => $case_id, + dependency_method => q{sample_to_island}, + job_id_chain => $recipe{job_id_chain}, + job_id_href => $job_id_href, + log => $log, max_parallel_processes_count_href => $file_info_href->{max_parallel_processes_count}, recipe_file_path => $recipe_file_path, @@ -649,7 +635,7 @@ sub analysis_chromograph_upd { use MIP::Contigs qw{ delete_contig_elements }; use MIP::Get::File qw{ get_io_files }; - use MIP::Get::Parameter qw{ get_recipe_attributes get_recipe_resources }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Parse::File qw{ parse_io_outfiles }; use MIP::Pedigree qw{ is_sample_proband_in_trio }; use MIP::Processmanagement::Processes qw{ submit_recipe }; @@ -688,17 +674,10 @@ sub analysis_chromograph_upd { map { s/$io{in}{file_suffix}//xmsr } @{ $io{in}{file_names} }; my $infile_path_href = $io{in}{file_path_href}; - my $job_id_chain = get_recipe_attributes( - { - attribute => q{chain}, - parameter_href => $parameter_href, - recipe_name => $recipe_name, - } - ); - my $recipe_mode = $active_parameter_href->{$recipe_name}; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); @@ -714,14 +693,13 @@ sub analysis_chromograph_upd { foreach my $infile_name_prefix (@infile_name_prefixes) { - push @outfile_name_prefixes, - map { $infile_name_prefix . $UNDERSCORE . $_ } @contigs; + push @outfile_name_prefixes, map { $infile_name_prefix . $UNDERSCORE . 
$_ } @contigs; } %io = ( %io, parse_io_outfiles( { - chain_id => $job_id_chain, + chain_id => $recipe{job_id_chain}, id => $sample_id, file_info_href => $file_info_href, file_name_prefixes_ref => \@outfile_name_prefixes, @@ -742,15 +720,15 @@ sub analysis_chromograph_upd { ## Creates recipe directories (info & data & script), recipe script filenames and writes sbatch header my ( $recipe_file_path, $recipe_info_path ) = setup_script( { - active_parameter_href => $active_parameter_href, - core_number => $recipe_resource{core_number}, - directory_id => $sample_id, - filehandle => $filehandle, - job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, - process_time => $recipe_resource{time}, - recipe_directory => $recipe_name, - recipe_name => $recipe_name, + active_parameter_href => $active_parameter_href, + core_number => $recipe{core_number}, + directory_id => $sample_id, + filehandle => $filehandle, + job_id_href => $job_id_href, + memory_allocation => $recipe{memory}, + process_time => $recipe{time}, + recipe_directory => $recipe_name, + recipe_name => $recipe_name, } ); @@ -786,7 +764,7 @@ sub analysis_chromograph_upd { # Close filehandles close $filehandle or $log->logcroak(q{Could not close filehandle}); - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { ## Collect QC metadata info for later use set_recipe_outfile_in_sample_info( @@ -816,12 +794,12 @@ sub analysis_chromograph_upd { submit_recipe( { - base_command => $profile_base_command, - case_id => $case_id, - dependency_method => q{sample_to_island}, - job_id_chain => $job_id_chain, - job_id_href => $job_id_href, - log => $log, + base_command => $profile_base_command, + case_id => $case_id, + dependency_method => q{sample_to_island}, + job_id_chain => $recipe{job_id_chain}, + job_id_href => $job_id_href, + log => $log, max_parallel_processes_count_href => $file_info_href->{max_parallel_processes_count}, recipe_file_path => $recipe_file_path, @@ -867,7 +845,7 @@ sub 
_build_infile_path_hash { my $infile_path_prefix = remove_file_path_suffix( { - file_path => $file_path, + file_path => $file_path, file_suffixes_ref => [$file_suffix], } ); @@ -933,8 +911,7 @@ sub _build_outfile_name_prefixes { foreach my $infile_type ( keys %{$infile_path_href} ) { push @outfile_name_prefixes, - map { $infile_name_prefix . $DOT . $infile_type . $UNDERSCORE . $_ } - @outfile_contigs; + map { $infile_name_prefix . $DOT . $infile_type . $UNDERSCORE . $_ } @outfile_contigs; } return @outfile_name_prefixes; } diff --git a/lib/MIP/Recipes/Analysis/Cnvnator.pm b/lib/MIP/Recipes/Analysis/Cnvnator.pm index 328ba9cd2..42e55170b 100644 --- a/lib/MIP/Recipes/Analysis/Cnvnator.pm +++ b/lib/MIP/Recipes/Analysis/Cnvnator.pm @@ -146,7 +146,6 @@ sub analysis_cnvnator { use MIP::Contigs qw{ delete_contig_elements }; use MIP::Get::File qw{ get_io_files }; - use MIP::Get::Parameter qw{ get_recipe_attributes get_recipe_resources }; use MIP::Parse::File qw{ parse_io_outfiles }; use MIP::Processmanagement::Processes qw{ submit_recipe }; use MIP::Program::Samtools qw{ samtools_create_chromosome_files }; @@ -154,8 +153,9 @@ sub analysis_cnvnator { qw{ bcftools_annotate bcftools_concat bcftools_create_reheader_samples_file bcftools_rename_vcf_samples }; use MIP::Program::Cnvnator qw{ cnvnator_read_extraction cnvnator_histogram cnvnator_statistics cnvnator_partition cnvnator_calling cnvnator_convert_to_vcf }; - use MIP::Sample_info qw{ set_recipe_outfile_in_sample_info }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Recipes::Analysis::Xargs qw{ xargs_command }; + use MIP::Sample_info qw{ set_recipe_outfile_in_sample_info }; use MIP::Script::Setup_script qw{ setup_script }; ### PREPROCESSING: @@ -178,28 +178,21 @@ sub analysis_cnvnator { my %infile_path = %{ $io{in}{file_path_href} }; my $human_genome_reference = $active_parameter_href->{human_genome_reference}; - my $job_id_chain = get_recipe_attributes( - { - attribute => q{chain}, - parameter_href => 
$parameter_href, - recipe_name => $recipe_name, - } - ); - my $recipe_mode = $active_parameter_href->{$recipe_name}; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); - my $core_number = $recipe_resource{core_number}; + my $core_number = $recipe{core_number}; my $xargs_file_path_prefix; %io = ( %io, parse_io_outfiles( { - chain_id => $job_id_chain, + chain_id => $recipe{job_id_chain}, id => $sample_id, file_info_href => $file_info_href, file_name_prefixes_ref => [$infile_name_prefix], @@ -228,10 +221,10 @@ sub analysis_cnvnator { directory_id => $sample_id, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, + memory_allocation => $recipe{memory}, recipe_directory => $recipe_name, recipe_name => $recipe_name, - process_time => $recipe_resource{time}, + process_time => $recipe{time}, temp_directory => $temp_directory, } ); @@ -321,12 +314,8 @@ sub analysis_cnvnator { cnv_bin_size => $active_parameter_href->{cnv_bin_size}, referencedirectory_path => $outdir_path_prefix, filehandle => $xargsfilehandle, - stdoutfile_path => $stdbasefile_path_prefix - . $UNDERSCORE - . q{histogram.stdout.txt}, - stderrfile_path => $stdbasefile_path_prefix - . $UNDERSCORE - . q{histogram.stderr.txt}, + stdoutfile_path => $stdbasefile_path_prefix . $UNDERSCORE . q{histogram.stdout.txt}, + stderrfile_path => $stdbasefile_path_prefix . $UNDERSCORE . q{histogram.stderr.txt}, } ); print {$xargsfilehandle} $SEMICOLON . $SPACE; @@ -353,12 +342,8 @@ sub analysis_cnvnator { regions_ref => [$contig], cnv_bin_size => $active_parameter_href->{cnv_bin_size}, filehandle => $xargsfilehandle, - stdoutfile_path => $stdbasefile_path_prefix - . $UNDERSCORE - . q{partition.stdout.txt}, - stderrfile_path => $stdbasefile_path_prefix - . $UNDERSCORE - . 
q{partition.stderr.txt}, + stdoutfile_path => $stdbasefile_path_prefix . $UNDERSCORE . q{partition.stdout.txt}, + stderrfile_path => $stdbasefile_path_prefix . $UNDERSCORE . q{partition.stderr.txt}, } ); print {$xargsfilehandle} $SEMICOLON . $SPACE; @@ -374,9 +359,7 @@ sub analysis_cnvnator { regions_ref => [$contig], cnv_bin_size => $active_parameter_href->{cnv_bin_size}, filehandle => $xargsfilehandle, - stderrfile_path => $stdbasefile_path_prefix - . $UNDERSCORE - . q{calling.stderr.txt}, + stderrfile_path => $stdbasefile_path_prefix . $UNDERSCORE . q{calling.stderr.txt}, } ); print {$xargsfilehandle} $SEMICOLON . $SPACE; @@ -417,17 +400,11 @@ sub analysis_cnvnator { foreach my $contig (@contigs) { ## Name intermediary files - my $cnvnator_outfile_path = - $outfile_path_prefix . $UNDERSCORE . $contig . $outfile_suffix; + my $cnvnator_outfile_path = $outfile_path_prefix . $UNDERSCORE . $contig . $outfile_suffix; my $fixed_vcffile_path_prefix = $outfile_path_prefix . $UNDERSCORE . $contig . $UNDERSCORE . q{fixed}; my $fixed_header_vcffile_path = - $outfile_path_prefix - . $UNDERSCORE - . $contig - . $UNDERSCORE - . q{annot} - . $outfile_suffix; + $outfile_path_prefix . $UNDERSCORE . $contig . $UNDERSCORE . q{annot} . 
$outfile_suffix; ## Save infiles for bcftools annotate push @concat_infile_paths, $fixed_header_vcffile_path; @@ -472,7 +449,7 @@ sub analysis_cnvnator { close $filehandle; - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { set_recipe_outfile_in_sample_info( { @@ -484,12 +461,12 @@ sub analysis_cnvnator { submit_recipe( { - base_command => $profile_base_command, - case_id => $case_id, - dependency_method => q{sample_to_sample}, - job_id_chain => $job_id_chain, - job_id_href => $job_id_href, - log => $log, + base_command => $profile_base_command, + case_id => $case_id, + dependency_method => q{sample_to_sample}, + job_id_chain => $recipe{job_id_chain}, + job_id_href => $job_id_href, + log => $log, max_parallel_processes_count_href => $file_info_href->{max_parallel_processes_count}, recipe_file_path => $recipe_file_path, @@ -554,8 +531,7 @@ sub _add_contigs_to_vcfheader { # Reference fai file print {$filehandle} $human_genome_reference . $DOT . q{fai} . $SPACE; - say {$filehandle} q{>} . $SPACE . catfile( $temp_directory, q{contig_header.txt} ), - $NEWLINE; + say {$filehandle} q{>} . $SPACE . 
catfile( $temp_directory, q{contig_header.txt} ), $NEWLINE; return; } diff --git a/lib/MIP/Recipes/Analysis/Deepvariant.pm b/lib/MIP/Recipes/Analysis/Deepvariant.pm index 36dc8cc2a..303eb7833 100644 --- a/lib/MIP/Recipes/Analysis/Deepvariant.pm +++ b/lib/MIP/Recipes/Analysis/Deepvariant.pm @@ -120,12 +120,11 @@ sub analysis_deepvariant { check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; use MIP::Get::File qw{ get_io_files }; - use MIP::Get::Parameter qw{ get_recipe_attributes get_recipe_resources }; use MIP::Parse::File qw{ parse_io_outfiles }; use MIP::Processmanagement::Processes qw{ submit_recipe }; use MIP::Program::Deepvariant qw{ deepvariant }; - use MIP::Sample_info - qw{ set_recipe_metafile_in_sample_info set_recipe_outfile_in_sample_info }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; + use MIP::Sample_info qw{ set_recipe_metafile_in_sample_info set_recipe_outfile_in_sample_info }; use MIP::Script::Setup_script qw{ setup_script }; ### PREPROCESSING: @@ -150,18 +149,12 @@ sub analysis_deepvariant { my $infile_suffix = $io{out}{file_suffix}; my $infile_path = $infile_path_prefix . 
$infile_suffix; - my $job_id_chain = get_recipe_attributes( - { - attribute => q{chain}, - parameter_href => $parameter_href, - recipe_name => $recipe_name, - } - ); - my $analysis_type = $active_parameter_href->{analysis_type}{$sample_id}; - my $recipe_mode = $active_parameter_href->{$recipe_name}; - my %recipe_resource = get_recipe_resources( + my $analysis_type = $active_parameter_href->{analysis_type}{$sample_id}; + + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); @@ -170,7 +163,7 @@ sub analysis_deepvariant { %io, parse_io_outfiles( { - chain_id => $job_id_chain, + chain_id => $recipe{job_id_chain}, id => $sample_id, file_info_href => $file_info_href, file_name_prefixes_ref => [$infile_name_prefix], @@ -193,13 +186,13 @@ sub analysis_deepvariant { my ( $recipe_file_path, $recipe_info_path ) = setup_script( { active_parameter_href => $active_parameter_href, - core_number => $recipe_resource{core_number}, + core_number => $recipe{core_number}, directory_id => $sample_id, filehandle => $filehandle, - gpu_number => $recipe_resource{gpu_number}, + gpu_number => $recipe{gpu_number}, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, - process_time => $recipe_resource{time}, + memory_allocation => $recipe{memory}, + process_time => $recipe{time}, recipe_directory => $recipe_name, recipe_name => $recipe_name, } @@ -214,7 +207,7 @@ sub analysis_deepvariant { filehandle => $filehandle, infile_path => $infile_path, model_type => uc $analysis_type, - num_shards => $recipe_resource{core_number}, + num_shards => $recipe{core_number}, outfile_path => $outfile_path, outfile_path_vcf => $outfile_path_vcf, referencefile_path => $active_parameter_href->{human_genome_reference}, @@ -224,7 +217,7 @@ sub analysis_deepvariant { ## Close filehandleS close $filehandle or $log->logcroak(q{Could not close filehandle}); - if ( $recipe_mode == 1 ) 
{ + if ( $recipe{mode} == 1 ) { ## Collect QC metadata info for later use set_recipe_outfile_in_sample_info( @@ -239,13 +232,13 @@ sub analysis_deepvariant { submit_recipe( { - base_command => $profile_base_command, - case_id => $case_id, - dependency_method => q{sample_to_sample}, - job_id_chain => $job_id_chain, - job_id_href => $job_id_href, - job_reservation_name => $active_parameter_href->{job_reservation_name}, - log => $log, + base_command => $profile_base_command, + case_id => $case_id, + dependency_method => q{sample_to_sample}, + job_id_chain => $recipe{job_id_chain}, + job_id_href => $job_id_href, + job_reservation_name => $active_parameter_href->{job_reservation_name}, + log => $log, max_parallel_processes_count_href => $file_info_href->{max_parallel_processes_count}, recipe_file_path => $recipe_file_path, diff --git a/lib/MIP/Recipes/Analysis/Delly_call.pm b/lib/MIP/Recipes/Analysis/Delly_call.pm index a79b51e59..536f9596d 100644 --- a/lib/MIP/Recipes/Analysis/Delly_call.pm +++ b/lib/MIP/Recipes/Analysis/Delly_call.pm @@ -141,7 +141,7 @@ sub analysis_delly_call { check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; use MIP::Get::File qw{ get_io_files }; - use MIP::Get::Parameter qw{ get_recipe_attributes get_recipe_resources }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Parse::File qw{ parse_io_outfiles }; use MIP::Program::Delly qw{ delly_call }; use MIP::Processmanagement::Processes qw{ submit_recipe }; @@ -170,17 +170,10 @@ sub analysis_delly_call { my $infile_suffix = $io{in}{file_suffix}; my $infile_path = $infile_path_prefix . 
$infile_suffix; - my $job_id_chain = get_recipe_attributes( - { - parameter_href => $parameter_href, - recipe_name => $recipe_name, - attribute => q{chain}, - } - ); - my $recipe_mode = $active_parameter_href->{$recipe_name}; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); @@ -189,7 +182,7 @@ sub analysis_delly_call { %io, parse_io_outfiles( { - chain_id => $job_id_chain, + chain_id => $recipe{job_id_chain}, id => $sample_id, file_info_href => $file_info_href, file_name_prefixes_ref => [$infile_name_prefix], @@ -212,13 +205,13 @@ sub analysis_delly_call { { active_parameter_href => $active_parameter_href, directory_id => $sample_id, - core_number => $recipe_resource{core_number}, + core_number => $recipe{core_number}, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, + memory_allocation => $recipe{memory}, recipe_directory => $recipe_name, recipe_name => $recipe_name, - process_time => $recipe_resource{time}, + process_time => $recipe{time}, temp_directory => $temp_directory, } ); @@ -243,17 +236,17 @@ sub analysis_delly_call { close $filehandle or $log->logcroak(q{Could not close filehandle}); - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { submit_recipe( { - base_command => $profile_base_command, - dependency_method => q{sample_to_sample}, - case_id => $case_id, - job_id_chain => $job_id_chain, - job_id_href => $job_id_href, - job_reservation_name => $active_parameter_href->{job_reservation_name}, - log => $log, + base_command => $profile_base_command, + dependency_method => q{sample_to_sample}, + case_id => $case_id, + job_id_chain => $recipe{job_id_chain}, + job_id_href => $job_id_href, + job_reservation_name => $active_parameter_href->{job_reservation_name}, + log => $log, max_parallel_processes_count_href => 
$file_info_href->{max_parallel_processes_count}, recipe_file_path => $recipe_file_path, diff --git a/lib/MIP/Recipes/Analysis/Delly_reformat.pm b/lib/MIP/Recipes/Analysis/Delly_reformat.pm index 681df67a0..769a1d2ce 100644 --- a/lib/MIP/Recipes/Analysis/Delly_reformat.pm +++ b/lib/MIP/Recipes/Analysis/Delly_reformat.pm @@ -137,13 +137,13 @@ sub analysis_delly_reformat { check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; use MIP::Get::File qw{ get_io_files }; - use MIP::Get::Parameter qw{ get_recipe_attributes get_recipe_resources }; use MIP::Program::Gnu::Coreutils qw{ gnu_mv }; use MIP::Parse::File qw{ parse_io_outfiles }; use MIP::Program::Bcftools qw{ bcftools_merge bcftools_index bcftools_view }; use MIP::Program::Delly qw{ delly_call delly_merge }; use MIP::Program::Picardtools qw{ picardtools_sortvcf }; use MIP::Processmanagement::Processes qw{ print_wait submit_recipe }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Recipes::Analysis::Xargs qw{ xargs_command }; use MIP::Sample_info qw{ set_recipe_outfile_in_sample_info }; use MIP::Script::Setup_script qw{ setup_script }; @@ -154,27 +154,20 @@ sub analysis_delly_reformat { my $log = Log::Log4perl->get_logger($LOG_NAME); ## Unpack parameters - my $job_id_chain = get_recipe_attributes( - { - attribute => q{chain}, - parameter_href => $parameter_href, - recipe_name => $recipe_name, - } - ); - my $recipe_mode = $active_parameter_href->{$recipe_name}; my $referencefile_path = $active_parameter_href->{human_genome_reference}; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); - my $core_number = $recipe_resource{core_number}; + my $core_number = $recipe{core_number}; ## Set and get the io files per chain, id and stream my %io = parse_io_outfiles( { - chain_id => $job_id_chain, + chain_id => $recipe{job_id_chain}, id => 
$case_id, file_info_href => $file_info_href, file_name_prefixes_ref => [$case_id], @@ -202,8 +195,8 @@ sub analysis_delly_reformat { directory_id => $case_id, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, - process_time => $recipe_resource{time}, + memory_allocation => $recipe{memory}, + process_time => $recipe{time}, recipe_directory => $recipe_name, recipe_name => $recipe_name, temp_directory => $temp_directory, @@ -219,9 +212,7 @@ sub analysis_delly_reformat { my %delly_sample_file_info; my $process_batches_count = 1; SAMPLE_ID: - while ( my ( $sample_id_index, $sample_id ) = - each @{ $active_parameter_href->{sample_ids} } ) - { + while ( my ( $sample_id_index, $sample_id ) = each @{ $active_parameter_href->{sample_ids} } ) { PROGRAM_TAG: while ( my ( $recipe_tag, $stream ) = each %recipe_tag_keys ) { @@ -249,8 +240,7 @@ sub analysis_delly_reformat { ## Delly call bcf sample infiles my @delly_merge_infile_paths = - map { $delly_sample_file_info{$_}{in}{q{.bcf}} } - @{ $active_parameter_href->{sample_ids} }; + map { $delly_sample_file_info{$_}{in}{q{.bcf}} } @{ $active_parameter_href->{sample_ids} }; ## We have something to merge if ( scalar @{ $active_parameter_href->{sample_ids} } > 1 ) { @@ -270,11 +260,8 @@ sub analysis_delly_reformat { infile_paths_ref => \@delly_merge_infile_paths, min_size => 0, max_size => $SV_MAX_SIZE, - outfile_path => $outfile_path_prefix - . $UNDERSCORE - . q{merged} - . $DOT . q{bcf}, - stderrfile_path => $recipe_file_path + outfile_path => $outfile_path_prefix . $UNDERSCORE . q{merged} . $DOT . q{bcf}, + stderrfile_path => $recipe_file_path . $UNDERSCORE . q{merged} . $DOT @@ -357,11 +344,8 @@ sub analysis_delly_reformat { { filehandle => $filehandle, infile_paths_ref => \@delly_genotype_outfile_paths, - outfile_path => $outfile_path_prefix - . $UNDERSCORE - . q{to_sort} - . $outfile_suffix, - output_type => q{v}, + outfile_path => $outfile_path_prefix . $UNDERSCORE . 
q{to_sort} . $outfile_suffix, + output_type => q{v}, stderrfile_path => $xargs_file_path_prefix . $DOT . q{stderr.txt}, stdoutfile_path => $xargs_file_path_prefix . $DOT . q{stdout.txt}, } @@ -373,17 +357,14 @@ sub analysis_delly_reformat { # Only one sample say {$filehandle} q{## Only one sample - skip merging and regenotyping}; say {$filehandle} -q{## Reformat bcf infile to match outfile from regenotyping with multiple samples}; + q{## Reformat bcf infile to match outfile from regenotyping with multiple samples}; bcftools_view( { filehandle => $filehandle, output_type => q{v}, infile_path => $delly_merge_infile_paths[0], - outfile_path => $outfile_path_prefix - . $UNDERSCORE - . q{to_sort} - . $outfile_suffix, + outfile_path => $outfile_path_prefix . $UNDERSCORE . q{to_sort} . $outfile_suffix, } ); say {$filehandle} $NEWLINE; @@ -393,11 +374,10 @@ q{## Reformat bcf infile to match outfile from regenotyping with multiple sample say {$filehandle} q{## Picard SortVcf}; picardtools_sortvcf( { - filehandle => $filehandle, + filehandle => $filehandle, infile_paths_ref => [ $outfile_path_prefix . $UNDERSCORE . q{to_sort} . $outfile_suffix ], - java_jar => - catfile( $active_parameter_href->{picardtools_path}, q{picard.jar} ), + java_jar => catfile( $active_parameter_href->{picardtools_path}, q{picard.jar} ), java_use_large_pages => $active_parameter_href->{java_use_large_pages}, memory_allocation => q{Xmx2g}, outfile_path => $outfile_path_prefix . $DOT . 
q{vcf}, @@ -413,7 +393,7 @@ q{## Reformat bcf infile to match outfile from regenotyping with multiple sample close $filehandle or $log->logcroak(q{Could not close filehandle}); - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { set_recipe_outfile_in_sample_info( { @@ -425,13 +405,13 @@ q{## Reformat bcf infile to match outfile from regenotyping with multiple sample submit_recipe( { - base_command => $profile_base_command, - dependency_method => q{sample_to_case}, - case_id => $case_id, - job_id_chain => $job_id_chain, - job_id_href => $job_id_href, - job_reservation_name => $active_parameter_href->{job_reservation_name}, - log => $log, + base_command => $profile_base_command, + dependency_method => q{sample_to_case}, + case_id => $case_id, + job_id_chain => $recipe{job_id_chain}, + job_id_href => $job_id_href, + job_reservation_name => $active_parameter_href->{job_reservation_name}, + log => $log, max_parallel_processes_count_href => $file_info_href->{max_parallel_processes_count}, recipe_file_path => $recipe_file_path, diff --git a/lib/MIP/Recipes/Analysis/Dragen_dna.pm b/lib/MIP/Recipes/Analysis/Dragen_dna.pm index 0eab61809..54f9ef2a8 100644 --- a/lib/MIP/Recipes/Analysis/Dragen_dna.pm +++ b/lib/MIP/Recipes/Analysis/Dragen_dna.pm @@ -126,11 +126,11 @@ sub analysis_dragen_dna_align_vc { use MIP::File_info qw{ get_sample_file_attribute }; use MIP::File::Format::Dragen qw{ create_dragen_fastq_list_sample_id }; use MIP::Get::File qw{ get_io_files }; - use MIP::Get::Parameter qw{get_recipe_attributes get_recipe_resources }; use MIP::Parse::File qw{ parse_io_outfiles }; use MIP::Program::Dragen qw{ dragen_dna_analysis }; use MIP::Program::Ssh qw{ ssh }; use MIP::Processmanagement::Processes qw{ submit_recipe }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Sample_info qw{ get_read_group set_recipe_metafile_in_sample_info @@ -155,17 +155,10 @@ sub analysis_dragen_dna_align_vc { ); my @infile_paths = @{ $io{in}{file_paths} }; - my $job_id_chain = 
get_recipe_attributes( - { - attribute => q{chain}, - parameter_href => $parameter_href, - recipe_name => $recipe_name, - } - ); - my $recipe_mode = $active_parameter_href->{$recipe_name}; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); @@ -174,7 +167,7 @@ sub analysis_dragen_dna_align_vc { %io, parse_io_outfiles( { - chain_id => $job_id_chain, + chain_id => $recipe{job_id_chain}, id => $sample_id, file_info_href => $file_info_href, file_name_prefixes_ref => [$sample_id], @@ -188,22 +181,21 @@ sub analysis_dragen_dna_align_vc { my $outdir_path = $io{out}{dir_path}; my $outfile_name_prefix = $io{out}{file_name_prefix}; my $outfile_path = $io{out}{file_path}; - my $outfile_suffix = $io{out}{file_suffix}; ## Filehandles # Create anonymous filehandle my $filehandle = IO::Handle->new(); ## Creates recipe directories (info & data & script), recipe script filenames and writes sbatch header - my ( $recipe_file_path, $recipe_info_path ) = setup_script( + my ($recipe_file_path) = setup_script( { active_parameter_href => $active_parameter_href, - core_number => $recipe_resource{core_number}, + core_number => $recipe{core_number}, directory_id => $sample_id, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, - process_time => $recipe_resource{time}, + memory_allocation => $recipe{memory}, + process_time => $recipe{time}, recipe_directory => $recipe_name, recipe_name => $recipe_name, set_errexit => 0, @@ -293,10 +285,9 @@ sub analysis_dragen_dna_align_vc { print {$filehandle} $SPACE; my @cmds = dragen_dna_analysis( { - alignment_output_format => q{BAM}, - dbsnp_file_path => $active_parameter_href->{dragen_dbsnp}, - dragen_hash_ref_dir_path => - $active_parameter_href->{dragen_hash_ref_dir_path}, + alignment_output_format => q{BAM}, + dbsnp_file_path => 
$active_parameter_href->{dragen_dbsnp}, + dragen_hash_ref_dir_path => $active_parameter_href->{dragen_hash_ref_dir_path}, enable_bam_indexing => 1, enable_duplicate_marking => 1, enable_map_align => 1, @@ -325,7 +316,7 @@ sub analysis_dragen_dna_align_vc { ## Close filehandleS close $filehandle or $log->logcroak(q{Could not close filehandle}); - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { ## Collect QC metadata info for later use set_recipe_outfile_in_sample_info( @@ -340,13 +331,13 @@ sub analysis_dragen_dna_align_vc { submit_recipe( { - base_command => $profile_base_command, - case_id => $case_id, - dependency_method => q{sample_to_sample}, - job_id_chain => $job_id_chain, - job_id_href => $job_id_href, - job_reservation_name => $active_parameter_href->{job_reservation_name}, - log => $log, + base_command => $profile_base_command, + case_id => $case_id, + dependency_method => q{sample_to_sample}, + job_id_chain => $recipe{job_id_chain}, + job_id_href => $job_id_href, + job_reservation_name => $active_parameter_href->{job_reservation_name}, + log => $log, max_parallel_processes_count_href => $file_info_href->{max_parallel_processes_count}, recipe_file_path => $recipe_file_path, @@ -443,11 +434,11 @@ sub analysis_dragen_dna_joint_calling { use MIP::Pedigree qw{ create_fam_file }; use MIP::Get::File qw{ get_io_files }; - use MIP::Get::Parameter qw{ get_recipe_attributes get_recipe_resources }; use MIP::Parse::File qw{ parse_io_outfiles }; use MIP::Program::Dragen qw{ dragen_dna_analysis }; use MIP::Program::Ssh qw{ ssh }; use MIP::Processmanagement::Processes qw{ submit_recipe }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Sample_info qw{ set_recipe_outfile_in_sample_info }; use MIP::Script::Setup_script qw{ setup_script }; @@ -456,17 +447,10 @@ sub analysis_dragen_dna_joint_calling { ## Retrieve logger object my $log = Log::Log4perl->get_logger($LOG_NAME); - my $job_id_chain = get_recipe_attributes( - { - attribute => q{chain}, - 
parameter_href => $parameter_href, - recipe_name => $recipe_name, - } - ); - my $recipe_mode = $active_parameter_href->{$recipe_name}; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); @@ -475,7 +459,7 @@ sub analysis_dragen_dna_joint_calling { ## Set and get the io files per chain, id and stream my %io = parse_io_outfiles( { - chain_id => $job_id_chain, + chain_id => $recipe{job_id_chain}, id => $case_id, file_info_href => $file_info_href, file_name_prefixes_ref => [$case_id], @@ -500,12 +484,12 @@ sub analysis_dragen_dna_joint_calling { my ( $recipe_file_path, $recipe_info_path ) = setup_script( { active_parameter_href => $active_parameter_href, - core_number => $recipe_resource{core_number}, + core_number => $recipe{core_number}, directory_id => $case_id, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, - process_time => $recipe_resource{time}, + memory_allocation => $recipe{memory}, + process_time => $recipe{time}, recipe_directory => $recipe_name, recipe_name => $recipe_name, set_errexit => 0, @@ -514,9 +498,7 @@ sub analysis_dragen_dna_joint_calling { ## Collect infiles for all sample_ids my @dragen_infile_paths; - while ( my ( $sample_id_index, $sample_id ) = - each @{ $active_parameter_href->{sample_ids} } ) - { + while ( my ( $sample_id_index, $sample_id ) = each @{ $active_parameter_href->{sample_ids} } ) { ## Get the io infiles per chain and id my %sample_io = get_io_files( @@ -567,9 +549,8 @@ sub analysis_dragen_dna_joint_calling { print {$filehandle} $SPACE; my @combine_cmds = dragen_dna_analysis( { - dbsnp_file_path => $active_parameter_href->{dragen_dbsnp}, - dragen_hash_ref_dir_path => - $active_parameter_href->{dragen_hash_ref_dir_path}, + dbsnp_file_path => $active_parameter_href->{dragen_dbsnp}, + dragen_hash_ref_dir_path => 
$active_parameter_href->{dragen_hash_ref_dir_path}, enable_combinegvcfs => 1, filehandle => $filehandle, force => 1, @@ -601,10 +582,9 @@ sub analysis_dragen_dna_joint_calling { print {$filehandle} $SPACE; my @joint_call_cmds = dragen_dna_analysis( { - dbsnp_file_path => $active_parameter_href->{dragen_dbsnp}, - disable_vcf_compression => 1, - dragen_hash_ref_dir_path => - $active_parameter_href->{dragen_hash_ref_dir_path}, + dbsnp_file_path => $active_parameter_href->{dragen_dbsnp}, + disable_vcf_compression => 1, + dragen_hash_ref_dir_path => $active_parameter_href->{dragen_hash_ref_dir_path}, enable_joint_genotyping => 1, filehandle => $filehandle, force => 1, @@ -627,7 +607,7 @@ sub analysis_dragen_dna_joint_calling { ## Close filehandleS close $filehandle or $log->logcroak(q{Could not close filehandle}); - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { ## Collect QC metadata info for later use set_recipe_outfile_in_sample_info( @@ -640,12 +620,12 @@ sub analysis_dragen_dna_joint_calling { submit_recipe( { - base_command => $profile_base_command, - case_id => $case_id, - dependency_method => q{sample_to_case}, - job_id_chain => $job_id_chain, - job_id_href => $job_id_href, - log => $log, + base_command => $profile_base_command, + case_id => $case_id, + dependency_method => q{sample_to_case}, + job_id_chain => $recipe{job_id_chain}, + job_id_href => $job_id_href, + log => $log, max_parallel_processes_count_href => $file_info_href->{max_parallel_processes_count}, recipe_file_path => $recipe_file_path, @@ -713,8 +693,7 @@ sub _dragen_wait_loop { say {$filehandle} q{else}; say {$filehandle} q?for i in {1..? . $max_retries . q?}?; say {$filehandle} $TAB . q{do}; - say {$filehandle} $TAB x 2 - . q{echo "$cmd failed $i times: Retrying in $time_to_sleep"}; + say {$filehandle} $TAB x 2 . 
q{echo "$cmd failed $i times: Retrying in $time_to_sleep"}; say {$filehandle} $TAB x 2, qq{sleep $time_to_sleep}; say {$filehandle} $TAB x 2, $cmd; say {$filehandle} $TAB x 2, q{status=$?}; diff --git a/lib/MIP/Recipes/Analysis/Endvariantannotationblock.pm b/lib/MIP/Recipes/Analysis/Endvariantannotationblock.pm index 265375aab..84cd9214b 100644 --- a/lib/MIP/Recipes/Analysis/Endvariantannotationblock.pm +++ b/lib/MIP/Recipes/Analysis/Endvariantannotationblock.pm @@ -131,12 +131,12 @@ sub analysis_endvariantannotationblock { use MIP::Analysis qw{ get_vcf_parser_analysis_suffix }; use MIP::Get::File qw{ get_io_files }; - use MIP::Get::Parameter qw{ get_recipe_attributes get_recipe_resources }; use MIP::Program::Gnu::Software::Gnu_grep qw{ gnu_grep }; use MIP::Parse::File qw{ parse_io_outfiles }; use MIP::Processmanagement::Processes qw{ submit_recipe }; use MIP::Program::Htslib qw{ htslib_bgzip htslib_tabix }; use MIP::Program::Gatk qw{ gatk_concatenate_variants }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Sample_info qw{ set_file_path_to_store set_recipe_metafile_in_sample_info }; use MIP::Script::Setup_script qw{ setup_script }; @@ -161,18 +161,11 @@ sub analysis_endvariantannotationblock { my $infile_name_prefix = $io{in}{file_name_prefix}; my $infile_path_prefix = $io{in}{file_path_prefix}; - my @contigs = @{ $file_info_href->{contigs} }; - my $job_id_chain = get_recipe_attributes( - { - parameter_href => $parameter_href, - recipe_name => $recipe_name, - attribute => q{chain}, - } - ); - my $recipe_mode = $active_parameter_href->{$recipe_name}; - my %recipe_resource = get_recipe_resources( + my @contigs = @{ $file_info_href->{contigs} }; + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); @@ -188,7 +181,7 @@ sub analysis_endvariantannotationblock { %io, parse_io_outfiles( { - chain_id => $job_id_chain, + chain_id => 
$recipe{job_id_chain}, id => $case_id, file_info_href => $file_info_href, file_name_prefix => $infile_name_prefix, @@ -214,12 +207,12 @@ sub analysis_endvariantannotationblock { my ( $recipe_file_path, $recipe_info_path ) = setup_script( { active_parameter_href => $active_parameter_href, - core_number => $recipe_resource{core_number}, + core_number => $recipe{core_number}, filehandle => $filehandle, directory_id => $case_id, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, - process_time => $recipe_resource{time}, + memory_allocation => $recipe{memory}, + process_time => $recipe{time}, recipe_directory => $recipe_name, recipe_name => $recipe_name, temp_directory => $temp_directory, @@ -267,8 +260,7 @@ sub analysis_endvariantannotationblock { filehandle => $filehandle, filter_file_path => catfile( $reference_dir, - $active_parameter_href - ->{endvariantannotationblock_remove_genes_file} + $active_parameter_href->{endvariantannotationblock_remove_genes_file} ), infile_path => $outfile_paths[$analysis_suffix_index], stdoutfile_path => $grep_outfile_path, @@ -305,7 +297,7 @@ sub analysis_endvariantannotationblock { ); say {$filehandle} $NEWLINE; - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { my $path = $outfile_paths[$analysis_suffix_index] . $DOT . 
q{gz}; set_recipe_metafile_in_sample_info( @@ -333,17 +325,17 @@ sub analysis_endvariantannotationblock { close $filehandle or $log->logcroak(q{Could not close filehandle}); - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { submit_recipe( { - base_command => $profile_base_command, - dependency_method => q{sample_to_case}, - case_id => $case_id, - job_id_chain => $job_id_chain, - job_id_href => $job_id_href, - job_reservation_name => $active_parameter_href->{job_reservation_name}, - log => $log, + base_command => $profile_base_command, + dependency_method => q{sample_to_case}, + case_id => $case_id, + job_id_chain => $recipe{job_id_chain}, + job_id_href => $job_id_href, + job_reservation_name => $active_parameter_href->{job_reservation_name}, + log => $log, max_parallel_processes_count_href => $file_info_href->{max_parallel_processes_count}, recipe_file_path => $recipe_file_path, @@ -456,10 +448,10 @@ sub analysis_endvariantannotationblock_panel { check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; use MIP::Get::File qw{ get_io_files }; - use MIP::Get::Parameter qw{ get_recipe_attributes get_recipe_resources }; use MIP::Parse::File qw{ parse_io_outfiles }; use MIP::Processmanagement::Processes qw{ submit_recipe }; use MIP::Program::Htslib qw{ htslib_bgzip htslib_tabix }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Sample_info qw{ set_file_path_to_store set_recipe_metafile_in_sample_info }; use MIP::Script::Setup_script qw{ setup_script }; @@ -483,17 +475,10 @@ sub analysis_endvariantannotationblock_panel { my $infile_name_prefix = $io{in}{file_name_prefix}; my %infile_path = %{ $io{in}{file_path_href} }; - my $job_id_chain = get_recipe_attributes( - { - parameter_href => $parameter_href, - recipe_name => $recipe_name, - attribute => q{chain}, - } - ); - my $recipe_mode = $active_parameter_href->{$recipe_name}; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => 
$active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); @@ -503,7 +488,7 @@ sub analysis_endvariantannotationblock_panel { %io, parse_io_outfiles( { - chain_id => $job_id_chain, + chain_id => $recipe{job_id_chain}, id => $case_id, file_info_href => $file_info_href, file_name_prefix => $infile_name_prefix, @@ -525,12 +510,12 @@ sub analysis_endvariantannotationblock_panel { my ( $recipe_file_path, $recipe_info_path ) = setup_script( { active_parameter_href => $active_parameter_href, - core_number => $recipe_resource{core_number}, + core_number => $recipe{core_number}, filehandle => $filehandle, directory_id => $case_id, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, - process_time => $recipe_resource{time}, + memory_allocation => $recipe{memory}, + process_time => $recipe{time}, recipe_directory => $recipe_name, recipe_name => $recipe_name, temp_directory => $temp_directory, @@ -568,7 +553,7 @@ sub analysis_endvariantannotationblock_panel { ); say {$filehandle} $NEWLINE; - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { set_recipe_metafile_in_sample_info( { @@ -595,17 +580,17 @@ sub analysis_endvariantannotationblock_panel { close $filehandle or $log->logcroak(q{Could not close filehandle}); - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { submit_recipe( { - base_command => $profile_base_command, - dependency_method => q{sample_to_case}, - case_id => $case_id, - job_id_chain => $job_id_chain, - job_id_href => $job_id_href, - job_reservation_name => $active_parameter_href->{job_reservation_name}, - log => $log, + base_command => $profile_base_command, + dependency_method => q{sample_to_case}, + case_id => $case_id, + job_id_chain => $recipe{job_id_chain}, + job_id_href => $job_id_href, + job_reservation_name => $active_parameter_href->{job_reservation_name}, + log => $log, max_parallel_processes_count_href => $file_info_href->{max_parallel_processes_count}, recipe_file_path => 
$recipe_file_path, diff --git a/lib/MIP/Recipes/Analysis/Expansionhunter.pm b/lib/MIP/Recipes/Analysis/Expansionhunter.pm index ef09e401e..96bf9e383 100644 --- a/lib/MIP/Recipes/Analysis/Expansionhunter.pm +++ b/lib/MIP/Recipes/Analysis/Expansionhunter.pm @@ -127,14 +127,15 @@ sub analysis_expansionhunter { use MIP::Cluster qw{ get_core_number update_memory_allocation }; use MIP::Get::File qw{ get_io_files }; - use MIP::Get::Parameter qw{ get_recipe_attributes get_recipe_resources }; use MIP::Parse::File qw{ parse_io_outfiles }; use MIP::Processmanagement::Processes qw{ print_wait submit_recipe }; - use MIP::Program::Bcftools qw{ bcftools_index bcftools_norm bcftools_rename_vcf_samples bcftools_view }; + use MIP::Program::Bcftools + qw{ bcftools_index bcftools_norm bcftools_rename_vcf_samples bcftools_view }; use MIP::Program::Expansionhunter qw{ expansionhunter }; use MIP::Program::Htslib qw{ htslib_bgzip }; use MIP::Program::Stranger qw{ stranger }; use MIP::Program::Svdb qw{ svdb_merge }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Sample_info qw{ get_pedigree_sample_id_attributes set_file_path_to_store set_recipe_outfile_in_sample_info }; use MIP::Script::Setup_script qw{ setup_script }; @@ -148,21 +149,13 @@ sub analysis_expansionhunter { my $max_cores_per_node = $active_parameter_href->{max_cores_per_node}; my $modifier_core_number = scalar( @{ $active_parameter_href->{sample_ids} } ); - my $human_genome_reference = - $arg_href->{active_parameter_href}{human_genome_reference}; - my $job_id_chain = get_recipe_attributes( - { - parameter_href => $parameter_href, - recipe_name => $recipe_name, - attribute => q{chain}, - } - ); - my $recipe_mode = $active_parameter_href->{$recipe_name}; + my $human_genome_reference = $arg_href->{active_parameter_href}{human_genome_reference}; my $variant_catalog_file_path = $active_parameter_href->{expansionhunter_variant_catalog_file_path}; - my %recipe_resource = get_recipe_resources( + my %recipe = 
parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); @@ -170,7 +163,7 @@ sub analysis_expansionhunter { ## Set and get the io files per chain, id and stream my %io = parse_io_outfiles( { - chain_id => $job_id_chain, + chain_id => $recipe{job_id_chain}, id => $case_id, file_info_href => $file_info_href, file_name_prefixes_ref => [$case_id], @@ -194,14 +187,14 @@ sub analysis_expansionhunter { { max_cores_per_node => $max_cores_per_node, modifier_core_number => $modifier_core_number, - recipe_core_number => $recipe_resource{core_number}, + recipe_core_number => $recipe{core_number}, } ); my $memory_allocation = update_memory_allocation( { node_ram_memory => $active_parameter_href->{node_ram_memory}, parallel_processes => $core_number, - process_memory_allocation => $recipe_resource{memory}, + process_memory_allocation => $recipe{memory}, } ); @@ -214,7 +207,7 @@ sub analysis_expansionhunter { filehandle => $filehandle, job_id_href => $job_id_href, memory_allocation => $memory_allocation, - process_time => $recipe_resource{time}, + process_time => $recipe{time}, recipe_directory => $recipe_name, recipe_name => $recipe_name, temp_directory => $temp_directory, @@ -227,9 +220,7 @@ sub analysis_expansionhunter { ## Collect infiles for all sample_ids to enable migration to temporary directory SAMPLE_ID: - while ( my ( $sample_id_index, $sample_id ) = - each @{ $active_parameter_href->{sample_ids} } ) - { + while ( my ( $sample_id_index, $sample_id ) = each @{ $active_parameter_href->{sample_ids} } ) { ## Get the io infiles per chain and id my %sample_io = get_io_files( @@ -265,9 +256,7 @@ sub analysis_expansionhunter { my @decompose_outfile_paths; SAMPLE_ID: - while ( my ( $sample_id_index, $sample_id ) = - each @{ $active_parameter_href->{sample_ids} } ) - { + while ( my ( $sample_id_index, $sample_id ) = each @{ $active_parameter_href->{sample_ids} } ) { 
$process_batches_count = print_wait( { @@ -307,11 +296,12 @@ sub analysis_expansionhunter { } ); say {$filehandle} $AMPERSAND, $NEWLINE; - push @decompose_infile_paths, $sample_outfile_path_prefix . $outfile_suffix; + push @decompose_infile_paths, $sample_outfile_path_prefix . $outfile_suffix; push @decompose_infile_path_prefixes, $sample_outfile_path_prefix; push @decompose_outfile_paths, $outfile_path_prefix - . $UNDERSCORE . q{decompose} + . $UNDERSCORE + . q{decompose} . $UNDERSCORE . $sample_id . $outfile_suffix; @@ -322,34 +312,38 @@ sub analysis_expansionhunter { ## Split multiallelic variants say {$filehandle} q{## Split multiallelic variants}; ## Create iterator object - my $decompose_file_paths_iter = each_array( @decompose_infile_paths, @decompose_infile_path_prefixes, @decompose_outfile_paths ); + my $decompose_file_paths_iter = + each_array( @decompose_infile_paths, @decompose_infile_path_prefixes, + @decompose_outfile_paths ); DECOMPOSE_FILES_ITER: - while ( my ( $decompose_infile_path, $decompose_infile_path_prefix, $decompose_outfile_path ) = $decompose_file_paths_iter->() ) { + while ( my ( $decompose_infile_path, $decompose_infile_path_prefix, $decompose_outfile_path ) = + $decompose_file_paths_iter->() ) + { - htslib_bgzip ( + htslib_bgzip( { - filehandle => $filehandle, - infile_path => $decompose_infile_path, - stdoutfile_path => $decompose_infile_path_prefix . q{.bcf}, - write_to_stdout => 1, + filehandle => $filehandle, + infile_path => $decompose_infile_path, + stdoutfile_path => $decompose_infile_path_prefix . q{.bcf}, + write_to_stdout => 1, } ); say {$filehandle} $NEWLINE; - bcftools_index ( + bcftools_index( { - filehandle => $filehandle, - infile_path => $decompose_infile_path_prefix . q{.bcf}, - output_type => q{tbi}, + filehandle => $filehandle, + infile_path => $decompose_infile_path_prefix . 
q{.bcf}, + output_type => q{tbi}, } ); say {$filehandle} $NEWLINE; bcftools_norm( { - filehandle => $filehandle, - infile_path => $decompose_infile_path_prefix . q{.bcf}, - multiallelic => q{-}, - outfile_path => $decompose_outfile_path, + filehandle => $filehandle, + infile_path => $decompose_infile_path_prefix . q{.bcf}, + multiallelic => q{-}, + outfile_path => $decompose_outfile_path, } ); say {$filehandle} $NEWLINE; @@ -401,7 +395,7 @@ sub analysis_expansionhunter { close $filehandle; - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { set_recipe_outfile_in_sample_info( { @@ -425,13 +419,13 @@ sub analysis_expansionhunter { submit_recipe( { - base_command => $profile_base_command, - case_id => $case_id, - dependency_method => q{sample_to_case}, - job_id_chain => $job_id_chain, - job_id_href => $job_id_href, - job_reservation_name => $active_parameter_href->{job_reservation_name}, - log => $log, + base_command => $profile_base_command, + case_id => $case_id, + dependency_method => q{sample_to_case}, + job_id_chain => $recipe{job_id_chain}, + job_id_href => $job_id_href, + job_reservation_name => $active_parameter_href->{job_reservation_name}, + log => $log, max_parallel_processes_count_href => $file_info_href->{max_parallel_processes_count}, recipe_file_path => $recipe_file_path, diff --git a/lib/MIP/Recipes/Analysis/Fastqc.pm b/lib/MIP/Recipes/Analysis/Fastqc.pm index 41faae8e1..429a1bb9a 100644 --- a/lib/MIP/Recipes/Analysis/Fastqc.pm +++ b/lib/MIP/Recipes/Analysis/Fastqc.pm @@ -130,7 +130,7 @@ sub analysis_fastqc { use MIP::Environment::Cluster qw{ check_max_core_number }; use MIP::File_info qw{ get_sample_file_attribute }; use MIP::Get::File qw{ get_io_files }; - use MIP::Get::Parameter qw{ get_recipe_attributes get_recipe_resources }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Program::Gnu::Coreutils qw{ gnu_mkdir }; use MIP::Parse::File qw{ parse_io_outfiles }; use MIP::Processmanagement::Processes qw{ print_wait submit_recipe }; 
@@ -158,36 +158,28 @@ sub analysis_fastqc { my @infile_paths = @{ $io{in}{file_paths} }; my @infile_name_prefixes = @{ $io{in}{file_name_prefixes} }; - my $job_id_chain = get_recipe_attributes( - { - parameter_href => $parameter_href, - recipe_name => $recipe_name, - attribute => q{chain}, - } - ); - my $recipe_mode = $active_parameter_href->{$recipe_name}; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); - my $core_number = $recipe_resource{core_number}; + my $core_number = $recipe{core_number}; ## Outpaths my $outsample_directory = catdir( $active_parameter_href->{outdata_dir}, $sample_id, $recipe_name ); my @outfile_paths = - map { - catdir( $outsample_directory, $_ . $UNDERSCORE . q{fastqc}, q{fastqc_data.txt} ) - } @infile_name_prefixes; + map { catdir( $outsample_directory, $_ . $UNDERSCORE . q{fastqc}, q{fastqc_data.txt} ) } + @infile_name_prefixes; ## Set and get the io files per chain, id and stream %io = ( %io, parse_io_outfiles( { - chain_id => $job_id_chain, + chain_id => $recipe{job_id_chain}, id => $sample_id, file_info_href => $file_info_href, file_paths_ref => \@outfile_paths, @@ -246,7 +238,7 @@ sub analysis_fastqc { my $memory_allocation = update_memory_allocation( { parallel_processes => $core_number, - process_memory_allocation => $recipe_resource{core_number}, + process_memory_allocation => $recipe{core_number}, node_ram_memory => $active_parameter_href->{node_ram_memory}, } ); @@ -260,7 +252,7 @@ sub analysis_fastqc { filehandle => $filehandle, job_id_href => $job_id_href, memory_allocation => $memory_allocation, - process_time => $recipe_resource{time}, + process_time => $recipe{time}, recipe_directory => $recipe_name, recipe_name => $recipe_name, temp_directory => $temp_directory, @@ -306,7 +298,7 @@ sub analysis_fastqc { say {$filehandle} q{&}, $NEWLINE; ## Collect QC metadata info 
for active recipe for later use - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { set_recipe_outfile_in_sample_info( { @@ -324,17 +316,17 @@ sub analysis_fastqc { close $filehandle; - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { submit_recipe( { - base_command => $profile_base_command, - dependency_method => q{sample_to_island}, - case_id => $case_id, - job_id_chain => $job_id_chain, - job_id_href => $job_id_href, - job_reservation_name => $active_parameter_href->{job_reservation_name}, - log => $log, + base_command => $profile_base_command, + dependency_method => q{sample_to_island}, + case_id => $case_id, + job_id_chain => $recipe{job_id_chain}, + job_id_href => $job_id_href, + job_reservation_name => $active_parameter_href->{job_reservation_name}, + log => $log, max_parallel_processes_count_href => $file_info_href->{max_parallel_processes_count}, recipe_file_path => $recipe_file_path, diff --git a/lib/MIP/Recipes/Analysis/Frequency_filter.pm b/lib/MIP/Recipes/Analysis/Frequency_filter.pm index 232cb962b..f84a0b2ef 100644 --- a/lib/MIP/Recipes/Analysis/Frequency_filter.pm +++ b/lib/MIP/Recipes/Analysis/Frequency_filter.pm @@ -142,12 +142,12 @@ sub analysis_frequency_filter { check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; use MIP::Get::File qw{ get_io_files }; - use MIP::Get::Parameter qw{ get_recipe_attributes get_recipe_resources }; use MIP::Parse::File qw{ parse_io_outfiles }; use MIP::Processmanagement::Processes qw{ submit_recipe }; use MIP::Program::Bcftools qw{ bcftools_index bcftools_view }; - use MIP::Sample_info qw{ set_recipe_outfile_in_sample_info }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Recipes::Analysis::Xargs qw{ xargs_command }; + use MIP::Sample_info qw{ set_recipe_outfile_in_sample_info }; use MIP::Script::Setup_script qw{ setup_script }; ### PREPROCESSING: @@ -171,17 +171,10 @@ sub analysis_frequency_filter { my %infile_path = %{ $io{in}{file_path_href} }; my @contigs_size_ordered 
= @{ $file_info_href->{contigs_size_ordered} }; - my $job_id_chain = get_recipe_attributes( - { - parameter_href => $parameter_href, - recipe_name => $recipe_name, - attribute => q{chain}, - } - ); - my $recipe_mode = $active_parameter_href->{$recipe_name}; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); @@ -191,7 +184,7 @@ sub analysis_frequency_filter { %io, parse_io_outfiles( { - chain_id => $job_id_chain, + chain_id => $recipe{job_id_chain}, id => $case_id, file_info_href => $file_info_href, file_name_prefix => $infile_name_prefix, @@ -216,12 +209,12 @@ sub analysis_frequency_filter { my ( $recipe_file_path, $recipe_info_path ) = setup_script( { active_parameter_href => $active_parameter_href, - core_number => $recipe_resource{core_number}, + core_number => $recipe{core_number}, directory_id => $case_id, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, - process_time => $recipe_resource{time}, + memory_allocation => $recipe{memory}, + process_time => $recipe{time}, recipe_directory => $recipe_name, recipe_name => $recipe_name, temp_directory => $temp_directory, @@ -237,7 +230,7 @@ sub analysis_frequency_filter { ## Create file commands for xargs ( $xargs_file_counter, $xargs_file_path_prefix ) = xargs_command( { - core_number => $recipe_resource{core_number}, + core_number => $recipe{core_number}, filehandle => $filehandle, file_path => $recipe_file_path, recipe_info_path => $recipe_info_path, @@ -250,13 +243,12 @@ sub analysis_frequency_filter { foreach my $contig (@contigs_size_ordered) { ## Get parameters - my $stderrfile_path = - $xargs_file_path_prefix . $DOT . $contig . $DOT . q{stderr.txt}; + my $stderrfile_path = $xargs_file_path_prefix . $DOT . $contig . $DOT . 
q{stderr.txt}; ## Build the exclude filter command my $exclude_filter = _build_bcftools_filter( { - fqf_annotations_ref => $active_parameter_href->{fqf_annotations}, + fqf_annotations_ref => $active_parameter_href->{fqf_annotations}, fqf_bcftools_filter_threshold => $active_parameter_href->{fqf_bcftools_filter_threshold}, vcfanno_file_toml => $active_parameter_href->{vcfanno_config}, @@ -290,7 +282,7 @@ sub analysis_frequency_filter { close $xargsfilehandle or $log->logcroak(q{Could not close xargsfilehandle}); - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { ## Collect QC metadata info for later use set_recipe_outfile_in_sample_info( @@ -302,13 +294,13 @@ sub analysis_frequency_filter { ); submit_recipe( { - base_command => $profile_base_command, - dependency_method => q{sample_to_case}, - case_id => $case_id, - job_id_chain => $job_id_chain, - job_id_href => $job_id_href, - job_reservation_name => $active_parameter_href->{job_reservation_name}, - log => $log, + base_command => $profile_base_command, + dependency_method => q{sample_to_case}, + case_id => $case_id, + job_id_chain => $recipe{job_id_chain}, + job_id_href => $job_id_href, + job_reservation_name => $active_parameter_href->{job_reservation_name}, + log => $log, max_parallel_processes_count_href => $file_info_href->{max_parallel_processes_count}, recipe_file_path => $recipe_file_path, @@ -414,10 +406,10 @@ sub analysis_frequency_filter_panel { check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; use MIP::Get::File qw{ get_io_files }; - use MIP::Get::Parameter qw{ get_recipe_attributes get_recipe_resources }; use MIP::Parse::File qw{ parse_io_outfiles }; use MIP::Processmanagement::Processes qw{ submit_recipe }; use MIP::Program::Bcftools qw{ bcftools_index bcftools_view }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Sample_info qw{ set_recipe_outfile_in_sample_info }; use MIP::Script::Setup_script qw{ setup_script }; @@ -441,17 +433,10 @@ sub 
analysis_frequency_filter_panel { my $infile_name_prefix = $io{in}{file_name_prefix}; my $infile_path = $io{in}{file_path}; - my $job_id_chain = get_recipe_attributes( - { - parameter_href => $parameter_href, - recipe_name => $recipe_name, - attribute => q{chain}, - } - ); - my $recipe_mode = $active_parameter_href->{$recipe_name}; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); @@ -461,7 +446,7 @@ sub analysis_frequency_filter_panel { %io, parse_io_outfiles( { - chain_id => $job_id_chain, + chain_id => $recipe{job_id_chain}, id => $case_id, file_info_href => $file_info_href, file_name_prefixes_ref => [$infile_name_prefix], @@ -483,12 +468,12 @@ sub analysis_frequency_filter_panel { my ( $recipe_file_path, $recipe_info_path ) = setup_script( { active_parameter_href => $active_parameter_href, - core_number => $recipe_resource{core_number}, + core_number => $recipe{core_number}, directory_id => $case_id, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, - process_time => $recipe_resource{time}, + memory_allocation => $recipe{memory}, + process_time => $recipe{time}, recipe_directory => $recipe_name, recipe_name => $recipe_name, temp_directory => $temp_directory, @@ -502,7 +487,7 @@ sub analysis_frequency_filter_panel { ## Build the exclude filter command my $exclude_filter = _build_bcftools_filter( { - fqf_annotations_ref => $active_parameter_href->{fqf_annotations}, + fqf_annotations_ref => $active_parameter_href->{fqf_annotations}, fqf_bcftools_filter_threshold => $active_parameter_href->{fqf_bcftools_filter_threshold}, vcfanno_file_toml => $active_parameter_href->{vcfanno_config}, @@ -531,7 +516,7 @@ sub analysis_frequency_filter_panel { close $filehandle or $log->logcroak(q{Could not close filehandle}); - if ( $recipe_mode == 1 ) { + if ( 
$recipe{mode} == 1 ) { ## Collect QC metadata info for later use set_recipe_outfile_in_sample_info( @@ -543,13 +528,13 @@ sub analysis_frequency_filter_panel { ); submit_recipe( { - base_command => $profile_base_command, - dependency_method => q{sample_to_case}, - case_id => $case_id, - job_id_chain => $job_id_chain, - job_id_href => $job_id_href, - job_reservation_name => $active_parameter_href->{job_reservation_name}, - log => $log, + base_command => $profile_base_command, + dependency_method => q{sample_to_case}, + case_id => $case_id, + job_id_chain => $recipe{job_id_chain}, + job_id_href => $job_id_href, + job_reservation_name => $active_parameter_href->{job_reservation_name}, + log => $log, max_parallel_processes_count_href => $file_info_href->{max_parallel_processes_count}, recipe_file_path => $recipe_file_path, @@ -624,9 +609,7 @@ sub _build_bcftools_filter { my $threshold = $SPACE . q{>} . $SPACE . $fqf_bcftools_filter_threshold . $SPACE; ANNOTATION: - while ( my ( $annotation_index, $annotation_href ) = - each @{ $vcfanno_config{annotation} } ) - { + while ( my ( $annotation_index, $annotation_href ) = each @{ $vcfanno_config{annotation} } ) { ## Limit to requested frequency annotations my @frequency_annotations = diff --git a/lib/MIP/Recipes/Analysis/Gatk_asereadcounter.pm b/lib/MIP/Recipes/Analysis/Gatk_asereadcounter.pm index 868ba4371..0cbd6067d 100644 --- a/lib/MIP/Recipes/Analysis/Gatk_asereadcounter.pm +++ b/lib/MIP/Recipes/Analysis/Gatk_asereadcounter.pm @@ -128,12 +128,12 @@ sub analysis_gatk_asereadcounter { check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; use MIP::Get::File qw{ get_io_files }; - use MIP::Get::Parameter qw{ get_recipe_attributes get_recipe_resources }; use MIP::Parse::File qw{ parse_io_outfiles }; use MIP::Processmanagement::Processes qw{ submit_recipe }; use MIP::Program::Gatk qw{ gatk_asereadcounter }; use MIP::Program::Gatk qw{ gatk_indexfeaturefile }; use MIP::Program::Bcftools qw{ bcftools_view }; + use 
MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Script::Setup_script qw{ setup_script }; use MIP::Sample_info qw{ set_file_path_to_store set_recipe_outfile_in_sample_info }; @@ -184,20 +184,13 @@ sub analysis_gatk_asereadcounter { my $alignment_suffix = $alignment_io{out}{file_suffix}; my $alignment_file_path = $alignment_file_path_prefix . $alignment_suffix; - my $job_id_chain = get_recipe_attributes( - { - attribute => q{chain}, - parameter_href => $parameter_href, - recipe_name => $recipe_name, - } - ); - my $recipe_mode = $active_parameter_href->{$recipe_name}; my $referencefile_path = $active_parameter_href->{human_genome_reference}; ## Get module resources - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); @@ -208,7 +201,7 @@ sub analysis_gatk_asereadcounter { %io, parse_io_outfiles( { - chain_id => $job_id_chain, + chain_id => $recipe{job_id_chain}, id => $sample_id, file_info_href => $file_info_href, file_name_prefixes_ref => [$variant_infile_name_prefix], @@ -233,12 +226,12 @@ sub analysis_gatk_asereadcounter { my ( $recipe_file_path, $recipe_info_path ) = setup_script( { active_parameter_href => $active_parameter_href, - core_number => $recipe_resource{core_number}, + core_number => $recipe{core_number}, directory_id => $sample_id, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, - process_time => $recipe_resource{time}, + memory_allocation => $recipe{memory}, + process_time => $recipe{time}, recipe_directory => $recipe_name, recipe_name => $recipe_name, temp_directory => $temp_directory, @@ -291,7 +284,7 @@ sub analysis_gatk_asereadcounter { close $filehandle; - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { ## Collect QC metadata info for later use set_recipe_outfile_in_sample_info( @@ -316,13 +309,13 @@ sub 
analysis_gatk_asereadcounter { submit_recipe( { - base_command => $profile_base_command, - dependency_method => q{sample_to_sample}, - case_id => $case_id, - job_id_chain => $job_id_chain, - job_id_href => $job_id_href, - job_reservation_name => $active_parameter_href->{job_reservation_name}, - log => $log, + base_command => $profile_base_command, + dependency_method => q{sample_to_sample}, + case_id => $case_id, + job_id_chain => $recipe{job_id_chain}, + job_id_href => $job_id_href, + job_reservation_name => $active_parameter_href->{job_reservation_name}, + log => $log, max_parallel_processes_count_href => $file_info_href->{max_parallel_processes_count}, recipe_file_path => $recipe_file_path, diff --git a/lib/MIP/Recipes/Analysis/Gatk_baserecalibration.pm b/lib/MIP/Recipes/Analysis/Gatk_baserecalibration.pm index 304bb55dd..c5c2208f8 100644 --- a/lib/MIP/Recipes/Analysis/Gatk_baserecalibration.pm +++ b/lib/MIP/Recipes/Analysis/Gatk_baserecalibration.pm @@ -145,13 +145,13 @@ sub analysis_gatk_baserecalibration { use MIP::File_info qw{ get_merged_infile_prefix }; use MIP::Gatk qw{ get_gatk_intervals }; use MIP::Get::File qw{ get_io_files }; - use MIP::Get::Parameter qw{ get_recipe_attributes get_recipe_resources }; use MIP::Program::Gnu::Coreutils qw{ gnu_cp }; use MIP::Parse::File qw{ parse_io_outfiles }; use MIP::Processmanagement::Processes qw{ submit_recipe }; use MIP::Program::Gatk qw{ gatk_applybqsr gatk_baserecalibrator gatk_gatherbqsrreports }; use MIP::Program::Picardtools qw{ picardtools_gatherbamfiles }; use MIP::Program::Samtools qw{ samtools_index samtools_view }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Recipes::Analysis::Xargs qw{ xargs_command }; use MIP::Sample_info qw{ set_file_path_to_store @@ -181,24 +181,17 @@ sub analysis_gatk_baserecalibration { my $infile_name_prefix = $io{in}{file_name_prefix}; my %infile_path = %{ $io{in}{file_path_href} }; - my %rec_atr = get_recipe_attributes( - { - parameter_href => $parameter_href, - 
recipe_name => $recipe_name, - } - ); my $analysis_type = $active_parameter_href->{analysis_type}{$sample_id}; - my $job_id_chain = $rec_atr{chain}; - my $recipe_mode = $active_parameter_href->{$recipe_name}; my $referencefile_path = $active_parameter_href->{human_genome_reference}; my $xargs_file_path_prefix; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); - my $core_number = $recipe_resource{core_number}; + my $core_number = $recipe{core_number}; ## Add merged infile name prefix after merging all BAM files per sample_id my $merged_infile_prefix = get_merged_infile_prefix( @@ -210,7 +203,7 @@ sub analysis_gatk_baserecalibration { ## Outpaths ## Assign suffix - my $outfile_suffix = $rec_atr{outfile_suffix}; + my $outfile_suffix = $recipe{outfile_suffix}; my $outsample_directory = catdir( $active_parameter_href->{outdata_dir}, $sample_id, $recipe_name ); my $outfile_tag = @@ -226,7 +219,7 @@ sub analysis_gatk_baserecalibration { %io, parse_io_outfiles( { - chain_id => $job_id_chain, + chain_id => $recipe{job_id_chain}, id => $sample_id, file_info_href => $file_info_href, file_paths_ref => \@outfile_paths, @@ -255,8 +248,8 @@ sub analysis_gatk_baserecalibration { directory_id => $sample_id, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, - process_time => $recipe_resource{time}, + memory_allocation => $recipe{memory}, + process_time => $recipe{time}, recipe_directory => $recipe_name, recipe_name => $recipe_name, temp_directory => $temp_directory, @@ -287,7 +280,7 @@ sub analysis_gatk_baserecalibration { { core_number => $core_number, process_memory_allocation => $process_memory_allocation, - recipe_memory_allocation => $recipe_resource{memory}, + recipe_memory_allocation => $recipe{memory}, } ); @@ -456,7 +449,7 @@ sub analysis_gatk_baserecalibration 
{ close $xargsfilehandle; close $filehandle; - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { ## Collect QC metadata info for later use set_recipe_outfile_in_sample_info( @@ -485,7 +478,7 @@ sub analysis_gatk_baserecalibration { base_command => $profile_base_command, case_id => $case_id, dependency_method => q{sample_to_sample}, - job_id_chain => $job_id_chain, + job_id_chain => $recipe{job_id_chain}, job_id_href => $job_id_href, job_reservation_name => $active_parameter_href->{job_reservation_name}, log => $log, @@ -593,11 +586,11 @@ sub analysis_gatk_baserecalibration_panel { use MIP::Active_parameter qw{ get_exome_target_bed_file }; use MIP::Get::File qw{ get_io_files }; - use MIP::Get::Parameter qw{ get_recipe_attributes get_recipe_resources }; use MIP::Parse::File qw{ parse_io_outfiles }; use MIP::Processmanagement::Processes qw{ submit_recipe }; use MIP::Program::Gatk qw{ gatk_applybqsr gatk_baserecalibrator }; use MIP::Program::Samtools qw{ samtools_index samtools_view }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Sample_info qw{ set_file_path_to_store set_recipe_outfile_in_sample_info }; use MIP::Script::Setup_script qw{ setup_script }; @@ -620,22 +613,15 @@ sub analysis_gatk_baserecalibration_panel { my $infile_name_prefix = $io{in}{file_name_prefix}; my $infile_path = $io{in}{file_path}; - my %rec_atr = get_recipe_attributes( - { - parameter_href => $parameter_href, - recipe_name => $recipe_name, - } - ); - my $job_id_chain = $rec_atr{chain}; - my $recipe_mode = $active_parameter_href->{$recipe_name}; my $referencefile_path = $active_parameter_href->{human_genome_reference}; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); - my $core_number = $recipe_resource{core_number}; + my $core_number = $recipe{core_number}; ## Outpaths ## Set and get the io files per chain, id 
and stream @@ -643,7 +629,7 @@ sub analysis_gatk_baserecalibration_panel { %io, parse_io_outfiles( { - chain_id => $job_id_chain, + chain_id => $recipe{job_id_chain}, id => $sample_id, file_info_href => $file_info_href, file_name_prefixes_ref => [$infile_name_prefix], @@ -670,8 +656,8 @@ sub analysis_gatk_baserecalibration_panel { directory_id => $sample_id, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, - process_time => $recipe_resource{time}, + memory_allocation => $recipe{memory}, + process_time => $recipe{time}, recipe_directory => $recipe_name, recipe_name => $recipe_name, temp_directory => $active_parameter_href->{temp_directory}, @@ -687,7 +673,7 @@ sub analysis_gatk_baserecalibration_panel { { core_number => $core_number, process_memory_allocation => $process_memory_allocation, - recipe_memory_allocation => $recipe_resource{memory}, + recipe_memory_allocation => $recipe{memory}, } ); @@ -770,7 +756,7 @@ sub analysis_gatk_baserecalibration_panel { close $filehandle; - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { ## Collect QC metadata info for later use set_recipe_outfile_in_sample_info( @@ -799,7 +785,7 @@ sub analysis_gatk_baserecalibration_panel { base_command => $profile_base_command, case_id => $case_id, dependency_method => q{sample_to_sample}, - job_id_chain => $job_id_chain, + job_id_chain => $recipe{job_id_chain}, job_id_href => $job_id_href, job_reservation_name => $active_parameter_href->{job_reservation_name}, log => $log, @@ -925,13 +911,13 @@ sub analysis_gatk_baserecalibration_rna { use MIP::File_info qw{ get_merged_infile_prefix }; use MIP::Gatk qw{ get_gatk_intervals }; use MIP::Get::File qw{ get_io_files }; - use MIP::Get::Parameter qw{ get_recipe_attributes get_recipe_resources }; use MIP::Program::Gnu::Coreutils qw{ gnu_cp }; use MIP::Parse::File qw{ parse_io_outfiles }; use MIP::Processmanagement::Processes qw{ submit_recipe }; use MIP::Program::Gatk qw{ 
gatk_applybqsr gatk_baserecalibrator gatk_gatherbqsrreports }; use MIP::Program::Picardtools qw{ picardtools_gatherbamfiles }; use MIP::Program::Samtools qw{ samtools_index samtools_view }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Recipes::Analysis::Xargs qw{ xargs_command }; use MIP::Sample_info qw{ set_recipe_outfile_in_sample_info }; use MIP::Script::Setup_script qw{ setup_script }; @@ -957,24 +943,17 @@ sub analysis_gatk_baserecalibration_rna { my $infile_name_prefix = $io{in}{file_name_prefix}; my %infile_path = %{ $io{in}{file_path_href} }; - my %rec_atr = get_recipe_attributes( - { - parameter_href => $parameter_href, - recipe_name => $recipe_name, - } - ); - my $analysis_type = $active_parameter_href->{analysis_type}{$sample_id}; - my $job_id_chain = $rec_atr{chain}; - my $recipe_mode = $active_parameter_href->{$recipe_name}; - my $referencefile_path = $active_parameter_href->{human_genome_reference}; - my $xargs_file_path_prefix; - my %recipe_resource = get_recipe_resources( + my $analysis_type = $active_parameter_href->{analysis_type}{$sample_id}; + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); - my $core_number = $recipe_resource{core_number}; + my $referencefile_path = $active_parameter_href->{human_genome_reference}; + my $xargs_file_path_prefix; + my $core_number = $recipe{core_number}; ## Add merged infile name prefix after merging all BAM files per sample_id my $merged_infile_prefix = get_merged_infile_prefix( @@ -986,7 +965,7 @@ sub analysis_gatk_baserecalibration_rna { ## Outpaths ## Assign suffix - my $outfile_suffix = $rec_atr{outfile_suffix}; + my $outfile_suffix = $recipe{outfile_suffix}; my $outsample_directory = catdir( $active_parameter_href->{outdata_dir}, $sample_id, $recipe_name ); my $outfile_tag = @@ -1002,7 +981,7 @@ sub analysis_gatk_baserecalibration_rna { %io, parse_io_outfiles( { - chain_id => 
$job_id_chain, + chain_id => $recipe{job_id_chain}, id => $sample_id, file_info_href => $file_info_href, file_paths_ref => \@outfile_paths, @@ -1031,8 +1010,8 @@ sub analysis_gatk_baserecalibration_rna { directory_id => $sample_id, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, - process_time => $recipe_resource{time}, + memory_allocation => $recipe{memory}, + process_time => $recipe{time}, recipe_directory => $recipe_name, recipe_name => $recipe_name, temp_directory => $temp_directory, @@ -1063,7 +1042,7 @@ sub analysis_gatk_baserecalibration_rna { { core_number => $core_number, process_memory_allocation => $process_memory_allocation, - recipe_memory_allocation => $recipe_resource{memory}, + recipe_memory_allocation => $recipe{memory}, } ); @@ -1193,7 +1172,7 @@ sub analysis_gatk_baserecalibration_rna { close $xargsfilehandle; close $filehandle; - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { ## Collect QC metadata info for later use set_recipe_outfile_in_sample_info( @@ -1211,7 +1190,7 @@ sub analysis_gatk_baserecalibration_rna { base_command => $profile_base_command, case_id => $case_id, dependency_method => q{sample_to_sample}, - job_id_chain => $job_id_chain, + job_id_chain => $recipe{job_id_chain}, job_id_href => $job_id_href, job_reservation_name => $active_parameter_href->{job_reservation_name}, log => $log, diff --git a/lib/MIP/Recipes/Analysis/Gatk_cnnscorevariants.pm b/lib/MIP/Recipes/Analysis/Gatk_cnnscorevariants.pm index 68d130900..9ca39d71d 100644 --- a/lib/MIP/Recipes/Analysis/Gatk_cnnscorevariants.pm +++ b/lib/MIP/Recipes/Analysis/Gatk_cnnscorevariants.pm @@ -122,7 +122,7 @@ sub analysis_gatk_cnnscorevariants { use MIP::Pedigree qw{ create_fam_file gatk_pedigree_flag }; use MIP::Get::File qw{ get_io_files }; use MIP::Parse::File qw{ parse_io_outfiles }; - use MIP::Get::Parameter qw{ get_recipe_attributes get_recipe_resources }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; 
use MIP::Program::Gnu::Coreutils qw{ gnu_mv }; use MIP::Processmanagement::Processes qw{ submit_recipe }; use MIP::Program::Bcftools qw{ bcftools_norm }; @@ -150,18 +150,11 @@ sub analysis_gatk_cnnscorevariants { my $infile_name_prefix = $io{in}{file_name_prefix}; my $infile_path = $io{in}{file_path}; - my $job_id_chain = get_recipe_attributes( - { - parameter_href => $parameter_href, - recipe_name => $recipe_name, - attribute => q{chain}, - } - ); - my $recipe_mode = $active_parameter_href->{$recipe_name}; my $referencefile_path = $active_parameter_href->{human_genome_reference}; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); @@ -170,7 +163,7 @@ sub analysis_gatk_cnnscorevariants { %io, parse_io_outfiles( { - chain_id => $job_id_chain, + chain_id => $recipe{job_id_chain}, id => $case_id, file_info_href => $file_info_href, file_name_prefixes_ref => [$infile_name_prefix], @@ -194,12 +187,12 @@ sub analysis_gatk_cnnscorevariants { my ( $recipe_file_path, $recipe_info_path ) = setup_script( { active_parameter_href => $active_parameter_href, - core_number => $recipe_resource{core_number}, + core_number => $recipe{core_number}, directory_id => $case_id, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, - process_time => $recipe_resource{time}, + memory_allocation => $recipe{memory}, + process_time => $recipe{time}, recipe_directory => $recipe_name, recipe_name => $recipe_name, temp_directory => $temp_directory, @@ -215,9 +208,7 @@ sub analysis_gatk_cnnscorevariants { ## Store sample id bam infiles my @bam_infiles_paths; - while ( my ( $sample_id_index, $sample_id ) = - each @{ $active_parameter_href->{sample_ids} } ) - { + while ( my ( $sample_id_index, $sample_id ) = each @{ $active_parameter_href->{sample_ids} } ) { PROGRAM_TAG: while ( my ( 
$recipe_tag, $stream ) = each %recipe_tag_keys ) { @@ -293,9 +284,7 @@ sub analysis_gatk_cnnscorevariants { outfile_path => $norm_outfile_path, output_type => q{v}, reference_path => $referencefile_path, - stderrfile_path => $outfile_path_prefix - . $UNDERSCORE - . q{normalized.stderr}, + stderrfile_path => $outfile_path_prefix . $UNDERSCORE . q{normalized.stderr}, } ); say {$filehandle} $NEWLINE; @@ -313,7 +302,7 @@ sub analysis_gatk_cnnscorevariants { close $filehandle; - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { ## Collect QC metadata info for later use set_recipe_outfile_in_sample_info( @@ -335,13 +324,13 @@ sub analysis_gatk_cnnscorevariants { submit_recipe( { - base_command => $profile_base_command, - case_id => $case_id, - dependency_method => q{sample_to_case}, - job_id_chain => $job_id_chain, - job_id_href => $job_id_href, - job_reservation_name => $active_parameter_href->{job_reservation_name}, - log => $log, + base_command => $profile_base_command, + case_id => $case_id, + dependency_method => q{sample_to_case}, + job_id_chain => $recipe{job_id_chain}, + job_id_href => $job_id_href, + job_reservation_name => $active_parameter_href->{job_reservation_name}, + log => $log, max_parallel_processes_count_href => $file_info_href->{max_parallel_processes_count}, recipe_file_path => $recipe_file_path, diff --git a/lib/MIP/Recipes/Analysis/Gatk_combinevariantcallsets.pm b/lib/MIP/Recipes/Analysis/Gatk_combinevariantcallsets.pm index ab9cbdd19..e63ff980a 100644 --- a/lib/MIP/Recipes/Analysis/Gatk_combinevariantcallsets.pm +++ b/lib/MIP/Recipes/Analysis/Gatk_combinevariantcallsets.pm @@ -119,11 +119,11 @@ sub analysis_gatk_combinevariantcallsets { check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; use MIP::Get::File qw{ get_io_files }; - use MIP::Get::Parameter qw{ get_recipe_attributes get_recipe_resources }; use MIP::Parse::File qw{ parse_io_outfiles }; use MIP::Processmanagement::Processes qw{ submit_recipe }; use 
MIP::Program::Bcftools qw{ bcftools_view_and_index_vcf }; use MIP::Program::Gatk qw{ gatk_combinevariants }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Sample_info qw{ set_file_path_to_store set_recipe_outfile_in_sample_info }; use MIP::Script::Setup_script qw{ setup_script }; @@ -148,20 +148,12 @@ sub analysis_gatk_combinevariantcallsets { my @parallel_chains; ## Unpack parameters - my $gatk_jar = - catfile( $active_parameter_href->{gatk_path}, q{GenomeAnalysisTK.jar} ); - my $job_id_chain = get_recipe_attributes( - { - attribute => q{chain}, - parameter_href => $parameter_href, - recipe_name => $recipe_name, - } - ); + my $gatk_jar = catfile( $active_parameter_href->{gatk_path}, q{GenomeAnalysisTK.jar} ); my $referencefile_path = $active_parameter_href->{human_genome_reference}; - my $recipe_mode = $active_parameter_href->{$recipe_name}; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); @@ -169,7 +161,7 @@ sub analysis_gatk_combinevariantcallsets { ## Set and get the io files per chain, id and stream my %io = parse_io_outfiles( { - chain_id => $job_id_chain, + chain_id => $recipe{job_id_chain}, id => $case_id, file_info_href => $file_info_href, file_name_prefixes_ref => [$case_id], @@ -190,12 +182,12 @@ sub analysis_gatk_combinevariantcallsets { my ($recipe_file_path) = setup_script( { active_parameter_href => $active_parameter_href, - core_number => $recipe_resource{core_number}, + core_number => $recipe{core_number}, directory_id => $case_id, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, - process_time => $recipe_resource{time}, + memory_allocation => $recipe{memory}, + process_time => $recipe{time}, recipe_directory => $recipe_name, recipe_name => $recipe_name, temp_directory => $temp_directory, @@ -261,8 +253,8 @@ sub 
analysis_gatk_combinevariantcallsets { gatk_combinevariants( { - exclude_nonvariants => 1, - filehandle => $filehandle, + exclude_nonvariants => 1, + filehandle => $filehandle, genotype_merge_option => $active_parameter_href->{gatk_combinevariants_genotype_merge_option}, infile_paths_ref => \@combine_infile_paths, @@ -271,7 +263,7 @@ sub analysis_gatk_combinevariantcallsets { logging_level => $active_parameter_href->{gatk_logging_level}, memory_allocation => q{Xmx20g}, outfile_path => $bcftools_infile_path, - prioritize_caller => + prioritize_caller => $active_parameter_href->{gatk_combinevariants_prioritize_caller}, referencefile_path => $referencefile_path, temp_directory => $temp_directory, @@ -301,7 +293,7 @@ sub analysis_gatk_combinevariantcallsets { } close $filehandle; - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { ## Collect QC metadata info for later use set_recipe_outfile_in_sample_info( @@ -330,13 +322,13 @@ sub analysis_gatk_combinevariantcallsets { submit_recipe( { - base_command => $profile_base_command, - case_id => $case_id, - dependency_method => q{sample_to_case}, - job_id_chain => $job_id_chain, - job_id_href => $job_id_href, - job_reservation_name => $active_parameter_href->{job_reservation_name}, - log => $log, + base_command => $profile_base_command, + case_id => $case_id, + dependency_method => q{sample_to_case}, + job_id_chain => $recipe{job_id_chain}, + job_id_href => $job_id_href, + job_reservation_name => $active_parameter_href->{job_reservation_name}, + log => $log, max_parallel_processes_count_href => $file_info_href->{max_parallel_processes_count}, parallel_chains_ref => \@parallel_chains, diff --git a/lib/MIP/Recipes/Analysis/Gatk_gathervcfs.pm b/lib/MIP/Recipes/Analysis/Gatk_gathervcfs.pm index f64ba10f0..e20fe1992 100644 --- a/lib/MIP/Recipes/Analysis/Gatk_gathervcfs.pm +++ b/lib/MIP/Recipes/Analysis/Gatk_gathervcfs.pm @@ -116,12 +116,12 @@ sub analysis_gatk_gathervcfs { check( $tmpl, $arg_href, 1 ) or croak q{Could not 
parse arguments!}; use MIP::Get::File qw{ get_io_files }; - use MIP::Get::Parameter qw{ get_recipe_attributes get_recipe_resources }; use MIP::Program::Gnu::Coreutils qw(gnu_mv); use MIP::Parse::File qw{ parse_io_outfiles }; use MIP::Processmanagement::Processes qw{ submit_recipe }; use MIP::Program::Bcftools qw{ bcftools_view_and_index_vcf }; use MIP::Program::Gatk qw{ gatk_gathervcfscloud gatk_selectvariants }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Sample_info qw{ set_processing_metafile_in_sample_info set_recipe_outfile_in_sample_info }; use MIP::Script::Setup_script qw{ setup_script }; @@ -145,19 +145,12 @@ sub analysis_gatk_gathervcfs { my $infile_name_prefix = $io{in}{file_name_prefix}; my %infile_path = %{ $io{in}{file_path_href} }; - my $job_id_chain = get_recipe_attributes( - { - attribute => q{chain}, - parameter_href => $parameter_href, - recipe_name => $recipe_name, - } - ); my $consensus_analysis_type = $parameter_href->{cache}{consensus_analysis_type}; my $referencefile_path = $active_parameter_href->{human_genome_reference}; - my $recipe_mode = $active_parameter_href->{$recipe_name}; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); @@ -166,7 +159,7 @@ sub analysis_gatk_gathervcfs { %io, parse_io_outfiles( { - chain_id => $job_id_chain, + chain_id => $recipe{job_id_chain}, id => $case_id, file_info_href => $file_info_href, file_name_prefixes_ref => [$infile_name_prefix], @@ -188,12 +181,12 @@ sub analysis_gatk_gathervcfs { my ($recipe_file_path) = setup_script( { active_parameter_href => $active_parameter_href, - core_number => $recipe_resource{core_number}, + core_number => $recipe{core_number}, directory_id => $case_id, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, - process_time => $recipe_resource{time}, + 
memory_allocation => $recipe{memory}, + process_time => $recipe{time}, recipe_directory => $recipe_name, recipe_name => $recipe_name, temp_directory => $temp_directory, @@ -228,12 +221,11 @@ sub analysis_gatk_gathervcfs { say {$filehandle} q{## GATK SelectVariants}; gatk_selectvariants( { - filehandle => $filehandle, - infile_path => $outfile_path, - java_use_large_pages => - $active_parameter_href->{java_use_large_pages}, - memory_allocation => q{Xmx2g}, - outfile_path => $outfile_path_prefix + filehandle => $filehandle, + infile_path => $outfile_path, + java_use_large_pages => $active_parameter_href->{java_use_large_pages}, + memory_allocation => q{Xmx2g}, + outfile_path => $outfile_path_prefix . $UNDERSCORE . q{incnonvariantloci} . $outfile_suffix, @@ -272,7 +264,7 @@ sub analysis_gatk_gathervcfs { close $filehandle; - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { if ( $active_parameter_href->{gatk_gathervcfs_bcf_file} ) { @@ -298,13 +290,13 @@ sub analysis_gatk_gathervcfs { submit_recipe( { - base_command => $profile_base_command, - case_id => $case_id, - dependency_method => q{sample_to_case}, - job_id_chain => $job_id_chain, - job_id_href => $job_id_href, - job_reservation_name => $active_parameter_href->{job_reservation_name}, - log => $log, + base_command => $profile_base_command, + case_id => $case_id, + dependency_method => q{sample_to_case}, + job_id_chain => $recipe{job_id_chain}, + job_id_href => $job_id_href, + job_reservation_name => $active_parameter_href->{job_reservation_name}, + log => $log, max_parallel_processes_count_href => $file_info_href->{max_parallel_processes_count}, recipe_file_path => $recipe_file_path, diff --git a/lib/MIP/Recipes/Analysis/Gatk_genotypegvcfs.pm b/lib/MIP/Recipes/Analysis/Gatk_genotypegvcfs.pm index b7a4ace15..9a6b00e07 100644 --- a/lib/MIP/Recipes/Analysis/Gatk_genotypegvcfs.pm +++ b/lib/MIP/Recipes/Analysis/Gatk_genotypegvcfs.pm @@ -124,7 +124,7 @@ sub analysis_gatk_genotypegvcfs { use MIP::Pedigree qw{ 
create_fam_file }; use MIP::Get::File qw{ get_io_files }; - use MIP::Get::Parameter qw{ get_recipe_attributes get_recipe_resources }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Program::Gnu::Coreutils qw{ gnu_cat gnu_echo gnu_rm }; use MIP::Parse::File qw{ parse_io_outfiles }; use MIP::Processmanagement::Processes qw{ submit_recipe }; @@ -139,25 +139,18 @@ sub analysis_gatk_genotypegvcfs { ## Unpack parameters my $consensus_analysis_type = $parameter_href->{cache}{consensus_analysis_type}; - my $job_id_chain = get_recipe_attributes( - { - attribute => q{chain}, - parameter_href => $parameter_href, - recipe_name => $recipe_name, - } - ); - my $recipe_mode = $active_parameter_href->{$recipe_name}; - my $recipe_files_tracker = 0; + my $recipe_files_tracker = 0; ## Gatk genotype is most safely processed in single thread mode, but we need some java heap allocation - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); - my $core_number = $recipe_resource{core_number}; - my $time = $recipe_resource{time}; + my $core_number = $recipe{core_number}; + my $time = $recipe{time}; ## If all sites should be included if ( $active_parameter_href->{gatk_genotypegvcfs_all_sites} == 1 ) { @@ -169,7 +162,7 @@ sub analysis_gatk_genotypegvcfs { ## Set and get the io files per chain, id and stream my %io = parse_io_outfiles( { - chain_id => $job_id_chain, + chain_id => $recipe{job_id_chain}, id => $case_id, file_info_href => $file_info_href, file_name_prefix => $case_id, @@ -211,7 +204,7 @@ sub analysis_gatk_genotypegvcfs { directory_id => $case_id, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, + memory_allocation => $recipe{memory}, process_time => $time, recipe_directory => $recipe_name, recipe_name => $recipe_name, @@ -274,14 +267,12 @@ sub 
analysis_gatk_genotypegvcfs { ## Files to import into GenomicsDB if ( $consensus_analysis_type =~ /wes|panel/xms ) { - $sample_name_map_path = - catfile( $outdir_path_prefix, q{analysis_sample_map.txt} ); - my $echo_outfile_path = - catfile( $outdir_path_prefix, q{dynamic_sample_map.txt} ); + $sample_name_map_path = catfile( $outdir_path_prefix, q{analysis_sample_map.txt} ); + my $echo_outfile_path = catfile( $outdir_path_prefix, q{dynamic_sample_map.txt} ); _merge_sample_name_map_files( { - echo_outfile_path => $echo_outfile_path, - filehandle => $filehandle, + echo_outfile_path => $echo_outfile_path, + filehandle => $filehandle, gatk_genotypegvcfs_ref_gvcf => $active_parameter_href->{gatk_genotypegvcfs_ref_gvcf}, outfile_path => $sample_name_map_path, @@ -293,17 +284,17 @@ sub analysis_gatk_genotypegvcfs { gatk_genomicsdbimport( { - filehandle => $filehandle, - genomicsdb_workspace_path => $genomicsdb_file_path, - intervals_ref => [$contig], - infile_paths_ref => \@genotype_infile_paths, - java_use_large_pages => $active_parameter_href->{java_use_large_pages}, - memory_allocation => q{Xmx} . $JAVA_MEMORY_ALLOCATION . q{g}, - referencefile_path => $active_parameter_href->{human_genome_reference}, - sample_name_map_path => $sample_name_map_path, + filehandle => $filehandle, + genomicsdb_workspace_path => $genomicsdb_file_path, + intervals_ref => [$contig], + infile_paths_ref => \@genotype_infile_paths, + java_use_large_pages => $active_parameter_href->{java_use_large_pages}, + memory_allocation => q{Xmx} . $JAVA_MEMORY_ALLOCATION . 
q{g}, + referencefile_path => $active_parameter_href->{human_genome_reference}, + sample_name_map_path => $sample_name_map_path, shared_posixfs_optimizations => 1, temp_directory => $temp_directory, - verbosity => $active_parameter_href->{gatk_logging_level}, + verbosity => $active_parameter_href->{gatk_logging_level}, } ); say {$filehandle} $NEWLINE; @@ -313,38 +304,35 @@ sub analysis_gatk_genotypegvcfs { gatk_genotypegvcfs( { - dbsnp_path => - $active_parameter_href->{gatk_haplotypecaller_snp_known_set}, + dbsnp_path => $active_parameter_href->{gatk_haplotypecaller_snp_known_set}, filehandle => $filehandle, - include_nonvariant_sites => - $active_parameter_href->{gatk_genotypegvcfs_all_sites}, - infile_path => q{gendb://} . $genomicsdb_file_path, - intervals_ref => [$contig], - java_use_large_pages => $active_parameter_href->{java_use_large_pages}, - memory_allocation => q{Xmx} . $JAVA_MEMORY_ALLOCATION . q{g}, - outfile_path => $outfile_path{$contig}, - pedigree => $fam_file_path, - referencefile_path => $active_parameter_href->{human_genome_reference}, - temp_directory => $temp_directory, - verbosity => $active_parameter_href->{gatk_logging_level}, + include_nonvariant_sites => $active_parameter_href->{gatk_genotypegvcfs_all_sites}, + infile_path => q{gendb://} . $genomicsdb_file_path, + intervals_ref => [$contig], + java_use_large_pages => $active_parameter_href->{java_use_large_pages}, + memory_allocation => q{Xmx} . $JAVA_MEMORY_ALLOCATION . 
q{g}, + outfile_path => $outfile_path{$contig}, + pedigree => $fam_file_path, + referencefile_path => $active_parameter_href->{human_genome_reference}, + temp_directory => $temp_directory, + verbosity => $active_parameter_href->{gatk_logging_level}, } ); say {$filehandle} $NEWLINE; close $filehandle; - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { submit_recipe( { - base_command => $profile_base_command, - case_id => $case_id, - dependency_method => q{sample_to_case_parallel}, - job_id_chain => $job_id_chain, - job_id_href => $job_id_href, - job_reservation_name => - $active_parameter_href->{job_reservation_name}, - log => $log, + base_command => $profile_base_command, + case_id => $case_id, + dependency_method => q{sample_to_case_parallel}, + job_id_chain => $recipe{job_id_chain}, + job_id_href => $job_id_href, + job_reservation_name => $active_parameter_href->{job_reservation_name}, + log => $log, max_parallel_processes_count_href => $file_info_href->{max_parallel_processes_count}, recipe_file_path => $recipe_file_path, diff --git a/lib/MIP/Recipes/Analysis/Gatk_haplotypecaller.pm b/lib/MIP/Recipes/Analysis/Gatk_haplotypecaller.pm index ac584ce60..7088d30b8 100644 --- a/lib/MIP/Recipes/Analysis/Gatk_haplotypecaller.pm +++ b/lib/MIP/Recipes/Analysis/Gatk_haplotypecaller.pm @@ -144,12 +144,12 @@ sub analysis_gatk_haplotypecaller { use MIP::Cluster qw{ get_parallel_processes }; use MIP::Gatk qw{ get_gatk_intervals }; use MIP::Get::File qw{ get_io_files }; - use MIP::Get::Parameter qw{ get_recipe_attributes get_recipe_resources }; use MIP::Parse::File qw{ parse_io_outfiles }; use MIP::Pedigree qw{ create_fam_file }; use MIP::Processmanagement::Processes qw{ submit_recipe }; use MIP::Program::Gatk qw{ gatk_haplotypecaller }; use MIP::Program::Gatk qw{ gatk_gathervcfscloud }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Recipes::Analysis::Xargs qw{ xargs_command }; use MIP::Sample_info qw{ set_recipe_outfile_in_sample_info }; use 
MIP::Script::Setup_script qw{ setup_script }; @@ -174,26 +174,19 @@ sub analysis_gatk_haplotypecaller { my $infile_name_prefix = $io{in}{file_name_prefix}; my %infile_path = %{ $io{in}{file_path_href} }; - my $job_id_chain = get_recipe_attributes( - { - attribute => q{chain}, - parameter_href => $parameter_href, - recipe_name => $recipe_name, - } - ); - my $recipe_mode = $active_parameter_href->{$recipe_name}; my $referencefile_path = $active_parameter_href->{human_genome_reference}; my $analysis_type = $active_parameter_href->{analysis_type}{$sample_id}; my $xargs_file_path_prefix; ## Get module parameters - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); - my $core_number = $recipe_resource{core_number}; + my $core_number = $recipe{core_number}; ## Outpaths ## Set and get the io files per chain, id and stream @@ -201,7 +194,7 @@ sub analysis_gatk_haplotypecaller { %io, parse_io_outfiles( { - chain_id => $job_id_chain, + chain_id => $recipe{job_id_chain}, id => $sample_id, file_info_href => $file_info_href, file_name_prefix => $infile_name_prefix, @@ -230,8 +223,8 @@ sub analysis_gatk_haplotypecaller { directory_id => $sample_id, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, - process_time => $recipe_resource{time}, + memory_allocation => $recipe{memory}, + process_time => $recipe{time}, recipe_directory => $recipe_name, recipe_name => $recipe_name, temp_directory => $temp_directory, @@ -288,7 +281,7 @@ sub analysis_gatk_haplotypecaller { { core_number => $core_number, process_memory_allocation => $process_memory_allocation, - recipe_memory_allocation => $recipe_resource{memory}, + recipe_memory_allocation => $recipe{memory}, } ); @@ -368,7 +361,7 @@ sub analysis_gatk_haplotypecaller { ## Set input files for next module set_io_files( { - chain_id 
=> $job_id_chain, + chain_id => $recipe{job_id_chain}, id => $sample_id, file_info_href => $file_info_href, file_paths_ref => [$concat_vcf_path], @@ -377,7 +370,7 @@ sub analysis_gatk_haplotypecaller { } ); - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { ## Collect QC metadata info for later use set_recipe_outfile_in_sample_info( @@ -395,7 +388,7 @@ sub analysis_gatk_haplotypecaller { base_command => $profile_base_command, case_id => $case_id, dependency_method => q{sample_to_sample}, - job_id_chain => $job_id_chain, + job_id_chain => $recipe{job_id_chain}, job_id_href => $job_id_href, job_reservation_name => $active_parameter_href->{job_reservation_name}, log => $log, @@ -504,11 +497,11 @@ sub analysis_gatk_haplotypecaller_panel { use MIP::Active_parameter qw{ get_exome_target_bed_file }; use MIP::Cluster qw{ update_memory_allocation }; use MIP::Get::File qw{ get_io_files }; - use MIP::Get::Parameter qw{ get_recipe_attributes get_recipe_resources }; use MIP::Parse::File qw{ parse_io_outfiles }; use MIP::Pedigree qw{ create_fam_file }; use MIP::Processmanagement::Processes qw{ submit_recipe }; use MIP::Program::Gatk qw{ gatk_haplotypecaller }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Sample_info qw{ set_recipe_outfile_in_sample_info }; use MIP::Script::Setup_script qw{ setup_script }; @@ -531,24 +524,17 @@ sub analysis_gatk_haplotypecaller_panel { my $infile_path = $io{in}{file_path}; my $infile_name_prefix = $io{in}{file_name_prefix}; - my $job_id_chain = get_recipe_attributes( - { - attribute => q{chain}, - parameter_href => $parameter_href, - recipe_name => $recipe_name, - } - ); - my $recipe_mode = $active_parameter_href->{$recipe_name}; my $referencefile_path = $active_parameter_href->{human_genome_reference}; ## Get module parameters - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => 
$recipe_name, } ); - my $core_number = $recipe_resource{core_number}; + my $core_number = $recipe{core_number}; ## Outpaths ## Set and get the io files per chain, id and stream @@ -556,7 +542,7 @@ sub analysis_gatk_haplotypecaller_panel { %io, parse_io_outfiles( { - chain_id => $job_id_chain, + chain_id => $recipe{job_id_chain}, id => $sample_id, file_info_href => $file_info_href, file_name_prefixes_ref => [$infile_name_prefix], @@ -593,7 +579,7 @@ sub analysis_gatk_haplotypecaller_panel { filehandle => $filehandle, job_id_href => $job_id_href, memory_allocation => $memory_allocation, - process_time => $recipe_resource{time}, + process_time => $recipe{time}, recipe_directory => $recipe_name, recipe_name => $recipe_name, temp_directory => $active_parameter_href->{temp_directory}, @@ -655,7 +641,7 @@ sub analysis_gatk_haplotypecaller_panel { close $filehandle; - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { ## Collect QC metadata info for later use set_recipe_outfile_in_sample_info( @@ -673,7 +659,7 @@ sub analysis_gatk_haplotypecaller_panel { base_command => $profile_base_command, case_id => $case_id, dependency_method => q{sample_to_sample}, - job_id_chain => $job_id_chain, + job_id_chain => $recipe{job_id_chain}, job_id_href => $job_id_href, job_reservation_name => $active_parameter_href->{job_reservation_name}, log => $log, diff --git a/lib/MIP/Recipes/Analysis/Gatk_splitncigarreads.pm b/lib/MIP/Recipes/Analysis/Gatk_splitncigarreads.pm index ddb2de35e..f459d91e3 100644 --- a/lib/MIP/Recipes/Analysis/Gatk_splitncigarreads.pm +++ b/lib/MIP/Recipes/Analysis/Gatk_splitncigarreads.pm @@ -144,13 +144,13 @@ sub analysis_gatk_splitncigarreads { use MIP::Cluster qw{ get_parallel_processes }; use MIP::Get::File qw{ get_io_files }; - use MIP::Get::Parameter qw{ get_recipe_attributes get_recipe_resources }; use MIP::Program::Gnu::Coreutils qw{ gnu_cp }; use MIP::Parse::File qw{ parse_io_outfiles }; use MIP::Processmanagement::Processes qw{ submit_recipe }; use 
MIP::Program::Gatk qw{ gatk_splitncigarreads }; - use MIP::Sample_info qw{ set_recipe_outfile_in_sample_info }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Recipes::Analysis::Xargs qw{ xargs_command }; + use MIP::Sample_info qw{ set_recipe_outfile_in_sample_info }; use MIP::Script::Setup_script qw{ setup_script }; ### PREPROCESSING @@ -171,28 +171,21 @@ sub analysis_gatk_splitncigarreads { ); my $infile_name_prefix = $io{in}{file_name_prefix}; my %infile_path = %{ $io{in}{file_path_href} }; - my $recipe_mode = $active_parameter_href->{$recipe_name}; - my $job_id_chain = get_recipe_attributes( - { - attribute => q{chain}, - parameter_href => $parameter_href, - recipe_name => $recipe_name, - } - ); - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); - my $core_number = $recipe_resource{core_number}; + my $core_number = $recipe{core_number}; ## Set and get the io files per chain, id and stream %io = ( %io, parse_io_outfiles( { - chain_id => $job_id_chain, + chain_id => $recipe{job_id_chain}, id => $sample_id, file_info_href => $file_info_href, file_name_prefix => $infile_name_prefix, @@ -223,8 +216,8 @@ sub analysis_gatk_splitncigarreads { directory_id => $sample_id, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, - process_time => $recipe_resource{time}, + memory_allocation => $recipe{memory}, + process_time => $recipe{time}, recipe_directory => $recipe_name, recipe_name => $recipe_name, temp_directory => $temp_directory, @@ -240,7 +233,7 @@ sub analysis_gatk_splitncigarreads { my $parallel_processes = get_parallel_processes( { process_memory_allocation => $process_memory_allocation, - recipe_memory_allocation => $recipe_resource{memory}, + recipe_memory_allocation => $recipe{memory}, core_number => $core_number, } ); @@ -261,12 +254,10 
@@ sub analysis_gatk_splitncigarreads { ); CONTIG: - while ( my ( $infile_index, $contig ) = - each @{ $file_info_href->{bam_contigs_size_ordered} } ) + while ( my ( $infile_index, $contig ) = each @{ $file_info_href->{bam_contigs_size_ordered} } ) { - my $stderrfile_path = - $xargs_file_path_prefix . $DOT . $contig . $DOT . q{stderr.txt}; + my $stderrfile_path = $xargs_file_path_prefix . $DOT . $contig . $DOT . q{stderr.txt}; gatk_splitncigarreads( { @@ -288,7 +279,7 @@ sub analysis_gatk_splitncigarreads { close $filehandle; close $xargsfilehandle; - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { my $first_outfile_path = $outfile_paths[0]; @@ -305,13 +296,13 @@ sub analysis_gatk_splitncigarreads { submit_recipe( { - base_command => $profile_base_command, - case_id => $case_id, - dependency_method => q{sample_to_sample}, - job_id_chain => $job_id_chain, - job_id_href => $job_id_href, - job_reservation_name => $active_parameter_href->{job_reservation_name}, - log => $log, + base_command => $profile_base_command, + case_id => $case_id, + dependency_method => q{sample_to_sample}, + job_id_chain => $recipe{job_id_chain}, + job_id_href => $job_id_href, + job_reservation_name => $active_parameter_href->{job_reservation_name}, + log => $log, max_parallel_processes_count_href => $file_info_href->{max_parallel_processes_count}, recipe_file_path => $recipe_file_path, diff --git a/lib/MIP/Recipes/Analysis/Gatk_variantevalall.pm b/lib/MIP/Recipes/Analysis/Gatk_variantevalall.pm index 0a5bd4d63..bff0124fa 100644 --- a/lib/MIP/Recipes/Analysis/Gatk_variantevalall.pm +++ b/lib/MIP/Recipes/Analysis/Gatk_variantevalall.pm @@ -127,7 +127,7 @@ sub analysis_gatk_variantevalall { check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; use MIP::Get::File qw{ get_io_files }; - use MIP::Get::Parameter qw{ get_recipe_attributes get_recipe_resources }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Language::Java qw{ java_core }; use MIP::Parse::File qw{ 
parse_io_outfiles }; use MIP::Processmanagement::Processes qw{ submit_recipe }; @@ -155,18 +155,11 @@ sub analysis_gatk_variantevalall { my $infile_name_prefix = $io{in}{file_name_prefix}; my $infile_path = $io{in}{file_path}; - my $job_id_chain = get_recipe_attributes( - { - attribute => q{chain}, - parameter_href => $parameter_href, - recipe_name => $recipe_name, - } - ); - my $recipe_mode = $active_parameter_href->{$recipe_name}; my $referencefile_path = $active_parameter_href->{human_genome_reference}; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); @@ -176,7 +169,7 @@ sub analysis_gatk_variantevalall { %io, parse_io_outfiles( { - chain_id => $job_id_chain, + chain_id => $recipe{job_id_chain}, id => $sample_id, file_info_href => $file_info_href, file_name_prefix => $infile_name_prefix, @@ -201,12 +194,12 @@ sub analysis_gatk_variantevalall { my ($recipe_file_path) = setup_script( { active_parameter_href => $active_parameter_href, - core_number => $recipe_resource{core_number}, + core_number => $recipe{core_number}, directory_id => $sample_id, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, - process_time => $recipe_resource{time}, + memory_allocation => $recipe{memory}, + process_time => $recipe{time}, recipe_directory => $recipe_name, recipe_name => $recipe_name, temp_directory => $temp_directory, @@ -220,8 +213,7 @@ sub analysis_gatk_variantevalall { ## GATK SelectVariants say {$filehandle} q{## GATK SelectVariants}; - my $select_outfile_path = - $outfile_path_prefix . $UNDERSCORE . q{select} . $DOT . q{vcf}; + my $select_outfile_path = $outfile_path_prefix . $UNDERSCORE . q{select} . $DOT . 
q{vcf}; gatk_selectvariants( { filehandle => $filehandle, @@ -242,24 +234,23 @@ sub analysis_gatk_variantevalall { gatk_varianteval( { - filehandle => $filehandle, - dbsnp_file_path => $active_parameter_href->{gatk_varianteval_dbsnp}, - indel_gold_standard_file_path => - $active_parameter_href->{gatk_varianteval_gold}, - infile_paths_ref => [$select_outfile_path], - java_use_large_pages => $active_parameter_href->{java_use_large_pages}, - verbosity => $active_parameter_href->{gatk_logging_level}, - memory_allocation => q{Xmx2g}, - outfile_path => $outfile_path, - referencefile_path => $referencefile_path, - temp_directory => $temp_directory, + filehandle => $filehandle, + dbsnp_file_path => $active_parameter_href->{gatk_varianteval_dbsnp}, + indel_gold_standard_file_path => $active_parameter_href->{gatk_varianteval_gold}, + infile_paths_ref => [$select_outfile_path], + java_use_large_pages => $active_parameter_href->{java_use_large_pages}, + verbosity => $active_parameter_href->{gatk_logging_level}, + memory_allocation => q{Xmx2g}, + outfile_path => $outfile_path, + referencefile_path => $referencefile_path, + temp_directory => $temp_directory, } ); say {$filehandle} $NEWLINE; close $filehandle or $log->logcroak(q{Could not close filehandle}); - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { ## Collect QC metadata info for later use set_recipe_outfile_in_sample_info( @@ -273,13 +264,13 @@ sub analysis_gatk_variantevalall { ); submit_recipe( { - base_command => $profile_base_command, - case_id => $case_id, - dependency_method => q{case_to_island}, - job_id_chain => $job_id_chain, - job_id_href => $job_id_href, - job_reservation_name => $active_parameter_href->{job_reservation_name}, - log => $log, + base_command => $profile_base_command, + case_id => $case_id, + dependency_method => q{case_to_island}, + job_id_chain => $recipe{job_id_chain}, + job_id_href => $job_id_href, + job_reservation_name => $active_parameter_href->{job_reservation_name}, + log => 
$log, max_parallel_processes_count_href => $file_info_href->{max_parallel_processes_count}, recipe_file_path => $recipe_file_path, diff --git a/lib/MIP/Recipes/Analysis/Gatk_variantevalexome.pm b/lib/MIP/Recipes/Analysis/Gatk_variantevalexome.pm index 9e7bb15ff..eb6cbbdac 100644 --- a/lib/MIP/Recipes/Analysis/Gatk_variantevalexome.pm +++ b/lib/MIP/Recipes/Analysis/Gatk_variantevalexome.pm @@ -126,11 +126,11 @@ sub analysis_gatk_variantevalexome { check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; use MIP::Get::File qw{ get_io_files }; - use MIP::Get::Parameter qw{ get_recipe_attributes get_recipe_resources }; use MIP::Parse::File qw{ parse_io_outfiles }; use MIP::Processmanagement::Processes qw{ submit_recipe }; use MIP::Program::Bcftools qw{ bcftools_view }; use MIP::Program::Gatk qw{ gatk_indexfeaturefile gatk_varianteval }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Sample_info qw(set_recipe_outfile_in_sample_info); use MIP::Script::Setup_script qw{ setup_script }; @@ -155,20 +155,12 @@ sub analysis_gatk_variantevalexome { my $infile_suffix = $io{in}{file_suffix}; my $infile_path = $infile_path_prefix . 
$infile_suffix; - my $job_id_chain = get_recipe_attributes( - { - parameter_href => $parameter_href, - recipe_name => $recipe_name, - attribute => q{chain}, - } - ); - my $gatk_jar = - catfile( $active_parameter_href->{gatk_path}, q{GenomeAnalysisTK.jar} ); - my $recipe_mode = $active_parameter_href->{$recipe_name}; + my $gatk_jar = catfile( $active_parameter_href->{gatk_path}, q{GenomeAnalysisTK.jar} ); my $referencefile_path = $active_parameter_href->{human_genome_reference}; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); @@ -178,7 +170,7 @@ sub analysis_gatk_variantevalexome { %io, parse_io_outfiles( { - chain_id => $job_id_chain, + chain_id => $recipe{job_id_chain}, id => $sample_id, file_info_href => $file_info_href, outdata_dir => $active_parameter_href->{outdata_dir}, @@ -202,12 +194,12 @@ sub analysis_gatk_variantevalexome { my ($recipe_file_path) = setup_script( { active_parameter_href => $active_parameter_href, - core_number => $recipe_resource{core_number}, + core_number => $recipe{core_number}, directory_id => $sample_id, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, - process_time => $recipe_resource{time}, + memory_allocation => $recipe{memory}, + process_time => $recipe{time}, recipe_directory => $recipe_name, recipe_name => $recipe_name, temp_directory => $temp_directory, @@ -245,24 +237,23 @@ sub analysis_gatk_variantevalexome { say {$filehandle} q{## GATK varianteval}; gatk_varianteval( { - dbsnp_file_path => $active_parameter_href->{gatk_varianteval_dbsnp}, - filehandle => $filehandle, - indel_gold_standard_file_path => - $active_parameter_href->{gatk_varianteval_gold}, - infile_paths_ref => [$view_outfile_path], - java_use_large_pages => $active_parameter_href->{java_use_large_pages}, - verbosity => 
$active_parameter_href->{gatk_logging_level}, - memory_allocation => q{Xmx2g}, - outfile_path => $outfile_path, - referencefile_path => $referencefile_path, - temp_directory => $temp_directory, + dbsnp_file_path => $active_parameter_href->{gatk_varianteval_dbsnp}, + filehandle => $filehandle, + indel_gold_standard_file_path => $active_parameter_href->{gatk_varianteval_gold}, + infile_paths_ref => [$view_outfile_path], + java_use_large_pages => $active_parameter_href->{java_use_large_pages}, + verbosity => $active_parameter_href->{gatk_logging_level}, + memory_allocation => q{Xmx2g}, + outfile_path => $outfile_path, + referencefile_path => $referencefile_path, + temp_directory => $temp_directory, } ); say {$filehandle} $NEWLINE; close $filehandle or $log->logcroak(q{Could not close filehandle}); - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { ## Collect QC metadata info for later use set_recipe_outfile_in_sample_info( @@ -276,13 +267,13 @@ sub analysis_gatk_variantevalexome { ); submit_recipe( { - base_command => $profile_base_command, - case_id => $case_id, - dependency_method => q{case_to_island}, - job_id_chain => $job_id_chain, - job_id_href => $job_id_href, - job_reservation_name => $active_parameter_href->{job_reservation_name}, - log => $log, + base_command => $profile_base_command, + case_id => $case_id, + dependency_method => q{case_to_island}, + job_id_chain => $recipe{job_id_chain}, + job_id_href => $job_id_href, + job_reservation_name => $active_parameter_href->{job_reservation_name}, + log => $log, max_parallel_processes_count_href => $file_info_href->{max_parallel_processes_count}, recipe_file_path => $recipe_file_path, diff --git a/lib/MIP/Recipes/Analysis/Gatk_variantfiltration.pm b/lib/MIP/Recipes/Analysis/Gatk_variantfiltration.pm index 586f4a5e6..fd1d8ec91 100644 --- a/lib/MIP/Recipes/Analysis/Gatk_variantfiltration.pm +++ b/lib/MIP/Recipes/Analysis/Gatk_variantfiltration.pm @@ -128,12 +128,12 @@ sub analysis_gatk_variantfiltration { 
check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; use MIP::Get::File qw{ get_io_files }; - use MIP::Get::Parameter qw{ get_recipe_attributes get_recipe_resources }; use MIP::Parse::File qw{ parse_io_outfiles }; use MIP::Processmanagement::Processes qw{ submit_recipe }; use MIP::Program::Gatk qw{ gatk_variantfiltration }; - use MIP::Script::Setup_script qw{ setup_script }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Sample_info qw{ set_recipe_outfile_in_sample_info }; + use MIP::Script::Setup_script qw{ setup_script }; ## Constants Readonly my $JAVA_MEMORY_ALLOCATION => 3; @@ -159,20 +159,13 @@ sub analysis_gatk_variantfiltration { my @infile_name_prefixes = @{ $io{in}{file_name_prefixes} }; my $infile_path = ${ $io{in}{file_paths} }[0]; - my $job_id_chain = get_recipe_attributes( - { - attribute => q{chain}, - parameter_href => $parameter_href, - recipe_name => $recipe_name, - } - ); - my $recipe_mode = $active_parameter_href->{$recipe_name}; my $referencefile_path = $active_parameter_href->{human_genome_reference}; ## Get module parameters - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); @@ -183,7 +176,7 @@ sub analysis_gatk_variantfiltration { %io, parse_io_outfiles( { - chain_id => $job_id_chain, + chain_id => $recipe{job_id_chain}, id => $sample_id, file_info_href => $file_info_href, file_name_prefixes_ref => \@infile_name_prefixes, @@ -205,12 +198,12 @@ sub analysis_gatk_variantfiltration { my ( $recipe_file_path, $recipe_info_path ) = setup_script( { active_parameter_href => $active_parameter_href, - core_number => $recipe_resource{core_number}, + core_number => $recipe{core_number}, directory_id => $sample_id, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, - process_time => $recipe_resource{time}, + 
memory_allocation => $recipe{memory}, + process_time => $recipe{time}, recipe_directory => $recipe_name, recipe_name => $recipe_name, temp_directory => $temp_directory, @@ -223,12 +216,12 @@ sub analysis_gatk_variantfiltration { say {$filehandle} q{## GATK VariantFiltration}; gatk_variantfiltration( { - cluster_size => $active_parameter_href->{gatk_variantfiltration_cluster_size}, + cluster_size => $active_parameter_href->{gatk_variantfiltration_cluster_size}, cluster_window_size => $active_parameter_href->{gatk_variantfiltration_cluster_window_size}, - filehandle => $filehandle, - filter_href => $active_parameter_href->{gatk_variantfiltration_filter}, - infile_path => $infile_path, + filehandle => $filehandle, + filter_href => $active_parameter_href->{gatk_variantfiltration_filter}, + infile_path => $infile_path, java_use_large_pages => $active_parameter_href->{java_use_large_pages}, memory_allocation => q{Xmx} . $JAVA_MEMORY_ALLOCATION . q{g}, outfile_path => $outfile_path, @@ -241,7 +234,7 @@ sub analysis_gatk_variantfiltration { close $filehandle; - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { ## Collect QC metadata info for later use set_recipe_outfile_in_sample_info( @@ -256,13 +249,13 @@ sub analysis_gatk_variantfiltration { submit_recipe( { - base_command => $profile_base_command, - case_id => $case_id, - dependency_method => q{sample_to_sample}, - job_id_chain => $job_id_chain, - job_id_href => $job_id_href, - job_reservation_name => $active_parameter_href->{job_reservation_name}, - log => $log, + base_command => $profile_base_command, + case_id => $case_id, + dependency_method => q{sample_to_sample}, + job_id_chain => $recipe{job_id_chain}, + job_id_href => $job_id_href, + job_reservation_name => $active_parameter_href->{job_reservation_name}, + log => $log, max_parallel_processes_count_href => $file_info_href->{max_parallel_processes_count}, recipe_file_path => $recipe_file_path, diff --git 
a/lib/MIP/Recipes/Analysis/Gatk_variantrecalibration.pm b/lib/MIP/Recipes/Analysis/Gatk_variantrecalibration.pm index 9672353b7..2dfe18b4f 100644 --- a/lib/MIP/Recipes/Analysis/Gatk_variantrecalibration.pm +++ b/lib/MIP/Recipes/Analysis/Gatk_variantrecalibration.pm @@ -124,7 +124,7 @@ sub analysis_gatk_variantrecalibration_wes { use MIP::Contigs qw{ delete_contig_elements }; use MIP::Pedigree qw{ create_fam_file }; use MIP::Get::File qw{ get_io_files }; - use MIP::Get::Parameter qw{ get_recipe_attributes get_recipe_resources }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Program::Gnu::Coreutils qw{ gnu_mv }; use MIP::Processmanagement::Processes qw{ submit_recipe }; use MIP::Program::Bcftools qw{ bcftools_norm }; @@ -160,22 +160,13 @@ sub analysis_gatk_variantrecalibration_wes { $active_parameter_href->{gatk_variantrecalibration_indel_max_gaussians}; my $enable_snv_max_gaussians_filter = $active_parameter_href->{gatk_variantrecalibration_snv_max_gaussians}; - my $job_id_chain = get_recipe_attributes( - { - attribute => q{chain}, - parameter_href => $parameter_href, - recipe_name => $recipe_name, - } - ); - my $recipe_mode = $active_parameter_href->{$recipe_name}; - my $referencefile_path = $active_parameter_href->{human_genome_reference}; - my $resource_indel_href = - $active_parameter_href->{gatk_variantrecalibration_resource_indel}; - my $resource_snv_href = - $active_parameter_href->{gatk_variantrecalibration_resource_snv}; - my %recipe_resource = get_recipe_resources( + my $referencefile_path = $active_parameter_href->{human_genome_reference}; + my $resource_indel_href = $active_parameter_href->{gatk_variantrecalibration_resource_indel}; + my $resource_snv_href = $active_parameter_href->{gatk_variantrecalibration_resource_snv}; + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); @@ -184,7 +175,7 @@ sub 
analysis_gatk_variantrecalibration_wes { %io, parse_io_outfiles( { - chain_id => $job_id_chain, + chain_id => $recipe{job_id_chain}, id => $case_id, file_info_href => $file_info_href, file_name_prefixes_ref => [$infile_name_prefix], @@ -207,12 +198,12 @@ sub analysis_gatk_variantrecalibration_wes { my ( $recipe_file_path, $recipe_info_path ) = setup_script( { active_parameter_href => $active_parameter_href, - core_number => $recipe_resource{core_number}, + core_number => $recipe{core_number}, directory_id => $case_id, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, - process_time => $recipe_resource{time}, + memory_allocation => $recipe{memory}, + process_time => $recipe{time}, recipe_directory => $recipe_name, recipe_name => $recipe_name, temp_directory => $temp_directory, @@ -257,10 +248,8 @@ sub analysis_gatk_variantrecalibration_wes { ## Get parameters my $max_gaussian_level; - my @ts_tranches = - @{ $active_parameter_href->{gatk_variantrecalibration_ts_tranches} }; - my @annotations = - @{ $active_parameter_href->{gatk_variantrecalibration_annotations} }; + my @ts_tranches = @{ $active_parameter_href->{gatk_variantrecalibration_ts_tranches} }; + my @annotations = @{ $active_parameter_href->{gatk_variantrecalibration_annotations} }; ### Special case: Not to be used with hybrid capture ## Removes an element from array and return new array while leaving orginal contigs_ref untouched @@ -270,8 +259,7 @@ sub analysis_gatk_variantrecalibration_wes { remove_contigs_ref => [qw{ DP }], } ); - my @snv_resources = - _build_gatk_resource_command( { resources_href => $resource_snv_href, } ); + my @snv_resources = _build_gatk_resource_command( { resources_href => $resource_snv_href, } ); my @indel_resources = _build_gatk_resource_command( { resources_href => $resource_indel_href, } ); @@ -316,11 +304,9 @@ sub analysis_gatk_variantrecalibration_wes { ## Exome and panel analysis use combined reference for more power ## 
Infile genotypegvcfs combined vcf which used reference gVCFs to create combined vcf file - $ts_filter_level = - $active_parameter_href->{gatk_variantrecalibration_snv_tsfilter_level}; + $ts_filter_level = $active_parameter_href->{gatk_variantrecalibration_snv_tsfilter_level}; - my $apply_vqsr_outfile_path = - $outfile_path_prefix . $UNDERSCORE . q{apply} . $outfile_suffix; + my $apply_vqsr_outfile_path = $outfile_path_prefix . $UNDERSCORE . q{apply} . $outfile_suffix; gatk_applyvqsr( { filehandle => $filehandle, @@ -356,9 +342,7 @@ sub analysis_gatk_variantrecalibration_wes { outfile_path => $norm_outfile_path, output_type => q{v}, reference_path => $referencefile_path, - stderrfile_path => $outfile_path_prefix - . $UNDERSCORE - . q{normalized.stderr}, + stderrfile_path => $outfile_path_prefix . $UNDERSCORE . q{normalized.stderr}, } ); ## Set outfile path for next step @@ -401,15 +385,15 @@ sub analysis_gatk_variantrecalibration_wes { $outfile_path_prefix . $UNDERSCORE . q{refined} . $outfile_suffix; gatk_calculategenotypeposteriors( { - filehandle => $filehandle, - infile_path => $outfile_path, - java_use_large_pages => $active_parameter_href->{java_use_large_pages}, - memory_allocation => q{Xmx6g}, + filehandle => $filehandle, + infile_path => $outfile_path, + java_use_large_pages => $active_parameter_href->{java_use_large_pages}, + memory_allocation => q{Xmx6g}, num_ref_samples_if_no_call => $active_parameter_href->{gatk_num_reference_samples_if_no_call}, - outfile_path => $calculategt_outfile_path, - pedigree => $commands{pedigree}, - referencefile_path => $referencefile_path, + outfile_path => $calculategt_outfile_path, + pedigree => $commands{pedigree}, + referencefile_path => $referencefile_path, supporting_callset_file_path => $active_parameter_href->{gatk_calculate_genotype_call_set}, temp_directory => $temp_directory, @@ -458,7 +442,7 @@ sub analysis_gatk_variantrecalibration_wes { } close $filehandle; - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 
1 ) { ## Collect QC metadata info for later use set_recipe_outfile_in_sample_info( @@ -480,13 +464,13 @@ sub analysis_gatk_variantrecalibration_wes { submit_recipe( { - base_command => $profile_base_command, - case_id => $case_id, - dependency_method => q{sample_to_case}, - job_id_chain => $job_id_chain, - job_id_href => $job_id_href, - job_reservation_name => $active_parameter_href->{job_reservation_name}, - log => $log, + base_command => $profile_base_command, + case_id => $case_id, + dependency_method => q{sample_to_case}, + job_id_chain => $recipe{job_id_chain}, + job_id_href => $job_id_href, + job_reservation_name => $active_parameter_href->{job_reservation_name}, + log => $log, max_parallel_processes_count_href => $file_info_href->{max_parallel_processes_count}, recipe_file_path => $recipe_file_path, @@ -591,7 +575,7 @@ sub analysis_gatk_variantrecalibration_wgs { use MIP::Contigs qw{ delete_contig_elements }; use MIP::Pedigree qw{ create_fam_file gatk_pedigree_flag }; use MIP::Get::File qw{ get_io_files }; - use MIP::Get::Parameter qw{ get_recipe_attributes get_recipe_resources }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Program::Gnu::Coreutils qw{ gnu_mv }; use MIP::Parse::File qw{ parse_io_outfiles }; use MIP::Processmanagement::Processes qw{ submit_recipe }; @@ -632,22 +616,13 @@ sub analysis_gatk_variantrecalibration_wgs { $active_parameter_href->{gatk_variantrecalibration_indel_max_gaussians}; my $enable_snv_max_gaussians_filter = $active_parameter_href->{gatk_variantrecalibration_snv_max_gaussians}; - my $job_id_chain = get_recipe_attributes( - { - attribute => q{chain}, - parameter_href => $parameter_href, - recipe_name => $recipe_name, - } - ); - my $recipe_mode = $active_parameter_href->{$recipe_name}; - my $referencefile_path = $active_parameter_href->{human_genome_reference}; - my $resource_indel_href = - $active_parameter_href->{gatk_variantrecalibration_resource_indel}; - my $resource_snv_href = - 
$active_parameter_href->{gatk_variantrecalibration_resource_snv}; - my %recipe_resource = get_recipe_resources( + my $referencefile_path = $active_parameter_href->{human_genome_reference}; + my $resource_indel_href = $active_parameter_href->{gatk_variantrecalibration_resource_indel}; + my $resource_snv_href = $active_parameter_href->{gatk_variantrecalibration_resource_snv}; + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); @@ -656,7 +631,7 @@ sub analysis_gatk_variantrecalibration_wgs { %io, parse_io_outfiles( { - chain_id => $job_id_chain, + chain_id => $recipe{job_id_chain}, id => $case_id, file_info_href => $file_info_href, file_name_prefixes_ref => [$infile_name_prefix], @@ -679,12 +654,12 @@ sub analysis_gatk_variantrecalibration_wgs { my ( $recipe_file_path, $recipe_info_path ) = setup_script( { active_parameter_href => $active_parameter_href, - core_number => $recipe_resource{core_number}, + core_number => $recipe{core_number}, directory_id => $case_id, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, - process_time => $recipe_resource{time}, + memory_allocation => $recipe{memory}, + process_time => $recipe{time}, recipe_directory => $recipe_name, recipe_name => $recipe_name, temp_directory => $temp_directory, @@ -730,8 +705,7 @@ sub analysis_gatk_variantrecalibration_wgs { ## Get parameters my $max_gaussian_level; my $varrecal_infile_path; - my @ts_tranches = - @{ $active_parameter_href->{gatk_variantrecalibration_ts_tranches} }; + my @ts_tranches = @{ $active_parameter_href->{gatk_variantrecalibration_ts_tranches} }; if ( $mode eq q{SNP} ) { @@ -755,8 +729,7 @@ sub analysis_gatk_variantrecalibration_wgs { ## Use created recalibrated snp vcf as input if ( $mode eq q{INDEL} ) { - $varrecal_infile_path = - $outfile_path_prefix . $DOT . q{SNV} . 
$outfile_suffix; + $varrecal_infile_path = $outfile_path_prefix . $DOT . q{SNV} . $outfile_suffix; ## Use hard filtering if ($enable_indel_max_gaussians_filter) { @@ -765,8 +738,7 @@ sub analysis_gatk_variantrecalibration_wgs { } } - my @annotations = - @{ $active_parameter_href->{gatk_variantrecalibration_annotations} }; + my @annotations = @{ $active_parameter_href->{gatk_variantrecalibration_annotations} }; ## Special case: Not to be used with hybrid capture. NOTE: Disabled when analysing wes + wgs in the same run if ( $consensus_analysis_type ne q{wgs} ) { @@ -783,8 +755,7 @@ sub analysis_gatk_variantrecalibration_wgs { my @resources; if ( $mode eq q{SNP} ) { - @resources = - _build_gatk_resource_command( { resources_href => $resource_snv_href, } ); + @resources = _build_gatk_resource_command( { resources_href => $resource_snv_href, } ); } if ( $mode eq q{INDEL} ) { @@ -817,8 +788,8 @@ sub analysis_gatk_variantrecalibration_wgs { temp_directory => $temp_directory, tranches_file_path => $recal_file_path . $DOT . q{tranches}, ts_tranches_ref => \@ts_tranches, - trust_all_polymorphic => $active_parameter_href - ->{gatk_variantrecalibration_trust_all_polymorphic}, + trust_all_polymorphic => + $active_parameter_href->{gatk_variantrecalibration_trust_all_polymorphic}, verbosity => $active_parameter_href->{gatk_logging_level}, } ); @@ -834,9 +805,8 @@ sub analysis_gatk_variantrecalibration_wgs { if ( $mode eq q{SNP} ) { - $applyvqsr_infile_path = $varrecal_infile_path; - $applyvqsr_outfile_path = - $outfile_path_prefix . $DOT . q{SNV} . $outfile_suffix; + $applyvqsr_infile_path = $varrecal_infile_path; + $applyvqsr_outfile_path = $outfile_path_prefix . $DOT . q{SNV} . $outfile_suffix; $ts_filter_level = $active_parameter_href->{gatk_variantrecalibration_snv_tsfilter_level}; } @@ -844,8 +814,7 @@ sub analysis_gatk_variantrecalibration_wgs { ## Use created recalibrated snp vcf as input if ( $mode eq q{INDEL} ) { - $applyvqsr_infile_path = - $outfile_path_prefix . $DOT . 
q{SNV} . $outfile_suffix; + $applyvqsr_infile_path = $outfile_path_prefix . $DOT . q{SNV} . $outfile_suffix; $applyvqsr_outfile_path = $outfile_path; $ts_filter_level = $active_parameter_href->{gatk_variantrecalibration_indel_tsfilter_level}; @@ -881,15 +850,15 @@ sub analysis_gatk_variantrecalibration_wgs { $outfile_path_prefix . $UNDERSCORE . q{refined} . $outfile_suffix; gatk_calculategenotypeposteriors( { - filehandle => $filehandle, - infile_path => $outfile_path, - java_use_large_pages => $active_parameter_href->{java_use_large_pages}, - memory_allocation => q{Xmx6g}, + filehandle => $filehandle, + infile_path => $outfile_path, + java_use_large_pages => $active_parameter_href->{java_use_large_pages}, + memory_allocation => q{Xmx6g}, num_ref_samples_if_no_call => $active_parameter_href->{gatk_num_reference_samples_if_no_call}, - outfile_path => $calculategt_outfile_path, - pedigree => $commands{pedigree}, - referencefile_path => $referencefile_path, + outfile_path => $calculategt_outfile_path, + pedigree => $commands{pedigree}, + referencefile_path => $referencefile_path, supporting_callset_file_path => $active_parameter_href->{gatk_calculate_genotype_call_set}, temp_directory => $temp_directory, @@ -938,7 +907,7 @@ sub analysis_gatk_variantrecalibration_wgs { } close $filehandle; - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { ## Collect QC metadata info for later use set_recipe_outfile_in_sample_info( @@ -960,13 +929,13 @@ sub analysis_gatk_variantrecalibration_wgs { submit_recipe( { - base_command => $profile_base_command, - case_id => $case_id, - dependency_method => q{sample_to_case}, - job_id_chain => $job_id_chain, - job_id_href => $job_id_href, - job_reservation_name => $active_parameter_href->{job_reservation_name}, - log => $log, + base_command => $profile_base_command, + case_id => $case_id, + dependency_method => q{sample_to_case}, + job_id_chain => $recipe{job_id_chain}, + job_id_href => $job_id_href, + job_reservation_name => 
$active_parameter_href->{job_reservation_name}, + log => $log, max_parallel_processes_count_href => $file_info_href->{max_parallel_processes_count}, recipe_file_path => $recipe_file_path, diff --git a/lib/MIP/Recipes/Analysis/Genebody_coverage.pm b/lib/MIP/Recipes/Analysis/Genebody_coverage.pm index c515208da..ec653b41b 100644 --- a/lib/MIP/Recipes/Analysis/Genebody_coverage.pm +++ b/lib/MIP/Recipes/Analysis/Genebody_coverage.pm @@ -120,7 +120,7 @@ sub analysis_genebody_coverage { check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; use MIP::Get::File qw{ get_io_files }; - use MIP::Get::Parameter qw{ get_recipe_attributes get_recipe_resources }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Program::Gnu::Coreutils qw{ gnu_cp gnu_rm }; use MIP::Program::Rseqc qw{ rseqc_bam2wig rseqc_genebody_coverage2 }; use MIP::Parse::File qw{ parse_io_outfiles }; @@ -150,17 +150,10 @@ sub analysis_genebody_coverage { my $infile_suffix = $io{in}{file_suffix}; my $infile_path = $infile_path_prefix . 
$infile_suffix; - my $job_id_chain = get_recipe_attributes( - { - parameter_href => $parameter_href, - recipe_name => $recipe_name, - attribute => q{chain}, - } - ); - my $recipe_mode = $active_parameter_href->{$recipe_name}; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); @@ -169,7 +162,7 @@ sub analysis_genebody_coverage { %io, parse_io_outfiles( { - chain_id => $job_id_chain, + chain_id => $recipe{job_id_chain}, id => $sample_id, file_info_href => $file_info_href, file_name_prefixes_ref => [$infile_name_prefix], @@ -192,12 +185,12 @@ sub analysis_genebody_coverage { my ( $recipe_file_path, $recipe_info_path ) = setup_script( { active_parameter_href => $active_parameter_href, - core_number => $recipe_resource{core_number}, + core_number => $recipe{core_number}, directory_id => $sample_id, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, - process_time => $recipe_resource{time}, + memory_allocation => $recipe{memory}, + process_time => $recipe{time}, recipe_directory => $recipe_name, recipe_name => $recipe_name, } @@ -249,10 +242,8 @@ sub analysis_genebody_coverage { ## Cleanup say {$filehandle} q{## Cleanup}; - my @temp_files = ( - $infile_path_prefix . $infile_suffix . $DOT . q{bai}, - $outfile_path_prefix . $DOT . q{wig} - ); + my @temp_files = ( $infile_path_prefix . $infile_suffix . $DOT . q{bai}, + $outfile_path_prefix . $DOT . q{wig} ); TEMP_FILE: foreach my $temp_file (@temp_files) { gnu_rm( @@ -267,11 +258,10 @@ sub analysis_genebody_coverage { ## Close filehandleS close $filehandle or $log->logcroak(q{Could not close filehandle}); - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { ## Collect QC metadata info for later use - my $outfile_path = - $outfile_path_prefix . $DOT . q{geneBodyCoverage} . 
$outfile_suffix; + my $outfile_path = $outfile_path_prefix . $DOT . q{geneBodyCoverage} . $outfile_suffix; set_recipe_outfile_in_sample_info( { infile => $outfile_name_prefix, @@ -284,13 +274,13 @@ sub analysis_genebody_coverage { submit_recipe( { - base_command => $profile_base_command, - case_id => $case_id, - dependency_method => q{sample_to_island}, - job_id_chain => $job_id_chain, - job_id_href => $job_id_href, - job_reservation_name => $active_parameter_href->{job_reservation_name}, - log => $log, + base_command => $profile_base_command, + case_id => $case_id, + dependency_method => q{sample_to_island}, + job_id_chain => $recipe{job_id_chain}, + job_id_href => $job_id_href, + job_reservation_name => $active_parameter_href->{job_reservation_name}, + log => $log, max_parallel_processes_count_href => $file_info_href->{max_parallel_processes_count}, recipe_file_path => $recipe_file_path, diff --git a/lib/MIP/Recipes/Analysis/Gffcompare.pm b/lib/MIP/Recipes/Analysis/Gffcompare.pm index a3bb539f8..e5620cd47 100644 --- a/lib/MIP/Recipes/Analysis/Gffcompare.pm +++ b/lib/MIP/Recipes/Analysis/Gffcompare.pm @@ -134,11 +134,11 @@ sub analysis_gffcompare { check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; use MIP::Get::File qw{ get_io_files }; - use MIP::Get::Parameter qw{ get_recipe_attributes get_recipe_resources }; use MIP::Program::Gnu::Coreutils qw{ gnu_mv }; use MIP::Parse::File qw{ parse_io_outfiles }; use MIP::Processmanagement::Processes qw{ submit_recipe }; use MIP::Program::Gffcompare qw{ gffcompare }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Script::Setup_script qw{ setup_script }; use MIP::Sample_info qw{ set_file_path_to_store set_recipe_outfile_in_sample_info }; @@ -169,18 +169,11 @@ sub analysis_gffcompare { ## GffCompare can take multiple inputs. 
Add input gtfs as necessary my @infile_paths = ($infile_path); - my %recipe_attribute = get_recipe_attributes( - { - parameter_href => $parameter_href, - recipe_name => $recipe_name, - } - ); - my $job_id_chain = $recipe_attribute{chain}; - my $recipe_mode = $active_parameter_href->{$recipe_name}; my $annotationfile_path = $active_parameter_href->{transcript_annotation}; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); @@ -191,7 +184,7 @@ sub analysis_gffcompare { %io, parse_io_outfiles( { - chain_id => $job_id_chain, + chain_id => $recipe{job_id_chain}, file_info_href => $file_info_href, file_name_prefixes_ref => [$infile_name_prefix], id => $sample_id, @@ -215,12 +208,12 @@ sub analysis_gffcompare { my ( $recipe_file_path, $recipe_info_path ) = setup_script( { active_parameter_href => $active_parameter_href, - core_number => $recipe_resource{core_number}, + core_number => $recipe{core_number}, directory_id => $sample_id, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, - process_time => $recipe_resource{time}, + memory_allocation => $recipe{memory}, + process_time => $recipe{time}, recipe_directory => $recipe_name, recipe_name => $recipe_name, temp_directory => $temp_directory, @@ -233,9 +226,9 @@ sub analysis_gffcompare { say {$filehandle} q{## GffCompare}; gffcompare( { - filehandle => $filehandle, - genome_sequence_path => $active_parameter_href->{human_genome_reference}, - gtf_reference_path => $active_parameter_href->{transcript_annotation}, + filehandle => $filehandle, + genome_sequence_path => $active_parameter_href->{human_genome_reference}, + gtf_reference_path => $active_parameter_href->{transcript_annotation}, ignore_non_overlapping_ref => 1, infile_paths_ref => \@infile_paths, outfile_path_prefix => $outfile_path_prefix, @@ -245,9 +238,9 
@@ sub analysis_gffcompare { ## Rename output files say {$filehandle} q{## Rename and move GFFCompare output}; - my $gff_output_path = $outfile_path_prefix . $DOT . q{annotated.gtf}; - my $refmap_infile_path = catfile( $indir_path, - $outfile_name_prefix . $DOT . $infile_name . $DOT . q{refmap} ); + my $gff_output_path = $outfile_path_prefix . $DOT . q{annotated.gtf}; + my $refmap_infile_path = + catfile( $indir_path, $outfile_name_prefix . $DOT . $infile_name . $DOT . q{refmap} ); my $tmap_infile_path = catfile( $indir_path, $outfile_name_prefix . $DOT . $infile_name . $DOT . q{tmap} ); @@ -271,7 +264,7 @@ sub analysis_gffcompare { ## Close filehandle close $filehandle; - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { ## Collect QC metadata info for later use set_recipe_outfile_in_sample_info( @@ -296,13 +289,13 @@ sub analysis_gffcompare { submit_recipe( { - base_command => $profile_base_command, - case_id => $case_id, - dependency_method => q{sample_to_island}, - job_id_chain => $job_id_chain, - job_id_href => $job_id_href, - job_reservation_name => $active_parameter_href->{job_reservation_name}, - log => $log, + base_command => $profile_base_command, + case_id => $case_id, + dependency_method => q{sample_to_island}, + job_id_chain => $recipe{job_id_chain}, + job_id_href => $job_id_href, + job_reservation_name => $active_parameter_href->{job_reservation_name}, + log => $log, max_parallel_processes_count_href => $file_info_href->{max_parallel_processes_count}, recipe_file_path => $recipe_file_path, diff --git a/lib/MIP/Recipes/Analysis/Glnexus.pm b/lib/MIP/Recipes/Analysis/Glnexus.pm index 6cbfea275..20427310e 100644 --- a/lib/MIP/Recipes/Analysis/Glnexus.pm +++ b/lib/MIP/Recipes/Analysis/Glnexus.pm @@ -112,13 +112,13 @@ sub analysis_glnexus { check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; use MIP::Get::File qw{ get_io_files }; - use MIP::Get::Parameter qw{ get_recipe_attributes get_recipe_resources }; use MIP::Parse::File qw{ 
parse_io_outfiles }; use MIP::Program::Bcftools qw{ bcftools_view_and_index_vcf }; use MIP::Program::Gnu::Coreutils qw{ gnu_cp }; use MIP::Program::Glnexus qw{ glnexus_merge }; use MIP::Program::Htslib qw{ htslib_bgzip }; use MIP::Processmanagement::Processes qw{ submit_recipe }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Sample_info qw{ set_recipe_outfile_in_sample_info }; use MIP::Script::Setup_script qw{ setup_script }; @@ -132,26 +132,17 @@ sub analysis_glnexus { ( MIXED => q{WGS}, PANEL => q{WES}, WGS => q{WGS}, WES => q{WES} ); my $consensus_analysis_type = - $consensus_analysis_type_map{ uc $parameter_href->{cache}{consensus_analysis_type} - }; + $consensus_analysis_type_map{ uc $parameter_href->{cache}{consensus_analysis_type} }; - my $job_id_chain = get_recipe_attributes( - { - attribute => q{chain}, - parameter_href => $parameter_href, - recipe_name => $recipe_name, - } - ); - my $recipe_mode = $active_parameter_href->{$recipe_name}; - - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); - my $core_number = $recipe_resource{core_number}; - my $time = $recipe_resource{time}; + my $core_number = $recipe{core_number}; + my $time = $recipe{time}; ## Get the io infiles per chain and id my @genotype_infile_paths; @@ -176,7 +167,7 @@ sub analysis_glnexus { my %io = parse_io_outfiles( { - chain_id => $job_id_chain, + chain_id => $recipe{job_id_chain}, id => $case_id, file_info_href => $file_info_href, file_name_prefixes_ref => [$case_id], @@ -187,8 +178,8 @@ sub analysis_glnexus { ); my $outfile_path_prefix = $io{out}{file_path_prefix}; - my $outfile_path = catdir( $active_parameter_href->{temp_directory}, - $io{out}{file_name_prefix} . q{.vcf} ); + my $outfile_path = + catdir( $active_parameter_href->{temp_directory}, $io{out}{file_name_prefix} . 
q{.vcf} ); ## Filehandles # Create anonymous filehandle @@ -198,12 +189,12 @@ sub analysis_glnexus { my ( $recipe_file_path, $recipe_info_path ) = setup_script( { active_parameter_href => $active_parameter_href, - core_number => $recipe_resource{core_number}, + core_number => $recipe{core_number}, directory_id => $case_id, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, - process_time => $recipe_resource{time}, + memory_allocation => $recipe{memory}, + process_time => $recipe{time}, recipe_directory => $recipe_name, recipe_name => $recipe_name, } @@ -222,8 +213,8 @@ sub analysis_glnexus { glnexus_merge( { - config => $config_type, - dir => catdir( $active_parameter_href->{temp_directory}, q{glnexus} ), + config => $config_type, + dir => catdir( $active_parameter_href->{temp_directory}, q{glnexus} ), filehandle => $filehandle, infile_paths_ref => \@genotype_infile_paths, stdoutfile_path => $outfile_path, @@ -261,7 +252,7 @@ sub analysis_glnexus { ## Close filehandle close $filehandle or $log->logcroak(q{Could not close filehandle}); - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { set_recipe_outfile_in_sample_info( { @@ -273,13 +264,13 @@ sub analysis_glnexus { submit_recipe( { - base_command => $profile_base_command, - case_id => $case_id, - dependency_method => q{sample_to_case}, - log => $log, - job_id_chain => $job_id_chain, - job_id_href => $job_id_href, - job_reservation_name => $active_parameter_href->{job_reservation_name}, + base_command => $profile_base_command, + case_id => $case_id, + dependency_method => q{sample_to_case}, + log => $log, + job_id_chain => $recipe{job_id_chain}, + job_id_href => $job_id_href, + job_reservation_name => $active_parameter_href->{job_reservation_name}, max_parallel_processes_count_href => $file_info_href->{max_parallel_processes_count}, recipe_file_path => $recipe_file_path, diff --git a/lib/MIP/Recipes/Analysis/Gzip_fastq.pm 
b/lib/MIP/Recipes/Analysis/Gzip_fastq.pm index e3d77922e..efe71d67c 100644 --- a/lib/MIP/Recipes/Analysis/Gzip_fastq.pm +++ b/lib/MIP/Recipes/Analysis/Gzip_fastq.pm @@ -123,10 +123,10 @@ sub analysis_gzip_fastq { use MIP::Environment::Cluster qw{ check_max_core_number }; use MIP::File_info qw{ get_is_sample_files_compressed get_sample_file_attribute }; use MIP::Get::File qw{ get_io_files }; - use MIP::Get::Parameter qw{ get_recipe_attributes get_recipe_resources }; use MIP::Parse::File qw{ parse_io_outfiles }; use MIP::Processmanagement::Processes qw{ submit_recipe }; use MIP::Program::Gzip qw{ gzip }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Script::Setup_script qw{ setup_script }; my $is_files_compressed = get_is_sample_files_compressed( @@ -157,21 +157,14 @@ sub analysis_gzip_fastq { my @infile_name_prefixes = @{ $io{in}{file_name_prefixes} }; my @infile_paths = @{ $io{in}{file_paths} }; - my $job_id_chain = get_recipe_attributes( - { - parameter_href => $parameter_href, - recipe_name => $recipe_name, - attribute => q{chain}, - } - ); - my $recipe_mode = $active_parameter_href->{$recipe_name}; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); - my $core_number = $recipe_resource{core_number}; + my $core_number = $recipe{core_number}; ## Outpaths my @outfile_paths = @@ -182,7 +175,7 @@ sub analysis_gzip_fastq { %io, parse_io_outfiles( { - chain_id => $job_id_chain, + chain_id => $recipe{job_id_chain}, id => $sample_id, file_info_href => $file_info_href, file_paths_ref => \@outfile_paths, @@ -194,7 +187,7 @@ sub analysis_gzip_fastq { ## Adjust according to number of infiles to process # One full lane on Hiseq takes approx. 
2 h for gzip to process - my $time = $recipe_resource{time} * scalar @infile_names; + my $time = $recipe{time} * scalar @infile_names; ## Filehandles # Create anonymous filehandle @@ -243,7 +236,7 @@ sub analysis_gzip_fastq { directory_id => $sample_id, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, + memory_allocation => $recipe{memory}, process_time => $time, recipe_directory => $recipe_name, recipe_name => $recipe_name, @@ -295,14 +288,14 @@ sub analysis_gzip_fastq { print {$filehandle} $NEWLINE; say {$filehandle} q{wait}, $NEWLINE; - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { submit_recipe( { base_command => $profile_base_command, case_id => $case_id, dependency_method => q{island_to_sample}, - job_id_chain => $job_id_chain, + job_id_chain => $recipe{job_id_chain}, job_id_href => $job_id_href, job_reservation_name => $active_parameter_href->{job_reservation_name}, log => $log, diff --git a/lib/MIP/Recipes/Analysis/Manta.pm b/lib/MIP/Recipes/Analysis/Manta.pm index 976ff4540..388f01f49 100644 --- a/lib/MIP/Recipes/Analysis/Manta.pm +++ b/lib/MIP/Recipes/Analysis/Manta.pm @@ -126,7 +126,7 @@ sub analysis_manta { check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; use MIP::Get::File qw{ get_io_files }; - use MIP::Get::Parameter qw{ get_recipe_attributes get_recipe_resources }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Program::Gnu::Coreutils qw{ gnu_rm }; use MIP::Parse::File qw{ parse_io_outfiles }; use MIP::Processmanagement::Processes qw{ submit_recipe }; @@ -142,26 +142,19 @@ sub analysis_manta { ## Unpack parameters my $consensus_analysis_type = $parameter_href->{cache}{consensus_analysis_type}; - my $job_id_chain = get_recipe_attributes( - { - attribute => q{chain}, - parameter_href => $parameter_href, - recipe_name => $recipe_name, - } - ); - my $recipe_mode = $active_parameter_href->{$recipe_name}; - my %recipe_resource = get_recipe_resources( + my 
%recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); - my $core_number = $recipe_resource{core_number}; + my $core_number = $recipe{core_number}; ## Set and get the io files per chain, id and stream my %io = parse_io_outfiles( { - chain_id => $job_id_chain, + chain_id => $recipe{job_id_chain}, id => $case_id, file_info_href => $file_info_href, file_name_prefixes_ref => [$case_id], @@ -187,8 +180,8 @@ sub analysis_manta { directory_id => $case_id, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, - process_time => $recipe_resource{time}, + memory_allocation => $recipe{memory}, + process_time => $recipe{time}, recipe_directory => $recipe_name, recipe_name => $recipe_name, temp_directory => $temp_directory, @@ -198,9 +191,7 @@ sub analysis_manta { ## Collect infiles for all sample_ids to enable migration to temporary directory my @manta_infile_paths; SAMPLE_ID: - while ( my ( $sample_id_index, $sample_id ) = - each @{ $active_parameter_href->{sample_ids} } ) - { + while ( my ( $sample_id_index, $sample_id ) = each @{ $active_parameter_href->{sample_ids} } ) { ## Get the io infiles per chain and id my %sample_io = get_io_files( @@ -246,13 +237,12 @@ sub analysis_manta { manta_config( { - call_regions_file_path => - $active_parameter_href->{manta_call_regions_file_path}, - exome_analysis => $is_exome_analysis, - filehandle => $filehandle, - infile_paths_ref => \@manta_infile_paths, - outdirectory_path => $outdir_path, - referencefile_path => $referencefile_path, + call_regions_file_path => $active_parameter_href->{manta_call_regions_file_path}, + exome_analysis => $is_exome_analysis, + filehandle => $filehandle, + infile_paths_ref => \@manta_infile_paths, + outdirectory_path => $outdir_path, + referencefile_path => $referencefile_path, } ); say {$filehandle} $NEWLINE; @@ -268,8 +258,7 @@ sub analysis_manta { ); 
say {$filehandle} $NEWLINE; - my $manta_temp_outfile_path = - catfile( $outdir_path, qw{ results variants diploidSV.vcf.gz } ); + my $manta_temp_outfile_path = catfile( $outdir_path, qw{ results variants diploidSV.vcf.gz } ); ## Perl wrapper for writing gzip recipe to $filehandle gzip( @@ -285,7 +274,7 @@ sub analysis_manta { close $filehandle; - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { set_recipe_outfile_in_sample_info( { @@ -296,13 +285,13 @@ sub analysis_manta { ); submit_recipe( { - base_command => $profile_base_command, - case_id => $case_id, - dependency_method => q{sample_to_case}, - job_id_chain => $job_id_chain, - job_id_href => $job_id_href, - job_reservation_name => $active_parameter_href->{job_reservation_name}, - log => $log, + base_command => $profile_base_command, + case_id => $case_id, + dependency_method => q{sample_to_case}, + job_id_chain => $recipe{job_id_chain}, + job_id_href => $job_id_href, + job_reservation_name => $active_parameter_href->{job_reservation_name}, + log => $log, max_parallel_processes_count_href => $file_info_href->{max_parallel_processes_count}, recipe_file_path => $recipe_file_path, diff --git a/lib/MIP/Recipes/Analysis/Markduplicates.pm b/lib/MIP/Recipes/Analysis/Markduplicates.pm index fa8540831..bf6ae5a64 100644 --- a/lib/MIP/Recipes/Analysis/Markduplicates.pm +++ b/lib/MIP/Recipes/Analysis/Markduplicates.pm @@ -144,12 +144,12 @@ sub analysis_markduplicates { use MIP::Cluster qw{ get_parallel_processes update_memory_allocation }; use MIP::File_info qw{ get_merged_infile_prefix }; use MIP::Get::File qw{ get_io_files }; - use MIP::Get::Parameter qw{ get_recipe_attributes get_recipe_resources }; use MIP::Program::Gnu::Coreutils qw{ gnu_cat }; use MIP::Parse::File qw{ parse_io_outfiles }; use MIP::Processmanagement::Processes qw{ submit_recipe }; use MIP::Program::Picardtools qw{ picardtools_markduplicates picardtools_gatherbamfiles }; use MIP::Program::Samtools qw{ samtools_flagstat samtools_view }; + use 
MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Recipes::Analysis::Xargs qw{ xargs_command }; use MIP::Sample_info qw{ set_recipe_outfile_in_sample_info }; use MIP::Script::Setup_script qw{ setup_script }; @@ -172,24 +172,17 @@ sub analysis_markduplicates { ); my %infile_path = %{ $io{in}{file_path_href} }; - my %rec_atr = get_recipe_attributes( - { - parameter_href => $parameter_href, - recipe_name => $recipe_name, - } - ); - my $job_id_chain = $rec_atr{chain}; - my $recipe_mode = $active_parameter_href->{$recipe_name}; my $referencefile_path = $active_parameter_href->{human_genome_reference}; my $xargs_file_path_prefix; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); - my $core_number = $recipe_resource{core_number}; - my $memory_allocation = $recipe_resource{memory}; + my $core_number = $recipe{core_number}; + my $memory_allocation = $recipe{memory}; ## Add merged infile name prefix after merging all BAM files per sample_id my $merged_infile_prefix = get_merged_infile_prefix( @@ -201,7 +194,7 @@ sub analysis_markduplicates { ## Outpaths ## Assign suffix - my $outfile_suffix = $rec_atr{outfile_suffix}; + my $outfile_suffix = $recipe{outfile_suffix}; my $outsample_directory = catdir( $active_parameter_href->{outdata_dir}, $sample_id, $recipe_name ); my $outfile_tag = @@ -217,7 +210,7 @@ sub analysis_markduplicates { %io, parse_io_outfiles( { - chain_id => $job_id_chain, + chain_id => $recipe{job_id_chain}, id => $sample_id, file_info_href => $file_info_href, file_paths_ref => \@outfile_paths, @@ -257,7 +250,7 @@ sub analysis_markduplicates { filehandle => $filehandle, job_id_href => $job_id_href, memory_allocation => $memory_allocation, - process_time => $recipe_resource{time}, + process_time => $recipe{time}, recipe_directory => $recipe_name, recipe_name => $recipe_name, temp_directory => 
$temp_directory, @@ -273,7 +266,7 @@ sub analysis_markduplicates { { core_number => $core_number, process_memory_allocation => $process_memory_allocation, - recipe_memory_allocation => $recipe_resource{memory}, + recipe_memory_allocation => $recipe{memory}, } ); @@ -368,7 +361,7 @@ sub analysis_markduplicates { close $xargsfilehandle; close $filehandle; - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { ## Collect QC metadata info for later use set_recipe_outfile_in_sample_info( @@ -386,7 +379,7 @@ sub analysis_markduplicates { base_command => $profile_base_command, case_id => $case_id, dependency_method => q{sample_to_sample}, - job_id_chain => $job_id_chain, + job_id_chain => $recipe{job_id_chain}, job_id_href => $job_id_href, job_reservation_name => $active_parameter_href->{job_reservation_name}, log => $log, @@ -494,11 +487,11 @@ sub analysis_markduplicates_panel { use MIP::Cluster qw{ update_memory_allocation }; use MIP::Get::File qw{ get_io_files }; - use MIP::Get::Parameter qw{ get_recipe_attributes get_recipe_resources }; use MIP::Parse::File qw{ parse_io_outfiles }; use MIP::Processmanagement::Processes qw{ submit_recipe }; use MIP::Program::Picardtools qw{ picardtools_markduplicates }; use MIP::Program::Samtools qw{ samtools_flagstat samtools_view }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Sample_info qw{ set_recipe_outfile_in_sample_info }; use MIP::Script::Setup_script qw{ setup_script }; @@ -521,30 +514,23 @@ sub analysis_markduplicates_panel { my $infile_path = $io{in}{file_path}; my $infile_name_prefix = $io{in}{file_name_prefix}; - my %rec_atr = get_recipe_attributes( - { - parameter_href => $parameter_href, - recipe_name => $recipe_name, - } - ); - my $job_id_chain = $rec_atr{chain}; - my $recipe_mode = $active_parameter_href->{$recipe_name}; my $referencefile_path = $active_parameter_href->{human_genome_reference}; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { 
active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); - my $core_number = $recipe_resource{core_number}; - my $memory_allocation = $recipe_resource{memory}; + my $core_number = $recipe{core_number}; + my $memory_allocation = $recipe{memory}; ## Set and get the io files per chain, id and stream %io = ( %io, parse_io_outfiles( { - chain_id => $job_id_chain, + chain_id => $recipe{job_id_chain}, id => $sample_id, file_info_href => $file_info_href, file_name_prefixes_ref => [$infile_name_prefix], @@ -586,7 +572,7 @@ sub analysis_markduplicates_panel { filehandle => $filehandle, job_id_href => $job_id_href, memory_allocation => $memory_allocation, - process_time => $recipe_resource{time}, + process_time => $recipe{time}, recipe_directory => $recipe_name, recipe_name => $recipe_name, temp_directory => ${active_parameter_href}->{temp_directory}, @@ -639,7 +625,7 @@ sub analysis_markduplicates_panel { ## Close filehandles close $filehandle; - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { ## Collect QC metadata info for later use set_recipe_outfile_in_sample_info( @@ -657,7 +643,7 @@ sub analysis_markduplicates_panel { base_command => $profile_base_command, case_id => $case_id, dependency_method => q{sample_to_sample}, - job_id_chain => $job_id_chain, + job_id_chain => $recipe{job_id_chain}, job_id_href => $job_id_href, job_reservation_name => $active_parameter_href->{job_reservation_name}, log => $log, @@ -781,13 +767,13 @@ sub analysis_markduplicates_rna { use MIP::Cluster qw{ get_parallel_processes update_memory_allocation }; use MIP::File_info qw{ get_merged_infile_prefix }; use MIP::Get::File qw{ get_io_files }; - use MIP::Get::Parameter qw{ get_recipe_attributes get_recipe_resources }; use MIP::Program::Gnu::Coreutils qw{ gnu_cat }; use MIP::Parse::File qw{ parse_io_outfiles }; use MIP::Processmanagement::Processes qw{ submit_recipe }; use MIP::Program::Picardtools qw{ 
picardtools_markduplicates picardtools_gatherbamfiles }; use MIP::Program::Samtools qw{ samtools_flagstat samtools_index samtools_view }; use MIP::Recipes::Analysis::Xargs qw{ xargs_command }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Sample_info qw{ set_file_path_to_store set_recipe_outfile_in_sample_info }; @@ -811,24 +797,17 @@ sub analysis_markduplicates_rna { ); my %infile_path = %{ $io{in}{file_path_href} }; - my %rec_atr = get_recipe_attributes( - { - parameter_href => $parameter_href, - recipe_name => $recipe_name, - } - ); - my $job_id_chain = $rec_atr{chain}; - my $recipe_mode = $active_parameter_href->{$recipe_name}; my $referencefile_path = $active_parameter_href->{human_genome_reference}; my $xargs_file_path_prefix; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); - my $core_number = $recipe_resource{core_number}; - my $memory_allocation = $recipe_resource{memory}; + my $core_number = $recipe{core_number}; + my $memory_allocation = $recipe{memory}; ## Add merged infile name prefix after merging all BAM files per sample_id my $merged_infile_prefix = get_merged_infile_prefix( @@ -840,7 +819,7 @@ sub analysis_markduplicates_rna { ## Outpaths ## Assign suffix - my $outfile_suffix = $rec_atr{outfile_suffix}; + my $outfile_suffix = $recipe{outfile_suffix}; my $outsample_directory = catdir( $active_parameter_href->{outdata_dir}, $sample_id, $recipe_name ); my $outfile_tag = @@ -856,7 +835,7 @@ sub analysis_markduplicates_rna { %io, parse_io_outfiles( { - chain_id => $job_id_chain, + chain_id => $recipe{job_id_chain}, id => $sample_id, file_info_href => $file_info_href, file_paths_ref => \@outfile_paths, @@ -896,7 +875,7 @@ sub analysis_markduplicates_rna { filehandle => $filehandle, job_id_href => $job_id_href, memory_allocation => $memory_allocation, - process_time => 
$recipe_resource{time}, + process_time => $recipe{time}, recipe_directory => $recipe_name, recipe_name => $recipe_name, temp_directory => $temp_directory, @@ -912,7 +891,7 @@ sub analysis_markduplicates_rna { { core_number => $core_number, process_memory_allocation => $process_memory_allocation, - recipe_memory_allocation => $recipe_resource{memory}, + recipe_memory_allocation => $recipe{memory}, } ); @@ -1035,7 +1014,7 @@ sub analysis_markduplicates_rna { close $xargsfilehandle; close $filehandle; - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { ## Collect QC metadata info for later use set_recipe_outfile_in_sample_info( @@ -1074,7 +1053,7 @@ sub analysis_markduplicates_rna { base_command => $profile_base_command, case_id => $case_id, dependency_method => q{sample_to_sample}, - job_id_chain => $job_id_chain, + job_id_chain => $recipe{job_id_chain}, job_id_href => $job_id_href, job_reservation_name => $active_parameter_href->{job_reservation_name}, log => $log, diff --git a/lib/MIP/Recipes/Analysis/Mip_qccollect.pm b/lib/MIP/Recipes/Analysis/Mip_qccollect.pm index ad9ca1187..6364623d1 100644 --- a/lib/MIP/Recipes/Analysis/Mip_qccollect.pm +++ b/lib/MIP/Recipes/Analysis/Mip_qccollect.pm @@ -117,10 +117,10 @@ sub analysis_mip_qccollect { check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; - use MIP::Get::Parameter qw{ get_recipe_attributes get_recipe_resources }; use MIP::Parse::File qw{ parse_io_outfiles }; use MIP::Processmanagement::Processes qw{ submit_recipe }; use MIP::Program::Mip qw{ mip_qccollect }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Sample_info qw{ set_file_path_to_store set_recipe_outfile_in_sample_info }; use MIP::Script::Setup_script qw{ setup_script }; @@ -130,24 +130,17 @@ sub analysis_mip_qccollect { my $log = Log::Log4perl->get_logger($LOG_NAME); ## Unpack parameters - my $job_id_chain = get_recipe_attributes( - { - parameter_href => $parameter_href, - recipe_name => $recipe_name, - attribute => 
q{chain}, - } - ); - my $recipe_mode = $active_parameter_href->{$recipe_name}; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); my %io = parse_io_outfiles( { - chain_id => $job_id_chain, + chain_id => $recipe{job_id_chain}, id => $case_id, file_info_href => $file_info_href, file_name_prefixes_ref => [$case_id], @@ -168,14 +161,14 @@ sub analysis_mip_qccollect { my ($recipe_file_path) = setup_script( { active_parameter_href => $active_parameter_href, - core_number => $recipe_resource{core_number}, + core_number => $recipe{core_number}, directory_id => $case_id, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, + memory_allocation => $recipe{memory}, recipe_directory => $recipe_name, recipe_name => $recipe_name, - process_time => $recipe_resource{time}, + process_time => $recipe{time}, } ); @@ -197,7 +190,7 @@ sub analysis_mip_qccollect { close $filehandle or $log->logcroak(q{Could not close filehandle}); - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { set_recipe_outfile_in_sample_info( { @@ -222,7 +215,7 @@ sub analysis_mip_qccollect { base_command => $profile_base_command, dependency_method => q{add_to_all}, job_dependency_type => q{afterok}, - job_id_chain => $job_id_chain, + job_id_chain => $recipe{job_id_chain}, job_id_href => $job_id_href, job_reservation_name => $active_parameter_href->{job_reservation_name}, log => $log, diff --git a/lib/MIP/Recipes/Analysis/Mip_vcfparser.pm b/lib/MIP/Recipes/Analysis/Mip_vcfparser.pm index 667bf313c..be4511ea4 100644 --- a/lib/MIP/Recipes/Analysis/Mip_vcfparser.pm +++ b/lib/MIP/Recipes/Analysis/Mip_vcfparser.pm @@ -140,12 +140,12 @@ sub analysis_mip_vcfparser { use MIP::Analysis qw{ get_vcf_parser_analysis_suffix }; use MIP::Cluster qw{ get_core_number update_memory_allocation }; use MIP::Get::File 
qw{ get_io_files }; - use MIP::Get::Parameter qw{ get_recipe_attributes get_recipe_resources }; use MIP::Parse::File qw{ parse_io_outfiles }; use MIP::Processmanagement::Processes qw{ submit_recipe }; use MIP::Program::Mip qw{ mip_vcfparser }; - use MIP::Sample_info qw{ set_gene_panel set_recipe_outfile_in_sample_info }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Recipes::Analysis::Xargs qw{ xargs_command }; + use MIP::Sample_info qw{ set_gene_panel set_recipe_outfile_in_sample_info }; use MIP::Script::Setup_script qw{ setup_script }; ### PREPROCESSING: @@ -169,17 +169,10 @@ sub analysis_mip_vcfparser { my %infile_path = %{ $io{in}{file_path_href} }; my @contigs_size_ordered = @{ $file_info_href->{contigs_size_ordered} }; - my $job_id_chain = get_recipe_attributes( - { - parameter_href => $parameter_href, - recipe_name => $recipe_name, - attribute => q{chain}, - } - ); - my $recipe_mode = $active_parameter_href->{$recipe_name}; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); @@ -211,7 +204,7 @@ sub analysis_mip_vcfparser { %io, parse_io_outfiles( { - chain_id => $job_id_chain, + chain_id => $recipe{job_id_chain}, id => $case_id, file_info_href => $file_info_href, file_name_prefix => $infile_name_prefix, @@ -238,7 +231,7 @@ sub analysis_mip_vcfparser { { max_cores_per_node => $active_parameter_href->{max_cores_per_node}, modifier_core_number => scalar @{ $file_info_href->{contigs} }, - recipe_core_number => $recipe_resource{core_number}, + recipe_core_number => $recipe{core_number}, } ); ## Update memory depending on how many cores that are being used @@ -246,7 +239,7 @@ sub analysis_mip_vcfparser { { node_ram_memory => $active_parameter_href->{node_ram_memory}, parallel_processes => $core_number, - process_memory_allocation => $recipe_resource{memory}, + process_memory_allocation => 
$recipe{memory}, } ); @@ -259,7 +252,7 @@ sub analysis_mip_vcfparser { filehandle => $filehandle, job_id_href => $job_id_href, memory_allocation => $memory_allocation, - process_time => $recipe_resource{time}, + process_time => $recipe{time}, recipe_directory => $recipe_name, recipe_name => $recipe_name, temp_directory => $temp_directory, @@ -296,8 +289,7 @@ sub analysis_mip_vcfparser { $padding = $ANNOTATION_DISTANCE_MT; } - my $log_file_path = - catfile( $outdir_path, q{vcfparser} . $UNDERSCORE . $contig . q{.log} ); + my $log_file_path = catfile( $outdir_path, q{vcfparser} . $UNDERSCORE . $contig . q{.log} ); my $vcfparser_xargs_file_path_prefix = $xargs_file_path_prefix . $DOT . $contig; my @select_feature_annotation_columns; my $select_file; @@ -323,14 +315,10 @@ sub analysis_mip_vcfparser { $select_file_matching_column = $active_parameter_href->{vcfparser_select_file_matching_column}; - if ( - exists - $active_parameter_href->{vcfparser_select_feature_annotation_columns} - ) + if ( exists $active_parameter_href->{vcfparser_select_feature_annotation_columns} ) { @select_feature_annotation_columns = - @{ $active_parameter_href - ->{vcfparser_select_feature_annotation_columns} }; + @{ $active_parameter_href->{vcfparser_select_feature_annotation_columns} }; } my $select_outfile_suffix_key = $contig . $UNDERSCORE . 
$vcfparser_analysis_types[1]; @@ -345,19 +333,14 @@ sub analysis_mip_vcfparser { log_file_path => $log_file_path, padding => $padding, parse_vep => $active_parameter_href->{varianteffectpredictor}, - range_feature_annotation_columns_ref => \@{ - $active_parameter_href->{vcfparser_range_feature_annotation_columns} - }, - range_feature_file_path => - $active_parameter_href->{vcfparser_range_feature_file}, - select_feature_annotation_columns_ref => - \@select_feature_annotation_columns, - select_feature_file_path => $select_file, - select_feature_matching_column => $select_file_matching_column, - select_outfile => $select_outfile, - stderrfile_path => $vcfparser_xargs_file_path_prefix - . $DOT - . q{stderr.txt}, + range_feature_annotation_columns_ref => + \@{ $active_parameter_href->{vcfparser_range_feature_annotation_columns} }, + range_feature_file_path => $active_parameter_href->{vcfparser_range_feature_file}, + select_feature_annotation_columns_ref => \@select_feature_annotation_columns, + select_feature_file_path => $select_file, + select_feature_matching_column => $select_file_matching_column, + select_outfile => $select_outfile, + stderrfile_path => $vcfparser_xargs_file_path_prefix . $DOT . 
q{stderr.txt}, stdoutfile_path => $outfile_path{$contig}, } ); @@ -377,7 +360,7 @@ sub analysis_mip_vcfparser { } say {$xargsfilehandle} $NEWLINE; - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { my %gene_panels = ( range_file => q{vcfparser_range_feature_file}, @@ -390,8 +373,7 @@ sub analysis_mip_vcfparser { ## Collect databases(s) from a potentially merged gene panel file and adds them to sample_info set_gene_panel( { - aggregate_gene_panel_file => - $active_parameter_href->{$gene_panel_file}, + aggregate_gene_panel_file => $active_parameter_href->{$gene_panel_file}, aggregate_gene_panels_key => $gene_panel_key, recipe_name => $recipe_name, sample_info_href => $sample_info_href, @@ -413,17 +395,17 @@ sub analysis_mip_vcfparser { close $xargsfilehandle or $log->logcroak(q{Could not close $xargsfilehandle}); - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { submit_recipe( { - base_command => $profile_base_command, - case_id => $case_id, - dependency_method => q{sample_to_case}, - job_id_chain => $job_id_chain, - job_id_href => $job_id_href, - job_reservation_name => $active_parameter_href->{job_reservation_name}, - log => $log, + base_command => $profile_base_command, + case_id => $case_id, + dependency_method => q{sample_to_case}, + job_id_chain => $recipe{job_id_chain}, + job_id_href => $job_id_href, + job_reservation_name => $active_parameter_href->{job_reservation_name}, + log => $log, max_parallel_processes_count_href => $file_info_href->{max_parallel_processes_count}, recipe_file_path => $recipe_file_path, @@ -530,7 +512,6 @@ sub analysis_mip_vcfparser_panel { use MIP::Analysis qw{ get_vcf_parser_analysis_suffix }; use MIP::Get::File qw{ get_io_files }; - use MIP::Get::Parameter qw{ get_recipe_attributes get_recipe_resources }; use MIP::List qw{ get_splitted_lists }; use MIP::Parameter qw{ get_cache }; use MIP::Parse::File qw{ parse_io_outfiles }; @@ -538,6 +519,7 @@ sub analysis_mip_vcfparser_panel { use MIP::Program::Bcftools qw{ 
bcftools_view bcftools_view_and_index_vcf }; use MIP::Program::Gatk qw{ gatk_concatenate_variants }; use MIP::Program::Mip qw{ mip_vcfparser }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Sample_info qw{ set_gene_panel set_recipe_outfile_in_sample_info }; use MIP::Script::Setup_script qw{ setup_script }; @@ -561,17 +543,10 @@ sub analysis_mip_vcfparser_panel { my $infile_name_prefix = $io{in}{file_name_prefix}; my $infile_path = $io{in}{file_path}; - my $job_id_chain = get_recipe_attributes( - { - parameter_href => $parameter_href, - recipe_name => $recipe_name, - attribute => q{chain}, - } - ); - my $recipe_mode = $active_parameter_href->{$recipe_name}; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); @@ -594,7 +569,7 @@ sub analysis_mip_vcfparser_panel { %io, parse_io_outfiles( { - chain_id => $job_id_chain, + chain_id => $recipe{job_id_chain}, id => $case_id, file_info_href => $file_info_href, file_name_prefix => $infile_name_prefix, @@ -621,12 +596,12 @@ sub analysis_mip_vcfparser_panel { my ($recipe_file_path) = setup_script( { active_parameter_href => $active_parameter_href, - core_number => $recipe_resource{core_number}, + core_number => $recipe{core_number}, directory_id => $case_id, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, - process_time => $recipe_resource{time}, + memory_allocation => $recipe{memory}, + process_time => $recipe{time}, recipe_directory => $recipe_name, recipe_name => $recipe_name, temp_directory => $temp_directory, @@ -706,17 +681,14 @@ sub analysis_mip_vcfparser_panel { log_file_path => $log_file_path, padding => $paddings[$index], parse_vep => $active_parameter_href->{varianteffectpredictor}, - range_feature_annotation_columns_ref => \@{ - 
$active_parameter_href->{vcfparser_range_feature_annotation_columns} - }, - range_feature_file_path => - $active_parameter_href->{vcfparser_range_feature_file}, - select_feature_annotation_columns_ref => - \@select_feature_annotation_columns, - select_feature_file_path => $select_file_path, - select_feature_matching_column => $select_file_matching_column, - select_outfile => $temp_select_outfile_paths[$index], - stdoutfile_path => $temp_outfile_paths[$index], + range_feature_annotation_columns_ref => + \@{ $active_parameter_href->{vcfparser_range_feature_annotation_columns} }, + range_feature_file_path => $active_parameter_href->{vcfparser_range_feature_file}, + select_feature_annotation_columns_ref => \@select_feature_annotation_columns, + select_feature_file_path => $select_file_path, + select_feature_matching_column => $select_file_matching_column, + select_outfile => $temp_select_outfile_paths[$index], + stdoutfile_path => $temp_outfile_paths[$index], } ); say {$filehandle} $NEWLINE; @@ -741,7 +713,7 @@ sub analysis_mip_vcfparser_panel { close $filehandle or $log->logcroak(q{Could not close $filehandle}); - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { my %gene_panels = ( range_file => q{vcfparser_range_feature_file}, @@ -754,8 +726,7 @@ sub analysis_mip_vcfparser_panel { ## Collect databases(s) from a potentially merged gene panel file and adds them to sample_info set_gene_panel( { - aggregate_gene_panel_file => - $active_parameter_href->{$gene_panel_file}, + aggregate_gene_panel_file => $active_parameter_href->{$gene_panel_file}, aggregate_gene_panels_key => $gene_panel_key, recipe_name => $recipe_name, sample_info_href => $sample_info_href, @@ -774,13 +745,13 @@ sub analysis_mip_vcfparser_panel { submit_recipe( { - base_command => $profile_base_command, - case_id => $case_id, - dependency_method => q{sample_to_case}, - job_id_chain => $job_id_chain, - job_id_href => $job_id_href, - job_reservation_name => 
$active_parameter_href->{job_reservation_name}, - log => $log, + base_command => $profile_base_command, + case_id => $case_id, + dependency_method => q{sample_to_case}, + job_id_chain => $recipe{job_id_chain}, + job_id_href => $job_id_href, + job_reservation_name => $active_parameter_href->{job_reservation_name}, + log => $log, max_parallel_processes_count_href => $file_info_href->{max_parallel_processes_count}, recipe_file_path => $recipe_file_path, @@ -894,13 +865,13 @@ sub analysis_mip_vcfparser_sv_wes { use MIP::Analysis qw{ get_vcf_parser_analysis_suffix }; use MIP::Cluster qw{ get_core_number update_memory_allocation }; use MIP::Get::File qw{ get_io_files }; - use MIP::Get::Parameter qw{ get_recipe_attributes get_recipe_resources }; use MIP::Parse::File qw{ parse_io_outfiles }; use MIP::Processmanagement::Processes qw{ submit_recipe }; use MIP::Program::Gatk qw{ gatk_concatenate_variants }; use MIP::Program::Mip qw{ mip_vcfparser }; - use MIP::Sample_info qw{ set_recipe_outfile_in_sample_info }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Recipes::Analysis::Xargs qw{ xargs_command }; + use MIP::Sample_info qw{ set_recipe_outfile_in_sample_info }; use MIP::Script::Setup_script qw{ setup_script}; ### PREPROCESSING: @@ -926,17 +897,10 @@ sub analysis_mip_vcfparser_sv_wes { my $infile_path = $infile_path_prefix . 
$infile_suffix; my $consensus_analysis_type = $parameter_href->{cache}{consensus_analysis_type}; - my $job_id_chain = get_recipe_attributes( - { - parameter_href => $parameter_href, - recipe_name => $recipe_name, - attribute => q{chain}, - } - ); - my $recipe_mode = $active_parameter_href->{$recipe_name}; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); @@ -953,8 +917,7 @@ sub analysis_mip_vcfparser_sv_wes { my @vcfparser_analysis_types = get_vcf_parser_analysis_suffix( { - vcfparser_outfile_count => - $active_parameter_href->{sv_vcfparser_outfile_count}, + vcfparser_outfile_count => $active_parameter_href->{sv_vcfparser_outfile_count}, } ); @@ -965,7 +928,7 @@ sub analysis_mip_vcfparser_sv_wes { %io, parse_io_outfiles( { - chain_id => $job_id_chain, + chain_id => $recipe{job_id_chain}, id => $case_id, file_info_href => $file_info_href, file_name_prefix => $infile_name_prefix, @@ -991,7 +954,7 @@ sub analysis_mip_vcfparser_sv_wes { filehandle => $filehandle, job_id_href => $job_id_href, memory_allocation => $MEMORY_ALLOCATION, - process_time => $recipe_resource{time}, + process_time => $recipe{time}, recipe_directory => $recipe_name, recipe_name => $recipe_name, temp_directory => $temp_directory, @@ -1017,14 +980,10 @@ sub analysis_mip_vcfparser_sv_wes { $select_file_matching_column = $active_parameter_href->{sv_vcfparser_select_file_matching_column}; - if ( - exists - $active_parameter_href->{sv_vcfparser_select_feature_annotation_columns} ) - { + if ( exists $active_parameter_href->{sv_vcfparser_select_feature_annotation_columns} ) { @select_feature_annotation_columns = - @{ $active_parameter_href->{sv_vcfparser_select_feature_annotation_columns} - }; + @{ $active_parameter_href->{sv_vcfparser_select_feature_annotation_columns} }; } ## Select outfile @@ -1040,23 +999,21 @@ sub 
analysis_mip_vcfparser_sv_wes { per_gene => $active_parameter_href->{sv_vcfparser_per_gene}, pli_values_file_path => $active_parameter_href->{vcfparser_pli_score_file}, range_feature_annotation_columns_ref => - \@{ $active_parameter_href->{sv_vcfparser_range_feature_annotation_columns} - }, - range_feature_file_path => - $active_parameter_href->{sv_vcfparser_range_feature_file}, + \@{ $active_parameter_href->{sv_vcfparser_range_feature_annotation_columns} }, + range_feature_file_path => $active_parameter_href->{sv_vcfparser_range_feature_file}, select_feature_annotation_columns_ref => \@select_feature_annotation_columns, - stderrfile_path => $recipe_file_path . $DOT . q{stderr.txt}, - stdoutfile_path => $outfile_path_prefix . $outfile_suffixes[0], - select_feature_file_path => $select_file, - select_feature_matching_column => $select_file_matching_column, - select_outfile => $select_outfile, + stderrfile_path => $recipe_file_path . $DOT . q{stderr.txt}, + stdoutfile_path => $outfile_path_prefix . 
$outfile_suffixes[0], + select_feature_file_path => $select_file, + select_feature_matching_column => $select_file_matching_column, + select_outfile => $select_outfile, } ); say {$filehandle} $NEWLINE; close $filehandle or $log->logcroak(q{Could not close filehandle}); - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { ## Collect QC metadata info for later use set_recipe_outfile_in_sample_info( @@ -1077,8 +1034,7 @@ sub analysis_mip_vcfparser_sv_wes { ## Collect databases(s) from a potentially merged gene panel file and adds them to sample_info set_gene_panel( { - aggregate_gene_panel_file => - $active_parameter_href->{$gene_panel_file}, + aggregate_gene_panel_file => $active_parameter_href->{$gene_panel_file}, aggregate_gene_panels_key => $gene_panel_key, recipe_name => $recipe_name, sample_info_href => $sample_info_href, @@ -1088,13 +1044,13 @@ sub analysis_mip_vcfparser_sv_wes { submit_recipe( { - base_command => $profile_base_command, - case_id => $case_id, - dependency_method => q{sample_to_case}, - job_id_chain => $job_id_chain, - job_id_href => $job_id_href, - job_reservation_name => $active_parameter_href->{job_reservation_name}, - log => $log, + base_command => $profile_base_command, + case_id => $case_id, + dependency_method => q{sample_to_case}, + job_id_chain => $recipe{job_id_chain}, + job_id_href => $job_id_href, + job_reservation_name => $active_parameter_href->{job_reservation_name}, + log => $log, max_parallel_processes_count_href => $file_info_href->{max_parallel_processes_count}, recipe_file_path => $recipe_file_path, @@ -1208,13 +1164,13 @@ sub analysis_mip_vcfparser_sv_wgs { use MIP::Analysis qw{ get_vcf_parser_analysis_suffix }; use MIP::List qw{ check_element_exist_hash_of_array }; use MIP::Get::File qw{ get_io_files }; - use MIP::Get::Parameter qw{ get_recipe_attributes get_recipe_resources }; use MIP::Parse::File qw{ parse_io_outfiles }; use MIP::Processmanagement::Processes qw{ submit_recipe }; use MIP::Program::Gatk qw{ 
gatk_concatenate_variants }; use MIP::Program::Mip qw{ mip_vcfparser }; - use MIP::Sample_info qw{ set_recipe_outfile_in_sample_info }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Recipes::Analysis::Xargs qw{ xargs_command }; + use MIP::Sample_info qw{ set_recipe_outfile_in_sample_info }; use MIP::Script::Setup_script qw{ setup_script}; ### PREPROCESSING: @@ -1239,17 +1195,10 @@ sub analysis_mip_vcfparser_sv_wgs { my %infile_path = %{ $io{in}{file_path_href} }; my $consensus_analysis_type = $parameter_href->{cache}{consensus_analysis_type}; - my $job_id_chain = get_recipe_attributes( - { - parameter_href => $parameter_href, - recipe_name => $recipe_name, - attribute => q{chain}, - } - ); - my $recipe_mode = $active_parameter_href->{$recipe_name}; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); @@ -1258,8 +1207,7 @@ sub analysis_mip_vcfparser_sv_wgs { my @vcfparser_analysis_types = get_vcf_parser_analysis_suffix( { - vcfparser_outfile_count => - $active_parameter_href->{sv_vcfparser_outfile_count}, + vcfparser_outfile_count => $active_parameter_href->{sv_vcfparser_outfile_count}, } ); @@ -1270,7 +1218,7 @@ sub analysis_mip_vcfparser_sv_wgs { %io, parse_io_outfiles( { - chain_id => $job_id_chain, + chain_id => $recipe{job_id_chain}, id => $case_id, file_info_href => $file_info_href, file_name_prefix => $infile_name_prefix, @@ -1298,12 +1246,12 @@ sub analysis_mip_vcfparser_sv_wgs { my ( $recipe_file_path, $recipe_info_path ) = setup_script( { active_parameter_href => $active_parameter_href, - core_number => $recipe_resource{core_number}, + core_number => $recipe{core_number}, directory_id => $case_id, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, - process_time => $recipe_resource{time}, + memory_allocation => $recipe{memory}, + 
process_time => $recipe{time}, recipe_directory => $recipe_name, recipe_name => $recipe_name, temp_directory => $temp_directory, @@ -1318,7 +1266,7 @@ sub analysis_mip_vcfparser_sv_wgs { ## Create file commands for xargs ( $xargs_file_counter, $xargs_file_path_prefix ) = xargs_command( { - core_number => $recipe_resource{core_number}, + core_number => $recipe{core_number}, filehandle => $filehandle, file_path => $recipe_file_path, recipe_info_path => $recipe_info_path, @@ -1337,10 +1285,8 @@ sub analysis_mip_vcfparser_sv_wgs { $padding = $ANNOTATION_DISTANCE_MT; } - my $log_file_path = - catfile( $outdir_path, q{vcfparser} . $UNDERSCORE . $contig . q{.log} ); - my $vcfparser_outfile_path = - $outfile_path_prefix . $DOT . $contig . $outfile_suffix; + my $log_file_path = catfile( $outdir_path, q{vcfparser} . $UNDERSCORE . $contig . q{.log} ); + my $vcfparser_outfile_path = $outfile_path_prefix . $DOT . $contig . $outfile_suffix; my $vcfparser_xargs_file_path_prefix = $xargs_file_path_prefix . $DOT . $contig; my @select_feature_annotation_columns; my $select_file; @@ -1361,51 +1307,42 @@ sub analysis_mip_vcfparser_sv_wgs { { ## List of genes to analyse separately - $select_file = - catfile( $active_parameter_href->{sv_vcfparser_select_file} ); + $select_file = catfile( $active_parameter_href->{sv_vcfparser_select_file} ); ## Column of HGNC Symbol in select file ("-sf") $select_file_matching_column = $active_parameter_href->{sv_vcfparser_select_file_matching_column}; if ( - exists $active_parameter_href - ->{sv_vcfparser_select_feature_annotation_columns} ) + exists $active_parameter_href->{sv_vcfparser_select_feature_annotation_columns} + ) { @select_feature_annotation_columns = - @{ $active_parameter_href - ->{sv_vcfparser_select_feature_annotation_columns} }; + @{ $active_parameter_href->{sv_vcfparser_select_feature_annotation_columns} }; } ## Select outfile - $select_outfile = - $outfile_path_prefix . $DOT . $contig . 
$outfile_suffixes[1]; + $select_outfile = $outfile_path_prefix . $DOT . $contig . $outfile_suffixes[1]; } } mip_vcfparser( { - filehandle => $xargsfilehandle, - infile_path => $infile_path{$contig}, - log_file_path => $log_file_path, - padding => $padding, - parse_vep => $active_parameter_href->{sv_varianteffectpredictor}, - per_gene => $active_parameter_href->{sv_vcfparser_per_gene}, - pli_values_file_path => - $active_parameter_href->{vcfparser_pli_score_file}, - range_feature_annotation_columns_ref => \@{ - $active_parameter_href - ->{sv_vcfparser_range_feature_annotation_columns} - }, + filehandle => $xargsfilehandle, + infile_path => $infile_path{$contig}, + log_file_path => $log_file_path, + padding => $padding, + parse_vep => $active_parameter_href->{sv_varianteffectpredictor}, + per_gene => $active_parameter_href->{sv_vcfparser_per_gene}, + pli_values_file_path => $active_parameter_href->{vcfparser_pli_score_file}, + range_feature_annotation_columns_ref => + \@{ $active_parameter_href->{sv_vcfparser_range_feature_annotation_columns} }, range_feature_file_path => $active_parameter_href->{sv_vcfparser_range_feature_file}, - select_feature_annotation_columns_ref => - \@select_feature_annotation_columns, - stderrfile_path => $vcfparser_xargs_file_path_prefix - . $DOT - . q{stderr.txt}, - stdoutfile_path => $vcfparser_outfile_path, + select_feature_annotation_columns_ref => \@select_feature_annotation_columns, + stderrfile_path => $vcfparser_xargs_file_path_prefix . $DOT . q{stderr.txt}, + stdoutfile_path => $vcfparser_outfile_path, select_feature_file_path => $select_file, select_feature_matching_column => $select_file_matching_column, select_outfile => $select_outfile, @@ -1456,7 +1393,7 @@ sub analysis_mip_vcfparser_sv_wgs { close $xargsfilehandle or $log->logcroak(q{Could not close xargsfilehandle}); - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { my $outfile_sample_info_prefix = $outfile_path_prefix . $DOT . $contigs[0] . 
$outfile_suffix; @@ -1480,8 +1417,7 @@ sub analysis_mip_vcfparser_sv_wgs { ## Collect databases(s) from a potentially merged gene panel file and adds them to sample_info set_gene_panel( { - aggregate_gene_panel_file => - $active_parameter_href->{$gene_panel_file}, + aggregate_gene_panel_file => $active_parameter_href->{$gene_panel_file}, aggregate_gene_panels_key => $gene_panel_key, recipe_name => $recipe_name, sample_info_href => $sample_info_href, @@ -1491,13 +1427,13 @@ sub analysis_mip_vcfparser_sv_wgs { submit_recipe( { - base_command => $profile_base_command, - case_id => $case_id, - dependency_method => q{sample_to_case}, - job_id_chain => $job_id_chain, - job_id_href => $job_id_href, - job_reservation_name => $active_parameter_href->{job_reservation_name}, - log => $log, + base_command => $profile_base_command, + case_id => $case_id, + dependency_method => q{sample_to_case}, + job_id_chain => $recipe{job_id_chain}, + job_id_href => $job_id_href, + job_reservation_name => $active_parameter_href->{job_reservation_name}, + log => $log, max_parallel_processes_count_href => $file_info_href->{max_parallel_processes_count}, recipe_file_path => $recipe_file_path, diff --git a/lib/MIP/Recipes/Analysis/Mip_vercollect.pm b/lib/MIP/Recipes/Analysis/Mip_vercollect.pm index 901607203..bf05a26e1 100644 --- a/lib/MIP/Recipes/Analysis/Mip_vercollect.pm +++ b/lib/MIP/Recipes/Analysis/Mip_vercollect.pm @@ -109,10 +109,10 @@ sub analysis_mip_vercollect { check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; - use MIP::Get::Parameter qw{ get_recipe_attributes get_recipe_resources }; use MIP::Io::Write qw{ write_to_file }; use MIP::Parse::File qw{ parse_io_outfiles }; use MIP::Processmanagement::Processes qw{ submit_recipe }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Sample_info qw{ set_file_path_to_store set_recipe_outfile_in_sample_info }; use MIP::Script::Setup_script qw{ setup_script }; @@ -122,24 +122,17 @@ sub analysis_mip_vercollect { my 
$log = Log::Log4perl->get_logger($LOG_NAME); ## Unpack parameters - my $job_id_chain = get_recipe_attributes( - { - attribute => q{chain}, - parameter_href => $parameter_href, - recipe_name => $recipe_name, - } - ); - my $recipe_mode = $active_parameter_href->{$recipe_name}; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); my %io = parse_io_outfiles( { - chain_id => $job_id_chain, + chain_id => $recipe{job_id_chain}, id => $case_id, file_info_href => $file_info_href, file_name_prefixes_ref => [$case_id], @@ -160,12 +153,12 @@ sub analysis_mip_vercollect { my ($recipe_file_path) = setup_script( { active_parameter_href => $active_parameter_href, - core_number => $recipe_resource{core_number}, + core_number => $recipe{core_number}, directory_id => $case_id, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, - process_time => $recipe_resource{time}, + memory_allocation => $recipe{memory}, + process_time => $recipe{time}, recipe_directory => $recipe_name, recipe_name => $recipe_name, set_pipefail => 0, @@ -197,7 +190,7 @@ sub analysis_mip_vercollect { close $filehandle or $log->logcroak(q{Could not close filehandle}); - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { set_recipe_outfile_in_sample_info( { @@ -222,7 +215,7 @@ sub analysis_mip_vercollect { base_command => $profile_base_command, dependency_method => q{add_to_all}, job_dependency_type => q{afterok}, - job_id_chain => $job_id_chain, + job_id_chain => $recipe{job_id_chain}, job_id_href => $job_id_href, job_reservation_name => $active_parameter_href->{job_reservation_name}, log => $log, diff --git a/lib/MIP/Recipes/Analysis/Multiqc.pm b/lib/MIP/Recipes/Analysis/Multiqc.pm index e06933975..2fa644a39 100644 --- a/lib/MIP/Recipes/Analysis/Multiqc.pm +++ b/lib/MIP/Recipes/Analysis/Multiqc.pm @@ 
-111,7 +111,7 @@ sub analysis_multiqc { check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; - use MIP::Get::Parameter qw{ get_recipe_attributes get_recipe_resources }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Parse::File qw{ parse_io_outfiles }; use MIP::Processmanagement::Processes qw{ submit_recipe }; use MIP::Program::Multiqc qw{ multiqc }; @@ -124,17 +124,10 @@ sub analysis_multiqc { my $log = Log::Log4perl->get_logger($LOG_NAME); ## Unpack parameters - my $job_id_chain = get_recipe_attributes( - { - parameter_href => $parameter_href, - recipe_name => $recipe_name, - attribute => q{chain}, - } - ); - my $recipe_mode = $active_parameter_href->{$recipe_name}; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); @@ -147,12 +140,12 @@ sub analysis_multiqc { my ($recipe_file_path) = setup_script( { active_parameter_href => $active_parameter_href, - core_number => $recipe_resource{core_number}, + core_number => $recipe{core_number}, directory_id => $case_id, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, - process_time => $recipe_resource{time}, + memory_allocation => $recipe{memory}, + process_time => $recipe{time}, recipe_directory => $recipe_name, recipe_name => $recipe_name, } @@ -178,8 +171,7 @@ sub analysis_multiqc { foreach my $report_id (@report_ids) { ## Assign directories - my $outdir_path = - catdir( $active_parameter_href->{outdata_dir}, $report_id, $recipe_name ); + my $outdir_path = catdir( $active_parameter_href->{outdata_dir}, $report_id, $recipe_name ); ## Analyse sample id only for this report if ( $report_id ne $case_id ) { @@ -197,7 +189,7 @@ sub analysis_multiqc { ); say {$filehandle} $NEWLINE; - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { my %multiqc_outfile_format = ( html => 
catfile( $outdir_path, q{multiqc_report.html} ), @@ -231,14 +223,14 @@ sub analysis_multiqc { } close $filehandle; - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { submit_recipe( { base_command => $profile_base_command, dependency_method => q{add_to_all}, job_dependency_type => q{afterok}, - job_id_chain => $job_id_chain, + job_id_chain => $recipe{job_id_chain}, job_id_href => $job_id_href, job_reservation_name => $active_parameter_href->{job_reservation_name}, log => $log, diff --git a/lib/MIP/Recipes/Analysis/Peddy.pm b/lib/MIP/Recipes/Analysis/Peddy.pm index e46935dce..8b328f90a 100644 --- a/lib/MIP/Recipes/Analysis/Peddy.pm +++ b/lib/MIP/Recipes/Analysis/Peddy.pm @@ -120,7 +120,7 @@ sub analysis_peddy { use MIP::Pedigree qw{ create_fam_file }; use MIP::Get::File qw{ get_io_files }; - use MIP::Get::Parameter qw{ get_recipe_attributes get_recipe_resources }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Parse::File qw{ parse_io_outfiles }; use MIP::Processmanagement::Processes qw{ submit_recipe }; use MIP::Program::Bcftools qw{ bcftools_view_and_index_vcf }; @@ -148,17 +148,10 @@ sub analysis_peddy { my $infile_path = $io{in}{file_path}; my $genome_reference_version = $file_info_href->{human_genome_reference_version}; - my $job_id_chain = get_recipe_attributes( - { - attribute => q{chain}, - parameter_href => $parameter_href, - recipe_name => $recipe_name, - } - ); - my $recipe_mode = $active_parameter_href->{$recipe_name}; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); @@ -175,7 +168,7 @@ sub analysis_peddy { %io, parse_io_outfiles( { - chain_id => $job_id_chain, + chain_id => $recipe{job_id_chain}, id => $case_id, file_info_href => $file_info_href, file_name_prefix => $infile_name_prefix, @@ -199,12 +192,12 @@ sub analysis_peddy { my ( $recipe_file_path, $recipe_info_path ) = 
setup_script( { active_parameter_href => $active_parameter_href, - core_number => $recipe_resource{core_number}, + core_number => $recipe{core_number}, directory_id => $case_id, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, - process_time => $recipe_resource{time}, + memory_allocation => $recipe{memory}, + process_time => $recipe{time}, recipe_directory => $recipe_name, recipe_name => $recipe_name, } @@ -243,7 +236,7 @@ sub analysis_peddy { close $filehandle or $log->logcroak(q{Could not close filehandle}); - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { PEDDY_OUTPUT_FILES: while ( my ( $outfile_tag, $outfile_path ) = each %outfile_path ) { @@ -272,13 +265,13 @@ sub analysis_peddy { submit_recipe( { - base_command => $profile_base_command, - case_id => $case_id, - dependency_method => q{case_to_island}, - job_id_chain => $job_id_chain, - job_id_href => $job_id_href, - job_reservation_name => $active_parameter_href->{job_reservation_name}, - log => $log, + base_command => $profile_base_command, + case_id => $case_id, + dependency_method => q{case_to_island}, + job_id_chain => $recipe{job_id_chain}, + job_id_href => $job_id_href, + job_reservation_name => $active_parameter_href->{job_reservation_name}, + log => $log, max_parallel_processes_count_href => $file_info_href->{max_parallel_processes_count}, recipe_file_path => $recipe_file_path, diff --git a/lib/MIP/Recipes/Analysis/Picardtools_collecthsmetrics.pm b/lib/MIP/Recipes/Analysis/Picardtools_collecthsmetrics.pm index 25a72661f..b8c9384a8 100644 --- a/lib/MIP/Recipes/Analysis/Picardtools_collecthsmetrics.pm +++ b/lib/MIP/Recipes/Analysis/Picardtools_collecthsmetrics.pm @@ -131,11 +131,11 @@ sub analysis_picardtools_collecthsmetrics { use MIP::Active_parameter qw{ get_exome_target_bed_file }; use MIP::Get::File qw{ get_io_files }; - use MIP::Get::Parameter qw{ get_recipe_attributes get_recipe_resources }; use MIP::Language::Java qw{ java_core }; 
use MIP::Parse::File qw{ parse_io_outfiles }; use MIP::Processmanagement::Processes qw{ submit_recipe }; use MIP::Program::Picardtools qw{ picardtools_collecthsmetrics }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Sample_info qw{ set_recipe_outfile_in_sample_info }; use MIP::Script::Setup_script qw{ setup_script }; @@ -160,17 +160,10 @@ sub analysis_picardtools_collecthsmetrics { my $infile_suffix = $io{in}{file_suffix}; my $infile_path = $infile_path_prefix . $infile_suffix; - my $job_id_chain = get_recipe_attributes( - { - attribute => q{chain}, - parameter_href => $parameter_href, - recipe_name => $recipe_name, - } - ); - my $recipe_mode = $active_parameter_href->{$recipe_name}; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); @@ -179,7 +172,7 @@ sub analysis_picardtools_collecthsmetrics { %io, parse_io_outfiles( { - chain_id => $job_id_chain, + chain_id => $recipe{job_id_chain}, id => $sample_id, file_info_href => $file_info_href, file_name_prefixes_ref => [$infile_name_prefix], @@ -206,12 +199,12 @@ sub analysis_picardtools_collecthsmetrics { my ($recipe_file_path) = setup_script( { active_parameter_href => $active_parameter_href, - core_number => $recipe_resource{core_number}, + core_number => $recipe{core_number}, directory_id => $sample_id, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, - process_time => $recipe_resource{time}, + memory_allocation => $recipe{memory}, + process_time => $recipe{time}, recipe_directory => $recipe_name, recipe_name => $recipe_name, temp_directory => $temp_directory, @@ -247,7 +240,7 @@ sub analysis_picardtools_collecthsmetrics { ); say {$filehandle} $NEWLINE; - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { ## Collect QC metadata info for later use set_recipe_outfile_in_sample_info( 
@@ -262,14 +255,14 @@ sub analysis_picardtools_collecthsmetrics { } close $filehandle; - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { submit_recipe( { base_command => $profile_base_command, case_id => $case_id, dependency_method => q{sample_to_island}, - job_id_chain => $job_id_chain, + job_id_chain => $recipe{job_id_chain}, job_id_href => $job_id_href, job_reservation_name => $active_parameter_href->{job_reservation_name}, log => $log, diff --git a/lib/MIP/Recipes/Analysis/Picardtools_collectmultiplemetrics.pm b/lib/MIP/Recipes/Analysis/Picardtools_collectmultiplemetrics.pm index 238b1ea11..ad90f2620 100644 --- a/lib/MIP/Recipes/Analysis/Picardtools_collectmultiplemetrics.pm +++ b/lib/MIP/Recipes/Analysis/Picardtools_collectmultiplemetrics.pm @@ -130,11 +130,11 @@ sub analysis_picardtools_collectmultiplemetrics { check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; use MIP::Get::File qw{ get_io_files }; - use MIP::Get::Parameter qw{ get_recipe_attributes get_recipe_resources }; use MIP::Language::Java qw{ java_core }; use MIP::Parse::File qw{ parse_io_outfiles }; use MIP::Processmanagement::Processes qw{ submit_recipe }; use MIP::Program::Picardtools qw{ picardtools_collectmultiplemetrics }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Sample_info qw{ set_recipe_outfile_in_sample_info }; use MIP::Script::Setup_script qw{ setup_script }; @@ -159,17 +159,10 @@ sub analysis_picardtools_collectmultiplemetrics { my $infile_suffix = $io{in}{file_suffix}; my $infile_path = $infile_path_prefix . 
$infile_suffix; - my $job_id_chain = get_recipe_attributes( - { - attribute => q{chain}, - parameter_href => $parameter_href, - recipe_name => $recipe_name, - } - ); - my $recipe_mode = $active_parameter_href->{$recipe_name}; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); @@ -178,7 +171,7 @@ sub analysis_picardtools_collectmultiplemetrics { %io, parse_io_outfiles( { - chain_id => $job_id_chain, + chain_id => $recipe{job_id_chain}, id => $sample_id, file_info_href => $file_info_href, file_name_prefixes_ref => [$infile_name_prefix], @@ -200,13 +193,13 @@ sub analysis_picardtools_collectmultiplemetrics { my ($recipe_file_path) = setup_script( { active_parameter_href => $active_parameter_href, - core_number => $recipe_resource{core_number}, + core_number => $recipe{core_number}, directory_id => $sample_id, job_id_href => $job_id_href, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, - process_time => $recipe_resource{time}, + memory_allocation => $recipe{memory}, + process_time => $recipe{time}, recipe_directory => $recipe_name, recipe_name => $recipe_name, temp_directory => $temp_directory, @@ -221,8 +214,7 @@ sub analysis_picardtools_collectmultiplemetrics { { filehandle => $filehandle, infile_path => $infile_path, - java_jar => - catfile( $active_parameter_href->{picardtools_path}, q{picard.jar} ), + java_jar => catfile( $active_parameter_href->{picardtools_path}, q{picard.jar} ), java_use_large_pages => $active_parameter_href->{java_use_large_pages}, memory_allocation => q{Xmx} . $JAVA_MEMORY_ALLOCATION . 
q{g}, outfile_path => $outfile_path_prefix, @@ -234,13 +226,11 @@ sub analysis_picardtools_collectmultiplemetrics { close $filehandle; - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { ## Collect QC metadata info for later use my %metric_outfile_path = ( - collectmultiplemetrics => $outfile_path_prefix - . $DOT - . q{alignment_summary_metrics}, + collectmultiplemetrics => $outfile_path_prefix . $DOT . q{alignment_summary_metrics}, collectmultiplemetricsinsertsize => $outfile_path_prefix . $DOT . q{insert_size_metrics}, @@ -261,13 +251,13 @@ sub analysis_picardtools_collectmultiplemetrics { submit_recipe( { - base_command => $profile_base_command, - case_id => $case_id, - dependency_method => q{sample_to_island}, - job_id_chain => $job_id_chain, - job_id_href => $job_id_href, - job_reservation_name => $active_parameter_href->{job_reservation_name}, - log => $log, + base_command => $profile_base_command, + case_id => $case_id, + dependency_method => q{sample_to_island}, + job_id_chain => $recipe{job_id_chain}, + job_id_href => $job_id_href, + job_reservation_name => $active_parameter_href->{job_reservation_name}, + log => $log, max_parallel_processes_count_href => $file_info_href->{max_parallel_processes_count}, recipe_file_path => $recipe_file_path, diff --git a/lib/MIP/Recipes/Analysis/Picardtools_collectrnaseqmetrics.pm b/lib/MIP/Recipes/Analysis/Picardtools_collectrnaseqmetrics.pm index d8cbd213c..2861966d4 100644 --- a/lib/MIP/Recipes/Analysis/Picardtools_collectrnaseqmetrics.pm +++ b/lib/MIP/Recipes/Analysis/Picardtools_collectrnaseqmetrics.pm @@ -130,10 +130,10 @@ sub analysis_picardtools_collectrnaseqmetrics { check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; use MIP::Get::File qw{ get_io_files }; - use MIP::Get::Parameter qw{ get_recipe_attributes get_recipe_resources }; use MIP::Parse::File qw{ parse_io_outfiles }; use MIP::Processmanagement::Processes qw{ submit_recipe }; use MIP::Program::Picardtools qw{ 
picardtools_collectrnaseqmetrics }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Sample_info qw{ set_recipe_outfile_in_sample_info }; use MIP::Script::Setup_script qw{ setup_script }; @@ -158,17 +158,10 @@ sub analysis_picardtools_collectrnaseqmetrics { my $infile_suffix = $io{in}{file_suffix}; my $infile_path = $infile_path_prefix . $infile_suffix; - my $job_id_chain = get_recipe_attributes( - { - attribute => q{chain}, - parameter_href => $parameter_href, - recipe_name => $recipe_name, - } - ); - my $recipe_mode = $active_parameter_href->{$recipe_name}; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); @@ -177,7 +170,7 @@ sub analysis_picardtools_collectrnaseqmetrics { %io, parse_io_outfiles( { - chain_id => $job_id_chain, + chain_id => $recipe{job_id_chain}, id => $sample_id, file_info_href => $file_info_href, file_name_prefixes_ref => [$infile_name_prefix], @@ -204,12 +197,12 @@ sub analysis_picardtools_collectrnaseqmetrics { my ($recipe_file_path) = setup_script( { active_parameter_href => $active_parameter_href, - core_number => $recipe_resource{core_number}, + core_number => $recipe{core_number}, directory_id => $sample_id, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, - process_time => $recipe_resource{time}, + memory_allocation => $recipe{memory}, + process_time => $recipe{time}, recipe_directory => $recipe_name, recipe_name => $recipe_name, temp_directory => $temp_directory, @@ -231,8 +224,7 @@ sub analysis_picardtools_collectrnaseqmetrics { filehandle => $filehandle, gene_annotation_file_path => $transcript_annotation . 
$refflat_ending, infile_path => $infile_path, - java_jar => - catfile( $active_parameter_href->{picardtools_path}, q{picard.jar} ), + java_jar => catfile( $active_parameter_href->{picardtools_path}, q{picard.jar} ), java_use_large_pages => $active_parameter_href->{java_use_large_pages}, memory_allocation => q{Xmx} . $JAVA_MEMORY_ALLOCATION . q{g}, outfile_path => $outfile_path, @@ -243,7 +235,7 @@ sub analysis_picardtools_collectrnaseqmetrics { ); say {$filehandle} $NEWLINE; - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { ## Collect QC metadata info for later use set_recipe_outfile_in_sample_info( @@ -258,17 +250,17 @@ sub analysis_picardtools_collectrnaseqmetrics { } close $filehandle; - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { submit_recipe( { - base_command => $profile_base_command, - case_id => $case_id, - dependency_method => q{sample_to_island}, - job_id_chain => $job_id_chain, - job_id_href => $job_id_href, - job_reservation_name => $active_parameter_href->{job_reservation_name}, - log => $log, + base_command => $profile_base_command, + case_id => $case_id, + dependency_method => q{sample_to_island}, + job_id_chain => $recipe{job_id_chain}, + job_id_href => $job_id_href, + job_reservation_name => $active_parameter_href->{job_reservation_name}, + log => $log, max_parallel_processes_count_href => $file_info_href->{max_parallel_processes_count}, recipe_file_path => $recipe_file_path, diff --git a/lib/MIP/Recipes/Analysis/Picardtools_mergesamfiles.pm b/lib/MIP/Recipes/Analysis/Picardtools_mergesamfiles.pm index af52ff741..9c71ddb8d 100644 --- a/lib/MIP/Recipes/Analysis/Picardtools_mergesamfiles.pm +++ b/lib/MIP/Recipes/Analysis/Picardtools_mergesamfiles.pm @@ -150,7 +150,7 @@ sub analysis_picardtools_mergesamfiles { use MIP::Cluster qw{ get_parallel_processes update_memory_allocation }; use MIP::File_info qw{ set_merged_infile_prefix }; use MIP::Get::File qw{ get_io_files }; - use MIP::Get::Parameter qw{ get_recipe_attributes 
get_recipe_resources }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Program::Gnu::Coreutils qw{ gnu_mv }; use MIP::Parse::File qw{ parse_io_outfiles }; use MIP::Processmanagement::Processes qw{ submit_recipe }; @@ -181,27 +181,20 @@ sub analysis_picardtools_mergesamfiles { my @infile_name_prefixes = @{ $io{in}{file_name_prefixes} }; my @infile_paths = @{ $io{in}{file_paths} }; - my %rec_atr = get_recipe_attributes( - { - parameter_href => $parameter_href, - recipe_name => $recipe_name, - } - ); - my $job_id_chain = $rec_atr{chain}; my $consensus_analysis_type = $parameter_href->{cache}{consensus_analysis_type}; - my $recipe_mode = $active_parameter_href->{$recipe_name}; my $xargs_file_path_prefix; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); - my $core_number = $recipe_resource{core_number}; - my $memory_allocation = $recipe_resource{memory}; + my $core_number = $recipe{core_number}; + my $memory_allocation = $recipe{memory}; ## Assign suffix - my $outfile_suffix = $rec_atr{outfile_suffix}; + my $outfile_suffix = $recipe{outfile_suffix}; ## Extract lanes my $lanes_id = join $EMPTY_STR, @{ $file_info_href->{$sample_id}{lanes} }; @@ -230,7 +223,7 @@ sub analysis_picardtools_mergesamfiles { %io, parse_io_outfiles( { - chain_id => $job_id_chain, + chain_id => $recipe{job_id_chain}, id => $sample_id, file_info_href => $file_info_href, file_paths_ref => \@outfile_paths, @@ -285,7 +278,7 @@ sub analysis_picardtools_mergesamfiles { filehandle => $filehandle, job_id_href => $job_id_href, memory_allocation => $memory_allocation, - process_time => $recipe_resource{time}, + process_time => $recipe{time}, recipe_directory => $recipe_name, recipe_name => $recipe_name, temp_directory => $temp_directory, @@ -449,7 +442,7 @@ sub analysis_picardtools_mergesamfiles { close $xargsfilehandle; close 
$filehandle; - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { my $qc_outfile_path = $outfile_paths[0]; set_recipe_outfile_in_sample_info( @@ -467,7 +460,7 @@ sub analysis_picardtools_mergesamfiles { base_command => $profile_base_command, case_id => $case_id, dependency_method => q{sample_to_sample}, - job_id_chain => $job_id_chain, + job_id_chain => $recipe{job_id_chain}, job_id_href => $job_id_href, job_reservation_name => $active_parameter_href->{job_reservation_name}, log => $log, diff --git a/lib/MIP/Recipes/Analysis/Plink.pm b/lib/MIP/Recipes/Analysis/Plink.pm index 4efe7bde6..05a2202f8 100644 --- a/lib/MIP/Recipes/Analysis/Plink.pm +++ b/lib/MIP/Recipes/Analysis/Plink.pm @@ -129,12 +129,12 @@ sub analysis_plink { use MIP::Pedigree qw{ create_fam_file }; use MIP::Get::File qw{ get_io_files }; - use MIP::Get::Parameter qw{ get_recipe_attributes get_recipe_resources }; use MIP::Parse::File qw{ parse_io_outfiles }; use MIP::Processmanagement::Processes qw{ submit_recipe }; use MIP::Program::Bcftools qw{ bcftools_annotate bcftools_sort bcftools_norm bcftools_view }; use MIP::Program::Plink qw{ plink_calculate_inbreeding plink_check_sex_chroms plink_create_mibs plink_fix_fam_ped_map_freq plink_sex_check plink_variant_pruning }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Sample_info qw{ set_recipe_outfile_in_sample_info set_recipe_metafile_in_sample_info }; use MIP::Script::Setup_script qw{ setup_script }; @@ -161,18 +161,11 @@ sub analysis_plink { my $consensus_analysis_type = $parameter_href->{cache}{consensus_analysis_type}; my $human_genome_reference_version = $file_info_href->{human_genome_reference_version}; my $human_genome_reference_source = $file_info_href->{human_genome_reference_source}; - my $job_id_chain = get_recipe_attributes( - { - parameter_href => $parameter_href, - recipe_name => $recipe_name, - attribute => q{chain}, - } - ); - my $recipe_mode = $active_parameter_href->{$recipe_name}; - my @sample_ids = @{ 
$active_parameter_href->{sample_ids} }; - my %recipe_resource = get_recipe_resources( + my @sample_ids = @{ $active_parameter_href->{sample_ids} }; + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); @@ -221,7 +214,7 @@ sub analysis_plink { %io, parse_io_outfiles( { - chain_id => $job_id_chain, + chain_id => $recipe{job_id_chain}, id => $case_id, file_info_href => $file_info_href, file_name_prefix => $infile_name_prefix, @@ -246,12 +239,12 @@ sub analysis_plink { my ( $recipe_file_path, $recipe_info_path ) = setup_script( { active_parameter_href => $active_parameter_href, - core_number => $recipe_resource{core_number}, + core_number => $recipe{core_number}, directory_id => $case_id, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, - process_time => $recipe_resource{time}, + memory_allocation => $recipe{memory}, + process_time => $recipe{time}, recipe_directory => $recipe_name, recipe_name => $recipe_name, } @@ -310,7 +303,7 @@ sub analysis_plink { ); print {$filehandle} $PIPE . 
$SPACE; - my $sort_memory = $recipe_resource{memory} - 2; + my $sort_memory = $recipe{memory} - 2; bcftools_sort( { filehandle => $filehandle, @@ -479,7 +472,7 @@ sub analysis_plink { close $filehandle or $log->logcroak(q{Could not close filehandle}); - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { while ( my ( $outfile_tag, $outfile_path ) = each %outfile_path ) { @@ -499,7 +492,7 @@ sub analysis_plink { base_command => $profile_base_command, case_id => $case_id, dependency_method => q{case_to_island}, - job_id_chain => $job_id_chain, + job_id_chain => $recipe{job_id_chain}, job_id_href => $job_id_href, job_reservation_name => $active_parameter_href->{job_reservation_name}, log => $log, diff --git a/lib/MIP/Recipes/Analysis/Prepareforvariantannotationblock.pm b/lib/MIP/Recipes/Analysis/Prepareforvariantannotationblock.pm index 1ab6c73f8..05d0c3fad 100644 --- a/lib/MIP/Recipes/Analysis/Prepareforvariantannotationblock.pm +++ b/lib/MIP/Recipes/Analysis/Prepareforvariantannotationblock.pm @@ -134,7 +134,7 @@ sub analysis_prepareforvariantannotationblock { use MIP::Cluster qw{ get_core_number update_memory_allocation }; use MIP::Get::File qw{ get_io_files }; - use MIP::Get::Parameter qw{ get_recipe_attributes get_recipe_resources }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Parse::File qw{ parse_io_outfiles }; use MIP::Processmanagement::Processes qw{ submit_recipe }; use MIP::Program::Bcftools qw{ bcftools_view }; @@ -163,17 +163,10 @@ sub analysis_prepareforvariantannotationblock { my $consensus_analysis_type = $parameter_href->{cache}{consensus_analysis_type}; my @contigs = @{ $file_info_href->{contigs_size_ordered} }; - my $job_id_chain = get_recipe_attributes( - { - attribute => q{chain}, - parameter_href => $parameter_href, - recipe_name => $recipe_name, - } - ); - my $recipe_mode = $active_parameter_href->{$recipe_name}; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href 
=> $active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); @@ -184,7 +177,7 @@ sub analysis_prepareforvariantannotationblock { %io, parse_io_outfiles( { - chain_id => $job_id_chain, + chain_id => $recipe{job_id_chain}, id => $case_id, file_info_href => $file_info_href, file_name_prefix => $infile_name_prefix, @@ -208,7 +201,7 @@ sub analysis_prepareforvariantannotationblock { { max_cores_per_node => $active_parameter_href->{max_cores_per_node}, modifier_core_number => scalar @{ $file_info_href->{contigs} }, - recipe_core_number => $recipe_resource{core_number}, + recipe_core_number => $recipe{core_number}, } ); ## Update memory depending on how many cores that are being used @@ -216,7 +209,7 @@ sub analysis_prepareforvariantannotationblock { { node_ram_memory => $active_parameter_href->{node_ram_memory}, parallel_processes => $core_number, - process_memory_allocation => $recipe_resource{memory}, + process_memory_allocation => $recipe{memory}, } ); @@ -229,7 +222,7 @@ sub analysis_prepareforvariantannotationblock { directory_id => $case_id, job_id_href => $job_id_href, memory_allocation => $memory_allocation, - process_time => $recipe_resource{time}, + process_time => $recipe{time}, recipe_directory => $recipe_name, recipe_name => $recipe_name, temp_directory => $temp_directory, @@ -280,17 +273,17 @@ sub analysis_prepareforvariantannotationblock { close $xargsfilehandle or $log->logcroak(q{Could not close xargsfilehandle}); - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { submit_recipe( { - base_command => $profile_base_command, - case_id => $case_id, - dependency_method => q{sample_to_case}, - job_id_chain => $job_id_chain, - job_id_href => $job_id_href, - job_reservation_name => $active_parameter_href->{job_reservation_name}, - log => $log, + base_command => $profile_base_command, + case_id => $case_id, + dependency_method => q{sample_to_case}, + job_id_chain => $recipe{job_id_chain}, + job_id_href => $job_id_href, 
+ job_reservation_name => $active_parameter_href->{job_reservation_name}, + log => $log, max_parallel_processes_count_href => $file_info_href->{max_parallel_processes_count}, recipe_file_path => $recipe_file_path, diff --git a/lib/MIP/Recipes/Analysis/Preseq.pm b/lib/MIP/Recipes/Analysis/Preseq.pm index 4e98ddef0..180cb9550 100644 --- a/lib/MIP/Recipes/Analysis/Preseq.pm +++ b/lib/MIP/Recipes/Analysis/Preseq.pm @@ -120,10 +120,10 @@ sub analysis_preseq { check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; use MIP::Get::File qw{ get_io_files }; - use MIP::Get::Parameter qw{ get_recipe_attributes get_recipe_resources }; use MIP::Program::Preseq qw{ preseq_lc_extrap }; use MIP::Parse::File qw{ parse_io_outfiles }; use MIP::Processmanagement::Processes qw{ submit_recipe }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Sample_info qw{ set_recipe_outfile_in_sample_info }; use MIP::Script::Setup_script qw{ setup_script }; @@ -149,17 +149,10 @@ sub analysis_preseq { my $infile_suffix = $io{in}{file_suffix}; my $infile_path = $infile_path_prefix . 
$infile_suffix; - my $job_id_chain = get_recipe_attributes( - { - parameter_href => $parameter_href, - recipe_name => $recipe_name, - attribute => q{chain}, - } - ); - my $recipe_mode = $active_parameter_href->{$recipe_name}; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); @@ -168,7 +161,7 @@ sub analysis_preseq { %io, parse_io_outfiles( { - chain_id => $job_id_chain, + chain_id => $recipe{job_id_chain}, id => $sample_id, file_info_href => $file_info_href, file_name_prefixes_ref => [$infile_name_prefix], @@ -190,12 +183,12 @@ sub analysis_preseq { my ( $recipe_file_path, $recipe_info_path ) = setup_script( { active_parameter_href => $active_parameter_href, - core_number => $recipe_resource{core_number}, + core_number => $recipe{core_number}, directory_id => $sample_id, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, - process_time => $recipe_resource{time}, + memory_allocation => $recipe{memory}, + process_time => $recipe{time}, recipe_directory => $recipe_name, recipe_name => $recipe_name, } @@ -219,7 +212,7 @@ sub analysis_preseq { ## Close filehandle close $filehandle or $log->logcroak(q{Could not close filehandle}); - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { ## Collect QC metadata info for later use set_recipe_outfile_in_sample_info( @@ -234,13 +227,13 @@ sub analysis_preseq { submit_recipe( { - base_command => $profile_base_command, - case_id => $case_id, - dependency_method => q{sample_to_island}, - job_id_chain => $job_id_chain, - job_id_href => $job_id_href, - job_reservation_name => $active_parameter_href->{job_reservation_name}, - log => $log, + base_command => $profile_base_command, + case_id => $case_id, + dependency_method => q{sample_to_island}, + job_id_chain => $recipe{job_id_chain}, + job_id_href => $job_id_href, + 
job_reservation_name => $active_parameter_href->{job_reservation_name}, + log => $log, max_parallel_processes_count_href => $file_info_href->{max_parallel_processes_count}, recipe_file_path => $recipe_file_path, diff --git a/lib/MIP/Recipes/Analysis/Rankvariant.pm b/lib/MIP/Recipes/Analysis/Rankvariant.pm index f0e65d746..7e5fdd815 100644 --- a/lib/MIP/Recipes/Analysis/Rankvariant.pm +++ b/lib/MIP/Recipes/Analysis/Rankvariant.pm @@ -134,13 +134,11 @@ sub analysis_rankvariant { use MIP::Cluster qw{ get_core_number update_memory_allocation }; use MIP::Pedigree qw{ create_fam_file }; use MIP::Get::File qw{ get_io_files }; - use MIP::Get::Parameter qw{ get_recipe_attributes get_recipe_resources }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Parse::File qw{ parse_io_outfiles }; use MIP::Processmanagement::Processes qw{ submit_recipe }; - use MIP::Program::Genmod - qw{ genmod_annotate genmod_compound genmod_models genmod_score }; - use MIP::Sample_info - qw{ set_recipe_metafile_in_sample_info set_recipe_outfile_in_sample_info }; + use MIP::Program::Genmod qw{ genmod_annotate genmod_compound genmod_models genmod_score }; + use MIP::Sample_info qw{ set_recipe_metafile_in_sample_info set_recipe_outfile_in_sample_info }; use MIP::Recipes::Analysis::Xargs qw{ xargs_command }; use MIP::Script::Setup_script qw{ setup_script }; @@ -166,18 +164,11 @@ sub analysis_rankvariant { my $infile_name_prefix = $io{in}{file_name_prefix}; my %infile_path = %{ $io{in}{file_path_href} }; - my $job_id_chain = get_recipe_attributes( - { - attribute => q{chain}, - parameter_href => $parameter_href, - recipe_name => $recipe_name, - } - ); - my $recipe_mode = $active_parameter_href->{$recipe_name}; my $xargs_file_path_prefix; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); @@ -187,7 +178,7 @@ sub analysis_rankvariant { 
%io, parse_io_outfiles( { - chain_id => $job_id_chain, + chain_id => $recipe{job_id_chain}, id => $case_id, file_info_href => $file_info_href, file_name_prefix => $infile_name_prefix, @@ -213,7 +204,7 @@ sub analysis_rankvariant { { max_cores_per_node => $active_parameter_href->{max_cores_per_node}, modifier_core_number => scalar keys %infile_path, - recipe_core_number => $recipe_resource{core_number}, + recipe_core_number => $recipe{core_number}, } ); ## Update memory depending on how many cores that are being used @@ -221,7 +212,7 @@ sub analysis_rankvariant { { node_ram_memory => $active_parameter_href->{node_ram_memory}, parallel_processes => $core_number, - process_memory_allocation => $recipe_resource{memory}, + process_memory_allocation => $recipe{memory}, } ); @@ -234,7 +225,7 @@ sub analysis_rankvariant { filehandle => $filehandle, job_id_href => $job_id_href, memory_allocation => $memory_allocation, - process_time => $recipe_resource{time}, + process_time => $recipe{time}, recipe_directory => $recipe_name, recipe_name => $recipe_name, temp_directory => $temp_directory, @@ -318,11 +309,11 @@ sub analysis_rankvariant { genmod_models( { - filehandle => $xargsfilehandle, - case_file => $case_file_path, - case_type => $active_parameter_href->{genmod_models_case_type}, - infile_path => $genmod_indata, - outfile_path => $genmod_outfile_path, + filehandle => $xargsfilehandle, + case_file => $case_file_path, + case_type => $active_parameter_href->{genmod_models_case_type}, + infile_path => $genmod_indata, + outfile_path => $genmod_outfile_path, reduced_penetrance_file_path => $active_parameter_href->{genmod_models_reduced_penetrance_file}, stderrfile_path => $models_stderrfile_path, @@ -337,8 +328,7 @@ sub analysis_rankvariant { ## Genmod Score $genmod_module .= $UNDERSCORE . q{score}; - my $score_stderrfile_path = - $stderrfile_path_prefix . $genmod_module . $DOT . q{stderr.txt}; + my $score_stderrfile_path = $stderrfile_path_prefix . $genmod_module . $DOT . 
q{stderr.txt}; genmod_score( { @@ -378,7 +368,7 @@ sub analysis_rankvariant { close $xargsfilehandle or $log->logcroak(q{Could not close xargsfilehandle}); - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { set_recipe_outfile_in_sample_info( { @@ -392,15 +382,14 @@ sub analysis_rankvariant { if ( defined $active_parameter_href->{rank_model_file} ) { my ($rank_model_version) = - $active_parameter_href->{rank_model_file} =~ - m/ v(\d+[.]\d+[.]\d+ | \d+[.]\d+) /sxm; + $active_parameter_href->{rank_model_file} =~ m/ v(\d+[.]\d+[.]\d+ | \d+[.]\d+) /sxm; set_recipe_metafile_in_sample_info( { - file => basename( $active_parameter_href->{rank_model_file} ), - metafile_tag => q{rank_model}, - path => $active_parameter_href->{rank_model_file}, - recipe_name => q{genmod}, + file => basename( $active_parameter_href->{rank_model_file} ), + metafile_tag => q{rank_model}, + path => $active_parameter_href->{rank_model_file}, + recipe_name => q{genmod}, sample_info_href => $sample_info_href, version => $rank_model_version, } @@ -408,13 +397,13 @@ sub analysis_rankvariant { } submit_recipe( { - base_command => $profile_base_command, - case_id => $case_id, - dependency_method => q{sample_to_case}, - job_id_chain => $job_id_chain, - job_id_href => $job_id_href, - job_reservation_name => $active_parameter_href->{job_reservation_name}, - log => $log, + base_command => $profile_base_command, + case_id => $case_id, + dependency_method => q{sample_to_case}, + job_id_chain => $recipe{job_id_chain}, + job_id_href => $job_id_href, + job_reservation_name => $active_parameter_href->{job_reservation_name}, + log => $log, max_parallel_processes_count_href => $file_info_href->{max_parallel_processes_count}, recipe_file_path => $recipe_file_path, @@ -527,13 +516,11 @@ sub analysis_rankvariant_unaffected { use MIP::Cluster qw{ get_core_number update_memory_allocation }; use MIP::Pedigree qw{ create_fam_file }; use MIP::Get::File qw{ get_io_files }; - use MIP::Get::Parameter qw{ 
get_recipe_attributes get_recipe_resources }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Parse::File qw{ parse_io_outfiles }; use MIP::Processmanagement::Processes qw{ submit_recipe }; - use MIP::Program::Genmod - qw{ genmod_annotate genmod_compound genmod_models genmod_score }; - use MIP::Sample_info - qw{ set_recipe_metafile_in_sample_info set_recipe_outfile_in_sample_info }; + use MIP::Program::Genmod qw{ genmod_annotate genmod_compound genmod_models genmod_score }; + use MIP::Sample_info qw{ set_recipe_metafile_in_sample_info set_recipe_outfile_in_sample_info }; use MIP::Recipes::Analysis::Xargs qw{ xargs_command }; use MIP::Script::Setup_script qw{ setup_script }; @@ -559,18 +546,11 @@ sub analysis_rankvariant_unaffected { my $infile_name_prefix = $io{in}{file_name_prefix}; my %infile_path = %{ $io{in}{file_path_href} }; - my $job_id_chain = get_recipe_attributes( - { - parameter_href => $parameter_href, - recipe_name => $recipe_name, - attribute => q{chain}, - } - ); - my $recipe_mode = $active_parameter_href->{$recipe_name}; my $xargs_file_path_prefix; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); @@ -580,7 +560,7 @@ sub analysis_rankvariant_unaffected { %io, parse_io_outfiles( { - chain_id => $job_id_chain, + chain_id => $recipe{job_id_chain}, id => $case_id, file_info_href => $file_info_href, file_name_prefix => $infile_name_prefix, @@ -606,7 +586,7 @@ sub analysis_rankvariant_unaffected { { max_cores_per_node => $active_parameter_href->{max_cores_per_node}, modifier_core_number => scalar keys %infile_path, - recipe_core_number => $recipe_resource{core_number}, + recipe_core_number => $recipe{core_number}, } ); ## Update memory depending on how many cores that are being used @@ -614,7 +594,7 @@ sub analysis_rankvariant_unaffected { { node_ram_memory => 
$active_parameter_href->{node_ram_memory}, parallel_processes => $core_number, - process_memory_allocation => $recipe_resource{memory}, + process_memory_allocation => $recipe{memory}, } ); @@ -627,7 +607,7 @@ sub analysis_rankvariant_unaffected { filehandle => $filehandle, job_id_href => $job_id_href, memory_allocation => $memory_allocation, - process_time => $recipe_resource{time}, + process_time => $recipe{time}, recipe_directory => $recipe_name, recipe_name => $recipe_name, temp_directory => $temp_directory, @@ -708,7 +688,7 @@ sub analysis_rankvariant_unaffected { close $xargsfilehandle or $log->logcroak(q{Could not close xargsfilehandle}); - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { set_recipe_outfile_in_sample_info( { @@ -722,15 +702,14 @@ sub analysis_rankvariant_unaffected { if ( defined $active_parameter_href->{rank_model_file} ) { my ($rank_model_version) = - $active_parameter_href->{rank_model_file} =~ - m/ v(\d+[.]\d+[.]\d+ | \d+[.]\d+) /sxm; + $active_parameter_href->{rank_model_file} =~ m/ v(\d+[.]\d+[.]\d+ | \d+[.]\d+) /sxm; set_recipe_metafile_in_sample_info( { - file => basename( $active_parameter_href->{rank_model_file} ), - metafile_tag => q{rank_model}, - path => $active_parameter_href->{rank_model_file}, - recipe_name => q{genmod}, + file => basename( $active_parameter_href->{rank_model_file} ), + metafile_tag => q{rank_model}, + path => $active_parameter_href->{rank_model_file}, + recipe_name => q{genmod}, sample_info_href => $sample_info_href, version => $rank_model_version, } @@ -738,13 +717,13 @@ sub analysis_rankvariant_unaffected { } submit_recipe( { - base_command => $profile_base_command, - case_id => $case_id, - dependency_method => q{sample_to_case}, - job_id_chain => $job_id_chain, - job_id_href => $job_id_href, - job_reservation_name => $active_parameter_href->{job_reservation_name}, - log => $log, + base_command => $profile_base_command, + case_id => $case_id, + dependency_method => q{sample_to_case}, + job_id_chain 
=> $recipe{job_id_chain}, + job_id_href => $job_id_href, + job_reservation_name => $active_parameter_href->{job_reservation_name}, + log => $log, max_parallel_processes_count_href => $file_info_href->{max_parallel_processes_count}, recipe_file_path => $recipe_file_path, @@ -856,14 +835,12 @@ sub analysis_rankvariant_sv { use MIP::Analysis qw{ get_vcf_parser_analysis_suffix }; use MIP::Get::File qw{ get_io_files }; - use MIP::Get::Parameter qw{ get_recipe_attributes get_recipe_resources }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Parse::File qw{ parse_io_outfiles }; use MIP::Pedigree qw{ create_fam_file }; use MIP::Processmanagement::Processes qw{ submit_recipe }; - use MIP::Program::Genmod - qw{ genmod_annotate genmod_compound genmod_models genmod_score }; - use MIP::Sample_info - qw{ set_recipe_outfile_in_sample_info set_recipe_metafile_in_sample_info }; + use MIP::Program::Genmod qw{ genmod_annotate genmod_compound genmod_models genmod_score }; + use MIP::Sample_info qw{ set_recipe_outfile_in_sample_info set_recipe_metafile_in_sample_info }; use MIP::Script::Setup_script qw{ setup_script }; ### PREPROCESSING: @@ -885,25 +862,17 @@ sub analysis_rankvariant_sv { my $infile_name_prefix = $io{in}{file_name_prefix}; my @infile_paths = @{ $io{in}{file_paths} }; - my $job_id_chain = get_recipe_attributes( - { - parameter_href => $parameter_href, - recipe_name => $recipe_name, - attribute => q{chain}, - } - ); - my $recipe_mode = $active_parameter_href->{$recipe_name}; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); my @vcfparser_analysis_types = get_vcf_parser_analysis_suffix( { - vcfparser_outfile_count => - $active_parameter_href->{sv_vcfparser_outfile_count}, + vcfparser_outfile_count => $active_parameter_href->{sv_vcfparser_outfile_count}, } ); @@ -915,7 +884,7 @@ sub 
analysis_rankvariant_sv { %io, parse_io_outfiles( { - chain_id => $job_id_chain, + chain_id => $recipe{job_id_chain}, id => $case_id, file_info_href => $file_info_href, file_name_prefix => $infile_name_prefix, @@ -939,12 +908,12 @@ sub analysis_rankvariant_sv { my ( $recipe_file_path, $recipe_info_path ) = setup_script( { active_parameter_href => $active_parameter_href, - core_number => $recipe_resource{core_number}, + core_number => $recipe{core_number}, directory_id => $case_id, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, - process_time => $recipe_resource{time}, + memory_allocation => $recipe{memory}, + process_time => $recipe{time}, recipe_directory => $recipe_name, recipe_name => $recipe_name, temp_directory => $temp_directory, @@ -993,15 +962,12 @@ sub analysis_rankvariant_sv { my $genmod_module = $UNDERSCORE . q{annotate}; genmod_annotate( { - annotate_region => $active_parameter_href->{sv_genmod_annotate_regions}, - filehandle => $filehandle, - genome_build => $file_info_href->{human_genome_reference_version}, - infile_path => $genmod_indata, - outfile_path => $genmod_outfile_path, - stderrfile_path => $recipe_info_path - . $genmod_module - . $DOT - . q{stderr.txt}, + annotate_region => $active_parameter_href->{sv_genmod_annotate_regions}, + filehandle => $filehandle, + genome_build => $file_info_href->{human_genome_reference_version}, + infile_path => $genmod_indata, + outfile_path => $genmod_outfile_path, + stderrfile_path => $recipe_info_path . $genmod_module . $DOT . q{stderr.txt}, temp_directory_path => $temp_directory, verbosity => q{v}, } @@ -1035,7 +1001,7 @@ sub analysis_rankvariant_sv { thread_number => 4, vep => $use_vep, verbosity => q{v}, - whole_gene => $active_parameter_href->{sv_genmod_models_whole_gene}, + whole_gene => $active_parameter_href->{sv_genmod_models_whole_gene}, } ); @@ -1046,11 +1012,11 @@ sub analysis_rankvariant_sv { $genmod_module .= $UNDERSCORE . 
q{score}; genmod_score( { - filehandle => $filehandle, - case_file => $fam_file_path, - case_type => $active_parameter_href->{sv_genmod_models_case_type}, - infile_path => $genmod_indata, - outfile_path => catfile( dirname( devnull() ), q{stdout} ), + filehandle => $filehandle, + case_file => $fam_file_path, + case_type => $active_parameter_href->{sv_genmod_models_case_type}, + infile_path => $genmod_indata, + outfile_path => catfile( dirname( devnull() ), q{stdout} ), rank_model_file_path => $active_parameter_href->{sv_rank_model_file}, rank_result => 1, stderrfile_path => $recipe_info_path @@ -1085,7 +1051,7 @@ sub analysis_rankvariant_sv { say {$filehandle} $AMPERSAND . $NEWLINE; - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { set_recipe_outfile_in_sample_info( { @@ -1100,7 +1066,7 @@ sub analysis_rankvariant_sv { close $filehandle or $log->logcroak(q{Could not close filehandle}); - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { ## Add to sample_info if ( defined $active_parameter_href->{sv_rank_model_file} ) { @@ -1111,7 +1077,7 @@ sub analysis_rankvariant_sv { set_recipe_metafile_in_sample_info( { - file => basename( $active_parameter_href->{sv_rank_model_file} ), + file => basename( $active_parameter_href->{sv_rank_model_file} ), metafile_tag => q{sv_rank_model}, path => $active_parameter_href->{sv_rank_model_file}, recipe_name => q{sv_genmod}, @@ -1123,13 +1089,13 @@ sub analysis_rankvariant_sv { submit_recipe( { - base_command => $profile_base_command, - case_id => $case_id, - dependency_method => q{sample_to_case}, - job_id_chain => $job_id_chain, - job_id_href => $job_id_href, - job_reservation_name => $active_parameter_href->{job_reservation_name}, - log => $log, + base_command => $profile_base_command, + case_id => $case_id, + dependency_method => q{sample_to_case}, + job_id_chain => $recipe{job_id_chain}, + job_id_href => $job_id_href, + job_reservation_name => $active_parameter_href->{job_reservation_name}, + log => $log, 
max_parallel_processes_count_href => $file_info_href->{max_parallel_processes_count}, recipe_file_path => $recipe_file_path, @@ -1240,12 +1206,11 @@ sub analysis_rankvariant_sv_unaffected { check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; use MIP::Get::File qw{ get_io_files }; - use MIP::Get::Parameter qw{ get_recipe_attributes get_recipe_resources }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Parse::File qw{ parse_io_outfiles }; use MIP::Processmanagement::Processes qw{ submit_recipe }; use MIP::Program::Genmod qw{ genmod_annotate }; - use MIP::Sample_info - qw{ set_recipe_outfile_in_sample_info set_recipe_metafile_in_sample_info }; + use MIP::Sample_info qw{ set_recipe_outfile_in_sample_info set_recipe_metafile_in_sample_info }; use MIP::Script::Setup_script qw{ setup_script }; ### PREPROCESSING: @@ -1268,25 +1233,17 @@ sub analysis_rankvariant_sv_unaffected { my $infile_name_prefix = $io{in}{file_name_prefix}; my @infile_paths = @{ $io{in}{file_paths} }; - my $job_id_chain = get_recipe_attributes( - { - attribute => q{chain}, - parameter_href => $parameter_href, - recipe_name => $recipe_name, - } - ); - my $recipe_mode = $active_parameter_href->{$recipe_name}; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); my @vcfparser_analysis_types = get_vcf_parser_analysis_suffix( { - vcfparser_outfile_count => - $active_parameter_href->{sv_vcfparser_outfile_count}, + vcfparser_outfile_count => $active_parameter_href->{sv_vcfparser_outfile_count}, } ); @@ -1298,7 +1255,7 @@ sub analysis_rankvariant_sv_unaffected { %io, parse_io_outfiles( { - chain_id => $job_id_chain, + chain_id => $recipe{job_id_chain}, id => $case_id, file_info_href => $file_info_href, file_name_prefix => $infile_name_prefix, @@ -1321,12 +1278,12 @@ sub analysis_rankvariant_sv_unaffected { my ( 
$recipe_file_path, $recipe_info_path ) = setup_script( { active_parameter_href => $active_parameter_href, - core_number => $recipe_resource{core_number}, + core_number => $recipe{core_number}, directory_id => $case_id, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, - process_time => $recipe_resource{time}, + memory_allocation => $recipe{memory}, + process_time => $recipe{time}, recipe_directory => $recipe_name, recipe_name => $recipe_name, temp_directory => $temp_directory, @@ -1362,7 +1319,7 @@ sub analysis_rankvariant_sv_unaffected { say {$filehandle} $AMPERSAND . $NEWLINE; - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { set_recipe_outfile_in_sample_info( { @@ -1377,7 +1334,7 @@ sub analysis_rankvariant_sv_unaffected { close $filehandle or $log->logcroak(q{Could not close filehandle}); - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { ## Add to Sample_info if ( defined $active_parameter_href->{sv_rank_model_file} ) { @@ -1388,7 +1345,7 @@ sub analysis_rankvariant_sv_unaffected { set_recipe_metafile_in_sample_info( { - file => basename( $active_parameter_href->{sv_rank_model_file} ), + file => basename( $active_parameter_href->{sv_rank_model_file} ), metafile_tag => q{sv_rank_model}, path => $active_parameter_href->{sv_rank_model_file}, recipe_name => q{sv_genmod}, @@ -1401,13 +1358,13 @@ sub analysis_rankvariant_sv_unaffected { submit_recipe( { - base_command => $profile_base_command, - case_id => $case_id, - dependency_method => q{sample_to_case}, - job_id_chain => $job_id_chain, - job_id_href => $job_id_href, - job_reservation_name => $active_parameter_href->{job_reservation_name}, - log => $log, + base_command => $profile_base_command, + case_id => $case_id, + dependency_method => q{sample_to_case}, + job_id_chain => $recipe{job_id_chain}, + job_id_href => $job_id_href, + job_reservation_name => $active_parameter_href->{job_reservation_name}, + log => $log, 
max_parallel_processes_count_href => $file_info_href->{max_parallel_processes_count}, recipe_file_path => $recipe_file_path, diff --git a/lib/MIP/Recipes/Analysis/Rhocall.pm b/lib/MIP/Recipes/Analysis/Rhocall.pm index 2e5b72d17..6fe48de3f 100644 --- a/lib/MIP/Recipes/Analysis/Rhocall.pm +++ b/lib/MIP/Recipes/Analysis/Rhocall.pm @@ -16,8 +16,7 @@ use autodie qw{ :all }; use Readonly; # MIPs lib/ -use MIP::Constants - qw{ $ASTERISK $DOT $LOG_NAME $NEWLINE $PIPE $SEMICOLON $SPACE $UNDERSCORE }; +use MIP::Constants qw{ $ASTERISK $DOT $LOG_NAME $NEWLINE $PIPE $SEMICOLON $SPACE $UNDERSCORE }; BEGIN { @@ -132,12 +131,12 @@ sub analysis_rhocall_annotate { use MIP::Cluster qw{ get_core_number update_memory_allocation }; use MIP::Get::File qw{ get_io_files }; - use MIP::Get::Parameter qw{ get_recipe_attributes get_recipe_resources }; use MIP::Parse::File qw{ parse_io_outfiles }; use MIP::Processmanagement::Processes qw{ submit_recipe }; use MIP::Program::Bcftools qw{ bcftools_roh }; use MIP::Program::Rhocall qw{ rhocall_annotate }; use MIP::Sample_info qw{ set_recipe_outfile_in_sample_info }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Recipes::Analysis::Xargs qw{ xargs_command }; use MIP::Script::Setup_script qw{ setup_script }; @@ -163,28 +162,21 @@ sub analysis_rhocall_annotate { my $consensus_analysis_type = $parameter_href->{cache}{consensus_analysis_type}; my @contigs_size_ordered = @{ $file_info_href->{contigs_size_ordered} }; - my $job_id_chain = get_recipe_attributes( - { - parameter_href => $parameter_href, - recipe_name => $recipe_name, - attribute => q{chain}, - } - ); - my $recipe_mode = $active_parameter_href->{$recipe_name}; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); - my $core_number = $recipe_resource{core_number}; + my $core_number = $recipe{core_number}; ## Set and get the 
io files per chain, id and stream %io = ( %io, parse_io_outfiles( { - chain_id => $job_id_chain, + chain_id => $recipe{job_id_chain}, id => $case_id, file_info_href => $file_info_href, file_name_prefix => $infile_name_prefix, @@ -214,8 +206,8 @@ sub analysis_rhocall_annotate { directory_id => $case_id, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, - process_time => $recipe_resource{time}, + memory_allocation => $recipe{memory}, + process_time => $recipe{time}, recipe_directory => $recipe_name, recipe_name => $recipe_name, temp_directory => $temp_directory, @@ -264,7 +256,7 @@ sub analysis_rhocall_annotate { infile_path => $infile_path{$contig}, outfile_path => $roh_outfile_path, samples_ref => \@sample_ids, - skip_indels => 1, # Skip indels as their genotypes are enriched for errors + skip_indels => 1, # Skip indels as their genotypes are enriched for errors } ); print {$xargsfilehandle} $SEMICOLON . $SPACE; @@ -286,7 +278,7 @@ sub analysis_rhocall_annotate { close $xargsfilehandle or $log->logcroak(q{Could not close xargsfilehandle}); - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { ## Collect QC metadata info for later use set_recipe_outfile_in_sample_info( @@ -298,13 +290,13 @@ sub analysis_rhocall_annotate { ); submit_recipe( { - base_command => $profile_base_command, - case_id => $case_id, - dependency_method => q{sample_to_case}, - job_id_chain => $job_id_chain, - job_id_href => $job_id_href, - job_reservation_name => $active_parameter_href->{job_reservation_name}, - log => $log, + base_command => $profile_base_command, + case_id => $case_id, + dependency_method => q{sample_to_case}, + job_id_chain => $recipe{job_id_chain}, + job_id_href => $job_id_href, + job_reservation_name => $active_parameter_href->{job_reservation_name}, + log => $log, max_parallel_processes_count_href => $file_info_href->{max_parallel_processes_count}, recipe_file_path => $recipe_file_path, @@ -409,7 +401,6 @@ sub 
analysis_rhocall_viz { use MIP::File::Path qw{ remove_file_path_suffix }; use MIP::Get::File qw{ get_io_files }; - use MIP::Get::Parameter qw{get_recipe_attributes get_recipe_resources }; use MIP::Parse::File qw{ parse_io_outfiles }; use MIP::Program::Bcftools qw{ bcftools_index bcftools_roh bcftools_view }; use MIP::Program::Gnu::Coreutils qw{ gnu_mv }; @@ -417,6 +408,7 @@ sub analysis_rhocall_viz { use MIP::Program::Rhocall qw{ rhocall_viz }; use MIP::Program::Ucsc qw{ ucsc_wig_to_big_wig }; use MIP::Processmanagement::Processes qw{ submit_recipe }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Reference qw{ write_contigs_size_file }; use MIP::Sample_info qw{ set_file_path_to_store set_recipe_metafile_in_sample_info set_recipe_outfile_in_sample_info }; @@ -442,17 +434,10 @@ sub analysis_rhocall_viz { my $infile_path_prefix = $io{out}{file_path_prefix}; my $infile_path = $infile_path_prefix . q{.vcf.gz}; - my $job_id_chain = get_recipe_attributes( - { - attribute => q{chain}, - parameter_href => $parameter_href, - recipe_name => $recipe_name, - } - ); - my $recipe_mode = $active_parameter_href->{$recipe_name}; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); @@ -461,7 +446,7 @@ sub analysis_rhocall_viz { %io, parse_io_outfiles( { - chain_id => $job_id_chain, + chain_id => $recipe{job_id_chain}, id => $sample_id, file_info_href => $file_info_href, file_name_prefix => $infile_name_prefix, @@ -484,12 +469,12 @@ sub analysis_rhocall_viz { my ( $recipe_file_path, $recipe_info_path ) = setup_script( { active_parameter_href => $active_parameter_href, - core_number => $recipe_resource{core_number}, + core_number => $recipe{core_number}, directory_id => $sample_id, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, - process_time => 
$recipe_resource{time}, + memory_allocation => $recipe{memory}, + process_time => $recipe{time}, recipe_directory => $recipe_name, recipe_name => $recipe_name, } @@ -533,18 +518,17 @@ sub analysis_rhocall_viz { filehandle => $filehandle, infile_path => $sample_vcf, outfile_path => $sample_outfile_path_prefix . q{.roh}, - skip_indels => 1, # Skip indels as their genotypes are enriched for errors + skip_indels => 1, # Skip indels as their genotypes are enriched for errors } ); say {$filehandle} $NEWLINE; picardtools_updatevcfsequencedictionary( { - filehandle => $filehandle, - infile_path => $sample_vcf, - java_jar => - catfile( $active_parameter_href->{picardtools_path}, q{picard.jar} ), - outfile_path => $sample_outfile_path_prefix . q{.vcf}, + filehandle => $filehandle, + infile_path => $sample_vcf, + java_jar => catfile( $active_parameter_href->{picardtools_path}, q{picard.jar} ), + outfile_path => $sample_outfile_path_prefix . q{.vcf}, sequence_dictionary => $active_parameter_href->{human_genome_reference}, } ); @@ -578,13 +562,11 @@ sub analysis_rhocall_viz { print {$filehandle} $NEWLINE; ## Create chromosome name and size file - my $contigs_size_file_path = - catfile( $outdir_path, q{contigs_size_file} . $DOT . q{tsv} ); + my $contigs_size_file_path = catfile( $outdir_path, q{contigs_size_file} . $DOT . q{tsv} ); write_contigs_size_file( { - fai_file_path => $active_parameter_href->{human_genome_reference} - . $DOT . q{fai}, - outfile_path => $contigs_size_file_path, + fai_file_path => $active_parameter_href->{human_genome_reference} . $DOT . 
q{fai}, + outfile_path => $contigs_size_file_path, } ); @@ -603,7 +585,7 @@ sub analysis_rhocall_viz { ## Close filehandle close $filehandle or $log->logcroak(q{Could not close filehandle}); - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { ## Collect QC metadata info for later use set_recipe_outfile_in_sample_info( @@ -628,13 +610,13 @@ sub analysis_rhocall_viz { submit_recipe( { - base_command => $profile_base_command, - case_id => $case_id, - dependency_method => q{case_to_sample}, - job_id_chain => $job_id_chain, - job_id_href => $job_id_href, - job_reservation_name => $active_parameter_href->{job_reservation_name}, - log => $log, + base_command => $profile_base_command, + case_id => $case_id, + dependency_method => q{case_to_sample}, + job_id_chain => $recipe{job_id_chain}, + job_id_href => $job_id_href, + job_reservation_name => $active_parameter_href->{job_reservation_name}, + log => $log, max_parallel_processes_count_href => $file_info_href->{max_parallel_processes_count}, recipe_file_path => $recipe_file_path, diff --git a/lib/MIP/Recipes/Analysis/Rseqc.pm b/lib/MIP/Recipes/Analysis/Rseqc.pm index 01fd49763..ef10c1064 100644 --- a/lib/MIP/Recipes/Analysis/Rseqc.pm +++ b/lib/MIP/Recipes/Analysis/Rseqc.pm @@ -137,7 +137,7 @@ sub analysis_rseqc { check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; use MIP::Get::File qw{ get_io_files }; - use MIP::Get::Parameter qw{ get_recipe_attributes get_recipe_resources }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Parse::File qw{ parse_io_outfiles }; use MIP::Processmanagement::Processes qw{ submit_recipe }; use MIP::Program::Rseqc @@ -164,18 +164,11 @@ sub analysis_rseqc { my @infile_name_prefixes = @{ $io{in}{file_name_prefixes} }; my $infile_suffix = $io{in}{file_suffix}; my $infile_path = $infile_path_prefix . 
$infile_suffix; - my $recipe_mode = $active_parameter_href->{$recipe_name}; my $bed_file_path = $active_parameter_href->{rseqc_transcripts_file}; - my $job_id_chain = get_recipe_attributes( - { - attribute => q{chain}, - parameter_href => $parameter_href, - recipe_name => $recipe_name, - } - ); - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); @@ -185,7 +178,7 @@ sub analysis_rseqc { %io, parse_io_outfiles( { - chain_id => $job_id_chain, + chain_id => $recipe{job_id_chain}, id => $sample_id, file_info_href => $file_info_href, outdata_dir => $active_parameter_href->{outdata_dir}, @@ -207,12 +200,12 @@ sub analysis_rseqc { my ( $recipe_file_path, $recipe_info_path ) = setup_script( { active_parameter_href => $active_parameter_href, - core_number => $recipe_resource{core_number}, + core_number => $recipe{core_number}, directory_id => $sample_id, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, - process_time => $recipe_resource{time}, + memory_allocation => $recipe{memory}, + process_time => $recipe{time}, recipe_directory => $recipe_name, recipe_name => $recipe_name, temp_directory => $temp_directory, @@ -242,9 +235,7 @@ sub analysis_rseqc { bed_file_path => $bed_file_path, filehandle => $filehandle, infile_path => $infile_path, - outfiles_path_prefix => $outfile_path_prefix - . $UNDERSCORE - . q{junction_annotation}, + outfiles_path_prefix => $outfile_path_prefix . $UNDERSCORE . q{junction_annotation}, } ); say {$filehandle} $NEWLINE; @@ -255,9 +246,7 @@ sub analysis_rseqc { bed_file_path => $bed_file_path, filehandle => $filehandle, infile_path => $infile_path, - outfiles_path_prefix => $outfile_path_prefix - . $UNDERSCORE - . q{junction_saturation}, + outfiles_path_prefix => $outfile_path_prefix . $UNDERSCORE . 
q{junction_saturation}, } ); say {$filehandle} $NEWLINE; @@ -268,9 +257,7 @@ sub analysis_rseqc { bed_file_path => $bed_file_path, filehandle => $filehandle, infile_path => $infile_path, - outfiles_path_prefix => $outfile_path_prefix - . $UNDERSCORE - . q{inner_distance}, + outfiles_path_prefix => $outfile_path_prefix . $UNDERSCORE . q{inner_distance}, } ); say {$filehandle} $NEWLINE; @@ -280,10 +267,7 @@ sub analysis_rseqc { { filehandle => $filehandle, infile_path => $infile_path, - stdoutfile_path => $outfile_path_prefix - . $UNDERSCORE - . q{bam_stat} - . $outfile_suffix, + stdoutfile_path => $outfile_path_prefix . $UNDERSCORE . q{bam_stat} . $outfile_suffix, } ); say {$filehandle} $NEWLINE; @@ -307,26 +291,24 @@ sub analysis_rseqc { { filehandle => $filehandle, infile_path => $infile_path, - outfiles_path_prefix => $outfile_path_prefix - . $UNDERSCORE - . q{read_duplication}, + outfiles_path_prefix => $outfile_path_prefix . $UNDERSCORE . q{read_duplication}, } ); say {$filehandle} $NEWLINE; close $filehandle; - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { submit_recipe( { - base_command => $profile_base_command, - dependency_method => q{sample_to_island}, - case_id => $case_id, - job_id_chain => $job_id_chain, - job_id_href => $job_id_href, - job_reservation_name => $active_parameter_href->{job_reservation_name}, - log => $log, + base_command => $profile_base_command, + dependency_method => q{sample_to_island}, + case_id => $case_id, + job_id_chain => $recipe{job_id_chain}, + job_id_href => $job_id_href, + job_reservation_name => $active_parameter_href->{job_reservation_name}, + log => $log, max_parallel_processes_count_href => $file_info_href->{max_parallel_processes_count}, recipe_file_path => $recipe_file_path, diff --git a/lib/MIP/Recipes/Analysis/Rtg_vcfeval.pm b/lib/MIP/Recipes/Analysis/Rtg_vcfeval.pm index 46c0b18e7..35b60c9c3 100644 --- a/lib/MIP/Recipes/Analysis/Rtg_vcfeval.pm +++ b/lib/MIP/Recipes/Analysis/Rtg_vcfeval.pm @@ -128,13 
+128,13 @@ sub analysis_rtg_vcfeval { use MIP::Active_parameter qw{ get_exome_target_bed_file }; use MIP::Get::File qw{ get_io_files }; - use MIP::Get::Parameter qw{ get_recipe_attributes get_recipe_resources }; use MIP::Program::Gnu::Coreutils qw{ gnu_mkdir gnu_rm }; use MIP::Parse::File qw{ parse_io_outfiles }; use MIP::Program::Bedtools qw{ bedtools_intersect }; use MIP::Program::Rtg qw{ rtg_vcfeval }; use MIP::Program::Bcftools qw{ bcftools_rename_vcf_samples }; use MIP::Processmanagement::Processes qw{ submit_recipe }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Sample_info qw{ get_pedigree_sample_id_attributes set_recipe_outfile_in_sample_info }; use MIP::Script::Setup_script qw{ setup_script }; @@ -160,16 +160,8 @@ sub analysis_rtg_vcfeval { my $infile_name_prefix = $io{in}{file_name_prefix}; my $infile_path = $io{in}{file_path}; - my $job_id_chain = get_recipe_attributes( - { - attribute => q{chain}, - parameter_href => $parameter_href, - recipe_name => $recipe_name, - } - ); my $nist_id = $active_parameter_href->{nist_id}{$sample_id}; my @nist_versions = @{ $active_parameter_href->{nist_versions} }; - my $recipe_mode = $active_parameter_href->{$recipe_name}; my $sample_id_analysis_type = get_pedigree_sample_id_attributes( { attribute => q{analysis_type}, @@ -185,20 +177,21 @@ sub analysis_rtg_vcfeval { } ); - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); - my $core_number = $recipe_resource{core_number}; + my $core_number = $recipe{core_number}; ## Set and get the io files per chain, id and stream %io = ( %io, parse_io_outfiles( { - chain_id => $job_id_chain, + chain_id => $recipe{job_id_chain}, id => $case_id, file_info_href => $file_info_href, file_name_prefix => $infile_name_prefix, @@ -226,8 +219,8 @@ sub analysis_rtg_vcfeval { directory_id => $case_id, filehandle => 
$filehandle, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, - process_time => $recipe_resource{time}, + memory_allocation => $recipe{memory}, + process_time => $recipe{time}, recipe_directory => $recipe_name, recipe_name => $recipe_name, } @@ -317,7 +310,7 @@ sub analysis_rtg_vcfeval { say {$filehandle} $NEWLINE; say {$filehandle} q{## Rtg vcfeval}; - my $rtg_memory = $recipe_resource{memory} - 1 . q{G}; + my $rtg_memory = $recipe{memory} - 1 . q{G}; rtg_vcfeval( { @@ -341,7 +334,7 @@ sub analysis_rtg_vcfeval { ## Close filehandle close $filehandle or $log->logcroak(q{Could not close filehandle}); - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { ## Collect QC metadata info for later use set_recipe_outfile_in_sample_info( @@ -357,7 +350,7 @@ sub analysis_rtg_vcfeval { base_command => $profile_base_command, case_id => $case_id, dependency_method => q{case_to_island}, - job_id_chain => $job_id_chain, + job_id_chain => $recipe{job_id_chain}, job_id_href => $job_id_href, job_reservation_name => $active_parameter_href->{job_reservation_name}, log => $log, diff --git a/lib/MIP/Recipes/Analysis/Sacct.pm b/lib/MIP/Recipes/Analysis/Sacct.pm index 0c1b19e1b..18ff44811 100644 --- a/lib/MIP/Recipes/Analysis/Sacct.pm +++ b/lib/MIP/Recipes/Analysis/Sacct.pm @@ -110,9 +110,9 @@ sub analysis_sacct { check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; - use MIP::Get::Parameter qw{ get_recipe_attributes get_recipe_resources }; use MIP::Processmanagement::Processes qw{ submit_recipe }; use MIP::Program::Slurm qw{ slurm_sacct }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Script::Setup_script qw{ setup_script }; ### PREPROCESSING: @@ -121,17 +121,10 @@ sub analysis_sacct { my $log = Log::Log4perl->get_logger($LOG_NAME); ## Unpack parameters - my $job_id_chain = get_recipe_attributes( - { - parameter_href => $parameter_href, - recipe_name => $recipe_name, - attribute => q{chain}, - } - ); - my $recipe_mode = 
$active_parameter_href->{$recipe_name}; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); @@ -144,12 +137,12 @@ sub analysis_sacct { my ($recipe_file_path) = setup_script( { active_parameter_href => $active_parameter_href, - core_number => $recipe_resource{core_number}, + core_number => $recipe{core_number}, directory_id => $case_id, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory_allocation}, - process_time => $recipe_resource{time}, + memory_allocation => $recipe{memory_allocation}, + process_time => $recipe{time}, recipe_directory => $recipe_name, recipe_name => $recipe_name, } @@ -168,14 +161,14 @@ sub analysis_sacct { close $filehandle or $log->logcroak(q{Could not close filehandle}); - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { submit_recipe( { base_command => $profile_base_command, dependency_method => q{add_to_all}, job_dependency_type => q{afterany}, - job_id_chain => $job_id_chain, + job_id_chain => $recipe{job_id_chain}, job_id_href => $job_id_href, job_reservation_name => $active_parameter_href->{job_reservation_name}, log => $log, diff --git a/lib/MIP/Recipes/Analysis/Salmon_quant.pm b/lib/MIP/Recipes/Analysis/Salmon_quant.pm index fa2a6a466..274f784c6 100644 --- a/lib/MIP/Recipes/Analysis/Salmon_quant.pm +++ b/lib/MIP/Recipes/Analysis/Salmon_quant.pm @@ -129,10 +129,10 @@ sub analysis_salmon_quant { use MIP::File_info qw{ get_sample_file_attribute }; use MIP::Get::File qw{ get_io_files }; - use MIP::Get::Parameter qw{ get_recipe_attributes get_recipe_resources}; use MIP::Parse::File qw{ parse_io_outfiles }; use MIP::Program::Salmon qw{ salmon_quant }; use MIP::Processmanagement::Processes qw{ submit_recipe }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Sample_info qw{ set_file_path_to_store 
set_recipe_outfile_in_sample_info }; use MIP::Script::Setup_script qw{ setup_script }; @@ -154,34 +154,27 @@ sub analysis_salmon_quant { } ); my @infile_paths = @{ $io{in}{file_paths} }; - my $recipe_mode = $active_parameter_href->{$recipe_name}; my $referencefile_dir_path = $active_parameter_href->{salmon_quant_reference_genome} . $file_info_href->{salmon_quant_reference_genome}[0]; - my %rec_atr = get_recipe_attributes( - { - parameter_href => $parameter_href, - recipe_name => $recipe_name, - } - ); - my $job_id_chain = $rec_atr{chain}; - my %recipe_resource = get_recipe_resources( + + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); ## Set outfile - my $recipe_dir = - catdir( $active_parameter_href->{outdata_dir}, $sample_id, $recipe_name ); - my $file_path = catfile( $recipe_dir, $rec_atr{file_tag} . $rec_atr{outfile_suffix} ); + my $recipe_dir = catdir( $active_parameter_href->{outdata_dir}, $sample_id, $recipe_name ); + my $file_path = catfile( $recipe_dir, $recipe{file_tag} . 
$recipe{outfile_suffix} ); %io = ( %io, parse_io_outfiles( { - chain_id => $job_id_chain, + chain_id => $recipe{job_id_chain}, id => $sample_id, file_info_href => $file_info_href, file_paths_ref => [$file_path], @@ -204,14 +197,14 @@ sub analysis_salmon_quant { my ( $recipe_file_path, $recipe_info_path ) = setup_script( { active_parameter_href => $active_parameter_href, - core_number => $recipe_resource{core_number}, + core_number => $recipe{core_number}, directory_id => $sample_id, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, + memory_allocation => $recipe{memory}, recipe_directory => $recipe_name, recipe_name => $recipe_name, - process_time => $recipe_resource{time}, + process_time => $recipe{time}, temp_directory => $temp_directory, } ); @@ -279,7 +272,7 @@ sub analysis_salmon_quant { ## Close filehandleS close $filehandle or $log->logcroak(q{Could not close filehandle}); - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { ## Collect QC metadata info for later use set_recipe_outfile_in_sample_info( @@ -304,13 +297,13 @@ sub analysis_salmon_quant { submit_recipe( { - base_command => $profile_base_command, - case_id => $case_id, - dependency_method => q{sample_to_sample}, - job_id_chain => $job_id_chain, - job_id_href => $job_id_href, - job_reservation_name => $active_parameter_href->{job_reservation_name}, - log => $log, + base_command => $profile_base_command, + case_id => $case_id, + dependency_method => q{sample_to_sample}, + job_id_chain => $recipe{job_id_chain}, + job_id_href => $job_id_href, + job_reservation_name => $active_parameter_href->{job_reservation_name}, + log => $log, max_parallel_processes_count_href => $file_info_href->{max_parallel_processes_count}, recipe_file_path => $recipe_file_path, diff --git a/lib/MIP/Recipes/Analysis/Sambamba_depth.pm b/lib/MIP/Recipes/Analysis/Sambamba_depth.pm index 5b30428b3..0dcea963a 100644 --- a/lib/MIP/Recipes/Analysis/Sambamba_depth.pm +++ 
b/lib/MIP/Recipes/Analysis/Sambamba_depth.pm @@ -127,7 +127,7 @@ sub analysis_sambamba_depth { check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; use MIP::Get::File qw{ get_io_files }; - use MIP::Get::Parameter qw{ get_recipe_attributes get_recipe_resources }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Parse::File qw{ parse_io_outfiles }; use MIP::Processmanagement::Processes qw{ submit_recipe }; use MIP::Program::Sambamba qw{ sambamba_depth }; @@ -155,17 +155,10 @@ sub analysis_sambamba_depth { my $infile_suffix = $io{in}{file_suffix}; my $infile_path = $infile_path_prefix . $infile_suffix; - my $job_id_chain = get_recipe_attributes( - { - parameter_href => $parameter_href, - recipe_name => $recipe_name, - attribute => q{chain}, - } - ); - my $recipe_mode = $active_parameter_href->{$recipe_name}; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); @@ -174,7 +167,7 @@ sub analysis_sambamba_depth { %io, parse_io_outfiles( { - chain_id => $job_id_chain, + chain_id => $recipe{job_id_chain}, id => $sample_id, file_info_href => $file_info_href, file_name_prefixes_ref => [$infile_name_prefix], @@ -196,12 +189,12 @@ sub analysis_sambamba_depth { my ( $recipe_file_path, $recipe_info_path ) = setup_script( { active_parameter_href => $active_parameter_href, - core_number => $recipe_resource{core_number}, + core_number => $recipe{core_number}, directory_id => $sample_id, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, - process_time => $recipe_resource{time}, + memory_allocation => $recipe{memory}, + process_time => $recipe{time}, recipe_directory => $recipe_name, recipe_name => $recipe_name, temp_directory => $temp_directory, @@ -251,7 +244,7 @@ sub analysis_sambamba_depth { close $filehandle; - if ( $recipe_mode == 1 ) { + if 
( $recipe{mode} == 1 ) { set_recipe_outfile_in_sample_info( { @@ -276,13 +269,13 @@ sub analysis_sambamba_depth { submit_recipe( { - base_command => $profile_base_command, - case_id => $case_id, - dependency_method => q{sample_to_island}, - job_id_chain => $job_id_chain, - job_id_href => $job_id_href, - job_reservation_name => $active_parameter_href->{job_reservation_name}, - log => $log, + base_command => $profile_base_command, + case_id => $case_id, + dependency_method => q{sample_to_island}, + job_id_chain => $recipe{job_id_chain}, + job_id_href => $job_id_href, + job_reservation_name => $active_parameter_href->{job_reservation_name}, + log => $log, max_parallel_processes_count_href => $file_info_href->{max_parallel_processes_count}, recipe_file_path => $recipe_file_path, diff --git a/lib/MIP/Recipes/Analysis/Samtools_merge.pm b/lib/MIP/Recipes/Analysis/Samtools_merge.pm index 6ebf56c7f..adc65c0bd 100644 --- a/lib/MIP/Recipes/Analysis/Samtools_merge.pm +++ b/lib/MIP/Recipes/Analysis/Samtools_merge.pm @@ -145,11 +145,11 @@ sub analysis_samtools_merge { use MIP::File_info qw{ set_merged_infile_prefix }; use MIP::Get::File qw{ get_io_files }; - use MIP::Get::Parameter qw{ get_recipe_attributes get_recipe_resources }; use MIP::Parse::File qw{ parse_io_outfiles }; use MIP::Processmanagement::Processes qw{ submit_recipe }; use MIP::Program::Samtools qw{ samtools_merge samtools_view }; use MIP::Sample_info qw{ set_recipe_outfile_in_sample_info }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Recipes::Analysis::Xargs qw{ xargs_command }; use MIP::Script::Setup_script qw{ setup_script }; @@ -171,26 +171,19 @@ sub analysis_samtools_merge { ); my @infile_paths = @{ $io{in}{file_paths} }; - my %rec_atr = get_recipe_attributes( - { - parameter_href => $parameter_href, - recipe_name => $recipe_name, - } - ); - my $job_id_chain = $rec_atr{chain}; - my $recipe_mode = $active_parameter_href->{$recipe_name}; my $xargs_file_path_prefix; - my %recipe_resource = 
get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); - my $core_number = $recipe_resource{core_number}; - my $memory_allocation = $recipe_resource{memory}; + my $core_number = $recipe{core_number}; + my $memory_allocation = $recipe{memory}; ## Assign suffix - my $outfile_suffix = $rec_atr{outfile_suffix}; + my $outfile_suffix = $recipe{outfile_suffix}; ## Extract lanes my $lanes_id = join $EMPTY_STR, @{ $file_info_href->{$sample_id}{lanes} }; @@ -219,7 +212,7 @@ sub analysis_samtools_merge { %io, parse_io_outfiles( { - chain_id => $job_id_chain, + chain_id => $recipe{job_id_chain}, id => $sample_id, file_info_href => $file_info_href, file_paths_ref => \@outfile_paths, @@ -246,7 +239,7 @@ sub analysis_samtools_merge { filehandle => $filehandle, job_id_href => $job_id_href, memory_allocation => $memory_allocation, - process_time => $recipe_resource{time}, + process_time => $recipe{time}, recipe_directory => $recipe_name, recipe_name => $recipe_name, temp_directory => $temp_directory, @@ -345,7 +338,7 @@ sub analysis_samtools_merge { close $xargsfilehandle; close $filehandle; - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { my $qc_outfile_path = $outfile_paths[0]; set_recipe_outfile_in_sample_info( @@ -363,7 +356,7 @@ sub analysis_samtools_merge { base_command => $profile_base_command, case_id => $case_id, dependency_method => q{sample_to_sample}, - job_id_chain => $job_id_chain, + job_id_chain => $recipe{job_id_chain}, job_id_href => $job_id_href, job_reservation_name => $active_parameter_href->{job_reservation_name}, log => $log, @@ -472,10 +465,10 @@ sub analysis_samtools_merge_panel { use MIP::File_info qw{ set_merged_infile_prefix }; use MIP::Get::File qw{ get_io_files }; - use MIP::Get::Parameter qw{ get_recipe_attributes get_recipe_resources }; use MIP::Parse::File qw{ parse_io_outfiles }; use 
MIP::Processmanagement::Processes qw{ submit_recipe }; use MIP::Program::Samtools qw{ samtools_merge samtools_view }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Sample_info qw{ set_recipe_outfile_in_sample_info }; use MIP::Script::Setup_script qw{ setup_script }; @@ -497,25 +490,18 @@ sub analysis_samtools_merge_panel { ); my @infile_paths = @{ $io{in}{file_paths} }; - my %rec_atr = get_recipe_attributes( - { - parameter_href => $parameter_href, - recipe_name => $recipe_name, - } - ); - my $job_id_chain = $rec_atr{chain}; - my $recipe_mode = $active_parameter_href->{$recipe_name}; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); - my $core_number = $recipe_resource{core_number}; - my $memory_allocation = $recipe_resource{memory}; + my $core_number = $recipe{core_number}; + my $memory_allocation = $recipe{memory}; ## Assign suffix - my $outfile_suffix = $rec_atr{outfile_suffix}; + my $outfile_suffix = $recipe{outfile_suffix}; ## Extract lanes my $lanes_id = join $EMPTY_STR, @{ $file_info_href->{$sample_id}{lanes} }; @@ -539,7 +525,7 @@ sub analysis_samtools_merge_panel { %io, parse_io_outfiles( { - chain_id => $job_id_chain, + chain_id => $recipe{job_id_chain}, id => $sample_id, file_info_href => $file_info_href, file_paths_ref => [$outfile_path], @@ -563,7 +549,7 @@ sub analysis_samtools_merge_panel { filehandle => $filehandle, job_id_href => $job_id_href, memory_allocation => $memory_allocation, - process_time => $recipe_resource{time}, + process_time => $recipe{time}, recipe_directory => $recipe_name, recipe_name => $recipe_name, } @@ -618,7 +604,7 @@ sub analysis_samtools_merge_panel { close $filehandle; - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { set_recipe_outfile_in_sample_info( { @@ -635,7 +621,7 @@ sub analysis_samtools_merge_panel { base_command => 
$profile_base_command, case_id => $case_id, dependency_method => q{sample_to_sample}, - job_id_chain => $job_id_chain, + job_id_chain => $recipe{job_id_chain}, job_id_href => $job_id_href, job_reservation_name => $active_parameter_href->{job_reservation_name}, log => $log, diff --git a/lib/MIP/Recipes/Analysis/Samtools_subsample_mt.pm b/lib/MIP/Recipes/Analysis/Samtools_subsample_mt.pm index 448d40e34..1c8bbad56 100644 --- a/lib/MIP/Recipes/Analysis/Samtools_subsample_mt.pm +++ b/lib/MIP/Recipes/Analysis/Samtools_subsample_mt.pm @@ -126,12 +126,12 @@ sub analysis_samtools_subsample_mt { use MIP::Environment::Executable qw{ get_executable_base_command }; use MIP::Get::File qw{ get_io_files }; - use MIP::Get::Parameter qw{ get_recipe_attributes get_recipe_resources }; use MIP::Language::Awk qw{ awk }; use MIP::Parse::File qw{ parse_io_outfiles }; use MIP::Program::Samtools qw{ samtools_index samtools_view }; use MIP::Program::Bedtools qw{ bedtools_genomecov }; use MIP::Processmanagement::Processes qw{ submit_recipe }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Sample_info qw{ set_file_path_to_store set_recipe_outfile_in_sample_info }; use MIP::Script::Setup_script qw{ setup_script }; @@ -161,23 +161,15 @@ sub analysis_samtools_subsample_mt { if ( not $infile_path ) { $log->warn( -qq{Mitochondrial contig is not part of analysis contig set - skipping $recipe_name} - ); + qq{Mitochondrial contig is not part of analysis contig set - skipping $recipe_name}); return 1; } - my $job_id_chain = get_recipe_attributes( - { - parameter_href => $parameter_href, - recipe_name => $recipe_name, - attribute => q{chain}, - } - ); my $mt_subsample_depth = $active_parameter_href->{samtools_subsample_mt_depth}; - my $recipe_mode = $active_parameter_href->{$recipe_name}; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => 
$recipe_name, } ); @@ -186,7 +178,7 @@ qq{Mitochondrial contig is not part of analysis contig set - skipping $recipe_na %io, parse_io_outfiles( { - chain_id => $job_id_chain, + chain_id => $recipe{job_id_chain}, id => $sample_id, file_info_href => $file_info_href, file_name_prefixes_ref => \@infile_name_prefixes, @@ -210,12 +202,12 @@ qq{Mitochondrial contig is not part of analysis contig set - skipping $recipe_na my ( $recipe_file_path, $recipe_info_path ) = setup_script( { active_parameter_href => $active_parameter_href, - core_number => $recipe_resource{core_number}, + core_number => $recipe{core_number}, directory_id => $sample_id, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, - process_time => $recipe_resource{time}, + memory_allocation => $recipe{memory}, + process_time => $recipe{time}, recipe_directory => $recipe_name, recipe_name => $recipe_name, } @@ -306,7 +298,7 @@ qq{Mitochondrial contig is not part of analysis contig set - skipping $recipe_na ## Close filehandleS close $filehandle or $log->logcroak(q{Could not close filehandle}); - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { ## Collect QC metadata info for later use set_recipe_outfile_in_sample_info( @@ -332,13 +324,13 @@ qq{Mitochondrial contig is not part of analysis contig set - skipping $recipe_na submit_recipe( { - base_command => $profile_base_command, - case_id => $case_id, - dependency_method => q{sample_to_island}, - job_id_chain => $job_id_chain, - job_id_href => $job_id_href, - job_reservation_name => $active_parameter_href->{job_reservation_name}, - log => $log, + base_command => $profile_base_command, + case_id => $case_id, + dependency_method => q{sample_to_island}, + job_id_chain => $recipe{job_id_chain}, + job_id_href => $job_id_href, + job_reservation_name => $active_parameter_href->{job_reservation_name}, + log => $log, max_parallel_processes_count_href => $file_info_href->{max_parallel_processes_count}, 
recipe_file_path => $recipe_file_path, diff --git a/lib/MIP/Recipes/Analysis/Smncopynumbercaller.pm b/lib/MIP/Recipes/Analysis/Smncopynumbercaller.pm index 09c0f922c..97666fde0 100644 --- a/lib/MIP/Recipes/Analysis/Smncopynumbercaller.pm +++ b/lib/MIP/Recipes/Analysis/Smncopynumbercaller.pm @@ -16,8 +16,7 @@ use autodie qw{ :all }; use Readonly; ## MIPs lib/ -use MIP::Constants - qw{ $GENOME_VERSION $LOG_NAME $NEWLINE $SINGLE_QUOTE $SPACE $UNDERSCORE }; +use MIP::Constants qw{ $GENOME_VERSION $LOG_NAME $NEWLINE $SINGLE_QUOTE $SPACE $UNDERSCORE }; BEGIN { @@ -114,12 +113,12 @@ sub analysis_smncopynumbercaller { check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; use MIP::Get::File qw{ get_io_files }; - use MIP::Get::Parameter qw{ get_recipe_attributes get_recipe_resources }; use MIP::Language::Perl qw{ perl_base }; use MIP::Program::Gnu::Coreutils qw{ gnu_echo }; use MIP::Parse::File qw{ parse_io_outfiles }; use MIP::Processmanagement::Processes qw{ submit_recipe }; use MIP::Program::Smncopynumbercaller qw{ smn_caller }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Sample_info qw{ set_file_path_to_store set_recipe_outfile_in_sample_info }; use MIP::Script::Setup_script qw{ setup_script }; @@ -129,17 +128,10 @@ sub analysis_smncopynumbercaller { my $log = Log::Log4perl->get_logger($LOG_NAME); ## Unpack parameters - my $job_id_chain = get_recipe_attributes( - { - attribute => q{chain}, - parameter_href => $parameter_href, - recipe_name => $recipe_name, - } - ); - my $recipe_mode = $active_parameter_href->{$recipe_name}; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); @@ -168,7 +160,7 @@ sub analysis_smncopynumbercaller { my %io = parse_io_outfiles( { - chain_id => $job_id_chain, + chain_id => $recipe{job_id_chain}, id => $case_id, file_info_href => $file_info_href, 
file_name_prefixes_ref => [$case_id], @@ -189,12 +181,12 @@ sub analysis_smncopynumbercaller { my ( $recipe_file_path, $recipe_info_path ) = setup_script( { active_parameter_href => $active_parameter_href, - core_number => $recipe_resource{core_number}, + core_number => $recipe{core_number}, directory_id => $case_id, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, - process_time => $recipe_resource{time}, + memory_allocation => $recipe{memory}, + process_time => $recipe{time}, recipe_directory => $recipe_name, recipe_name => $recipe_name, } @@ -225,7 +217,7 @@ sub analysis_smncopynumbercaller { genome_version => $GENOME_VERSION, outfile_prefix => $outfile_name_prefix, outdir_path => $outdir_path_prefix, - thread_number => $recipe_resource{core_number}, + thread_number => $recipe{core_number}, } ); say {$filehandle} $NEWLINE; @@ -241,7 +233,7 @@ sub analysis_smncopynumbercaller { ## Close filehandle close $filehandle or $log->logcroak(q{Could not close filehandle}); - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { ## Collect QC metadata info for later use set_recipe_outfile_in_sample_info( @@ -264,13 +256,13 @@ sub analysis_smncopynumbercaller { submit_recipe( { - base_command => $profile_base_command, - case_id => $case_id, - dependency_method => q{sample_to_case}, - job_id_chain => $job_id_chain, - job_id_href => $job_id_href, - job_reservation_name => $active_parameter_href->{job_reservation_name}, - log => $log, + base_command => $profile_base_command, + case_id => $case_id, + dependency_method => q{sample_to_case}, + job_id_chain => $recipe{job_id_chain}, + job_id_href => $job_id_href, + job_reservation_name => $active_parameter_href->{job_reservation_name}, + log => $log, max_parallel_processes_count_href => $file_info_href->{max_parallel_processes_count}, recipe_file_path => $recipe_file_path, @@ -330,10 +322,7 @@ sub _use_sample_id_in_output { } ); push @perl_commands, - ( - $SINGLE_QUOTE, 
qq{s/$file_name_prefix/$sample_id/g}, - $SINGLE_QUOTE, $outfile_path - ); + ( $SINGLE_QUOTE, qq{s/$file_name_prefix/$sample_id/g}, $SINGLE_QUOTE, $outfile_path ); say {$filehandle} join $SPACE, @perl_commands; } return; diff --git a/lib/MIP/Recipes/Analysis/Split_fastq_file.pm b/lib/MIP/Recipes/Analysis/Split_fastq_file.pm index 7a0a33e1b..533c0d749 100644 --- a/lib/MIP/Recipes/Analysis/Split_fastq_file.pm +++ b/lib/MIP/Recipes/Analysis/Split_fastq_file.pm @@ -1,5 +1,6 @@ package MIP::Recipes::Analysis::Split_fastq_file; +use 5.026; use Carp; use charnames qw{ :full :short }; use English qw{ -no_match_vars }; @@ -127,10 +128,10 @@ sub analysis_split_fastq_file { check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; use MIP::Get::File qw{ get_io_files }; - use MIP::Get::Parameter qw{ get_recipe_attributes get_recipe_resources }; use MIP::Program::Gnu::Coreutils qw{ gnu_cp gnu_mkdir gnu_mv gnu_rm gnu_split }; use MIP::Processmanagement::Processes qw{ submit_recipe }; use MIP::Program::Pigz qw{ pigz }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Script::Setup_script qw{ setup_script }; ### PREPROCESSING: @@ -160,18 +161,11 @@ sub analysis_split_fastq_file { my $infile_suffix = $io{in}{file_constant_suffix}; my @temp_infile_path_prefixes = @{ $io{temp}{file_path_prefixes} }; - my $job_id_chain = get_recipe_attributes( - { - parameter_href => $parameter_href, - recipe_name => $recipe_name, - attribute => q{chain}, - } - ); - my $recipe_mode = $active_parameter_href->{$recipe_name}; my $sequence_read_batch = $active_parameter_href->{split_fastq_file_read_batch}; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); @@ -187,15 +181,15 @@ sub analysis_split_fastq_file { my ($recipe_file_path) = setup_script( { active_parameter_href => $active_parameter_href, - core_number => 
$recipe_resource{core_number}, + core_number => $recipe{core_number}, directory_id => $sample_id, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, - process_time => $recipe_resource{time}, + memory_allocation => $recipe{memory}, + process_time => $recipe{time}, recipe_directory => $recipe_name, recipe_name => $recipe_name, - source_environment_commands_ref => $recipe_resource{load_env_ref}, + source_environment_commands_ref => $recipe{load_env_ref}, temp_directory => $temp_directory, } ); @@ -236,7 +230,7 @@ sub analysis_split_fastq_file { decompress => 1, filehandle => $filehandle, infile_path => $infile_path, - processes => $recipe_resource{core_number}, + processes => $recipe{core_number}, stdout => 1, } ); @@ -316,14 +310,14 @@ sub analysis_split_fastq_file { ); say {$filehandle} $NEWLINE; - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { submit_recipe( { base_command => $profile_base_command, case_id => $case_id, dependency_method => q{sample_to_island}, - job_id_chain => $job_id_chain, + job_id_chain => $recipe{job_id_chain}, job_id_href => $job_id_href, job_reservation_name => $active_parameter_href->{job_reservation_name}, log => $log, diff --git a/lib/MIP/Recipes/Analysis/Star_aln.pm b/lib/MIP/Recipes/Analysis/Star_aln.pm index 0d1216ed2..dc0a32e77 100644 --- a/lib/MIP/Recipes/Analysis/Star_aln.pm +++ b/lib/MIP/Recipes/Analysis/Star_aln.pm @@ -16,8 +16,7 @@ use autodie qw{ :all }; use Readonly; ## MIPs lib/ -use MIP::Constants - qw{ $ASTERISK $COMMA $DOT $EMPTY_STR $LOG_NAME $NEWLINE $SPACE $UNDERSCORE }; +use MIP::Constants qw{ $ASTERISK $COMMA $DOT $EMPTY_STR $LOG_NAME $NEWLINE $SPACE $UNDERSCORE }; BEGIN { @@ -129,12 +128,12 @@ sub analysis_star_aln { use MIP::File_info qw{ get_sample_file_attribute }; use MIP::Get::File qw{ get_io_files }; - use MIP::Get::Parameter qw{ get_recipe_attributes get_recipe_resources }; use MIP::Program::Gnu::Coreutils qw{ gnu_mv gnu_rm }; use MIP::Parse::File 
qw{ parse_io_outfiles }; use MIP::Program::Samtools qw{ samtools_index }; use MIP::Program::Star qw{ star_aln }; use MIP::Processmanagement::Processes qw{ submit_recipe }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Sample_info qw{ get_rg_header_line set_recipe_metafile_in_sample_info @@ -159,21 +158,20 @@ sub analysis_star_aln { ); my @infile_paths = @{ $io{in}{file_paths} }; - my $recipe_mode = $active_parameter_href->{$recipe_name}; - - ## Build outfile_paths - my %rec_atr = get_recipe_attributes( +## Build outfile_paths + my %recipe = parse_recipe_prerequisites( { - parameter_href => $parameter_href, - recipe_name => $recipe_name, + active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, + recipe_name => $recipe_name, } ); - my $job_id_chain = $rec_atr{chain}; + my $outsample_directory = catdir( $active_parameter_href->{outdata_dir}, $sample_id, $recipe_name ); my $lanes_id = join $EMPTY_STR, @{ $file_info_href->{$sample_id}{lanes} }; my $outfile_tag = $file_info_href->{$sample_id}{$recipe_name}{file_tag}; - my $outfile_suffix = $rec_atr{outfile_suffix}; + my $outfile_suffix = $recipe{outfile_suffix}; my $outfile_path_prefix = catfile( $outsample_directory, $sample_id . $UNDERSCORE . q{lanes} . $UNDERSCORE . $lanes_id . $outfile_tag ); @@ -181,7 +179,7 @@ sub analysis_star_aln { %io, parse_io_outfiles( { - chain_id => $job_id_chain, + chain_id => $recipe{job_id_chain}, id => $sample_id, file_info_href => $file_info_href, file_paths_ref => [ $outfile_path_prefix . 
$outfile_suffix ], @@ -193,13 +191,6 @@ sub analysis_star_aln { my $outfile_name = ${ $io{out}{file_names} }[0]; my $outfile_path = $io{out}{file_path}; - my %recipe_resource = get_recipe_resources( - { - active_parameter_href => $active_parameter_href, - recipe_name => $recipe_name, - } - ); - ## Filehandles # Create anonymous filehandle my $filehandle = IO::Handle->new(); @@ -208,12 +199,12 @@ sub analysis_star_aln { my ( $recipe_file_path, $recipe_info_path ) = setup_script( { active_parameter_href => $active_parameter_href, - core_number => $recipe_resource{core_number}, + core_number => $recipe{core_number}, directory_id => $sample_id, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, - process_time => $recipe_resource{time}, + memory_allocation => $recipe{memory}, + process_time => $recipe{time}, recipe_directory => $recipe_name, recipe_name => $recipe_name, temp_directory => $temp_directory, @@ -291,22 +282,21 @@ sub analysis_star_aln { $active_parameter_href->{library_type} eq q{unstranded} ? q{intronMotif} : undef; star_aln( { - align_intron_max => $active_parameter_href->{align_intron_max}, - align_mates_gap_max => $active_parameter_href->{align_mates_gap_max}, - align_sjdb_overhang_min => $active_parameter_href->{align_sjdb_overhang_min}, - chim_junction_overhang_min => - $active_parameter_href->{chim_junction_overhang_min}, - chim_out_type => $active_parameter_href->{chim_out_type}, - chim_segment_min => $active_parameter_href->{chim_segment_min}, - filehandle => $filehandle, - genome_dir_path => $referencefile_dir_path, - infile_paths_ref => \@fastq_files, - out_sam_attr_rgline => $out_sam_attr_rgline, - out_sam_strand_field => $out_sam_strand_field, - outfile_name_prefix => $outfile_path_prefix . 
$DOT, - pe_overlap_nbases_min => $active_parameter_href->{pe_overlap_nbases_min}, - thread_number => $recipe_resource{core_number}, - two_pass_mode => $active_parameter_href->{two_pass_mode}, + align_intron_max => $active_parameter_href->{align_intron_max}, + align_mates_gap_max => $active_parameter_href->{align_mates_gap_max}, + align_sjdb_overhang_min => $active_parameter_href->{align_sjdb_overhang_min}, + chim_junction_overhang_min => $active_parameter_href->{chim_junction_overhang_min}, + chim_out_type => $active_parameter_href->{chim_out_type}, + chim_segment_min => $active_parameter_href->{chim_segment_min}, + filehandle => $filehandle, + genome_dir_path => $referencefile_dir_path, + infile_paths_ref => \@fastq_files, + out_sam_attr_rgline => $out_sam_attr_rgline, + out_sam_strand_field => $out_sam_strand_field, + outfile_name_prefix => $outfile_path_prefix . $DOT, + pe_overlap_nbases_min => $active_parameter_href->{pe_overlap_nbases_min}, + thread_number => $recipe{core_number}, + two_pass_mode => $active_parameter_href->{two_pass_mode}, }, ); say {$filehandle} $NEWLINE; @@ -350,7 +340,7 @@ sub analysis_star_aln { ## Close filehandle close $filehandle or $log->logcroak(q{Could not close filehandle}); - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { ## Collect QC metadata info for later use set_recipe_outfile_in_sample_info( @@ -375,13 +365,13 @@ sub analysis_star_aln { submit_recipe( { - base_command => $profile_base_command, - case_id => $case_id, - dependency_method => q{sample_to_sample}, - job_id_chain => $job_id_chain, - job_id_href => $job_id_href, - job_reservation_name => $active_parameter_href->{job_reservation_name}, - log => $log, + base_command => $profile_base_command, + case_id => $case_id, + dependency_method => q{sample_to_sample}, + job_id_chain => $recipe{job_id_chain}, + job_id_href => $job_id_href, + job_reservation_name => $active_parameter_href->{job_reservation_name}, + log => $log, max_parallel_processes_count_href => 
$file_info_href->{max_parallel_processes_count}, recipe_file_path => $recipe_file_path, @@ -493,12 +483,12 @@ sub analysis_star_aln_mixed { use MIP::File_info qw{ get_sample_file_attribute }; use MIP::Get::File qw{ get_io_files }; - use MIP::Get::Parameter qw{ get_recipe_attributes get_recipe_resources }; use MIP::Program::Gnu::Coreutils qw{ gnu_mv gnu_rm }; use MIP::Parse::File qw{ parse_io_outfiles }; use MIP::Program::Samtools qw{ samtools_index }; use MIP::Program::Star qw{ star_aln }; use MIP::Processmanagement::Processes qw{ submit_recipe }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Sample_info qw{ get_rg_header_line set_recipe_outfile_in_sample_info @@ -524,17 +514,10 @@ sub analysis_star_aln_mixed { } ); my @infile_paths = @{ $io{in}{file_paths} }; - my $recipe_mode = $active_parameter_href->{$recipe_name}; - my $job_id_chain = get_recipe_attributes( - { - attribute => q{chain}, - parameter_href => $parameter_href, - recipe_name => $recipe_name, - } - ); - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); @@ -550,7 +533,7 @@ sub analysis_star_aln_mixed { %io, parse_io_outfiles( { - chain_id => $job_id_chain, + chain_id => $recipe{job_id_chain}, id => $sample_id, file_info_href => $file_info_href, file_name_prefixes_ref => $file_info_sample{no_direction_infile_prefixes}, @@ -598,14 +581,14 @@ sub analysis_star_aln_mixed { my ( $recipe_file_path, $recipe_info_path ) = setup_script( { active_parameter_href => $active_parameter_href, - core_number => $recipe_resource{core_number}, + core_number => $recipe{core_number}, directory_id => $sample_id, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, + memory_allocation => $recipe{memory}, recipe_directory => $recipe_name, recipe_name => $recipe_name, - process_time => 
$recipe_resource{time}, + process_time => $recipe{time}, temp_directory => $temp_directory, ulimit_n => $active_parameter_href->{star_ulimit_n}, } @@ -648,23 +631,21 @@ sub analysis_star_aln_mixed { : undef; star_aln( { - filehandle => $filehandle, - align_intron_max => $active_parameter_href->{align_intron_max}, - align_mates_gap_max => $active_parameter_href->{align_mates_gap_max}, - align_sjdb_overhang_min => - $active_parameter_href->{align_sjdb_overhang_min}, - chim_junction_overhang_min => - $active_parameter_href->{chim_junction_overhang_min}, - chim_out_type => $active_parameter_href->{chim_out_type}, - chim_segment_min => $active_parameter_href->{chim_segment_min}, - genome_dir_path => $referencefile_dir_path, - infile_paths_ref => \@fastq_files, - out_sam_attr_rgline => $out_sam_attr_rgline, - out_sam_strand_field => $out_sam_strand_field, - outfile_name_prefix => $outfile_path_prefix . $DOT, - pe_overlap_nbases_min => $active_parameter_href->{pe_overlap_nbases_min}, - thread_number => $recipe_resource{core_number}, - two_pass_mode => $active_parameter_href->{two_pass_mode}, + filehandle => $filehandle, + align_intron_max => $active_parameter_href->{align_intron_max}, + align_mates_gap_max => $active_parameter_href->{align_mates_gap_max}, + align_sjdb_overhang_min => $active_parameter_href->{align_sjdb_overhang_min}, + chim_junction_overhang_min => $active_parameter_href->{chim_junction_overhang_min}, + chim_out_type => $active_parameter_href->{chim_out_type}, + chim_segment_min => $active_parameter_href->{chim_segment_min}, + genome_dir_path => $referencefile_dir_path, + infile_paths_ref => \@fastq_files, + out_sam_attr_rgline => $out_sam_attr_rgline, + out_sam_strand_field => $out_sam_strand_field, + outfile_name_prefix => $outfile_path_prefix . 
$DOT, + pe_overlap_nbases_min => $active_parameter_href->{pe_overlap_nbases_min}, + thread_number => $recipe{core_number}, + two_pass_mode => $active_parameter_href->{two_pass_mode}, }, ); say {$filehandle} $NEWLINE; @@ -711,7 +692,7 @@ sub analysis_star_aln_mixed { ## Close filehandles close $filehandle or $log->logcroak(q{Could not close filehandle}); - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { ## Collect QC metadata info for later use set_recipe_outfile_in_sample_info( @@ -738,14 +719,13 @@ sub analysis_star_aln_mixed { submit_recipe( { - base_command => $profile_base_command, - case_id => $case_id, - dependency_method => q{sample_to_sample_parallel}, - job_id_chain => $job_id_chain, - job_id_href => $job_id_href, - job_reservation_name => - $active_parameter_href->{job_reservation_name}, - log => $log, + base_command => $profile_base_command, + case_id => $case_id, + dependency_method => q{sample_to_sample_parallel}, + job_id_chain => $recipe{job_id_chain}, + job_id_href => $job_id_href, + job_reservation_name => $active_parameter_href->{job_reservation_name}, + log => $log, max_parallel_processes_count_href => $file_info_href->{max_parallel_processes_count}, recipe_file_path => $recipe_file_path, diff --git a/lib/MIP/Recipes/Analysis/Star_caller.pm b/lib/MIP/Recipes/Analysis/Star_caller.pm index 0c68505bb..857936831 100644 --- a/lib/MIP/Recipes/Analysis/Star_caller.pm +++ b/lib/MIP/Recipes/Analysis/Star_caller.pm @@ -112,11 +112,11 @@ sub analysis_star_caller { check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; use MIP::Get::File qw{ get_io_files }; - use MIP::Get::Parameter qw{ get_recipe_attributes get_recipe_resources }; use MIP::Program::Gnu::Coreutils qw{ gnu_echo }; use MIP::Parse::File qw{ parse_io_outfiles }; use MIP::Processmanagement::Processes qw{ submit_recipe }; use MIP::Program::Cyrius qw{ star_caller }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Sample_info qw{ set_file_path_to_store 
set_recipe_outfile_in_sample_info }; use MIP::Script::Setup_script qw{ setup_script }; @@ -126,17 +126,10 @@ sub analysis_star_caller { my $log = Log::Log4perl->get_logger($LOG_NAME); ## Unpack parameters - my $job_id_chain = get_recipe_attributes( - { - attribute => q{chain}, - parameter_href => $parameter_href, - recipe_name => $recipe_name, - } - ); - my $recipe_mode = $active_parameter_href->{$recipe_name}; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); @@ -162,7 +155,7 @@ sub analysis_star_caller { my %io = parse_io_outfiles( { - chain_id => $job_id_chain, + chain_id => $recipe{job_id_chain}, id => $case_id, file_info_href => $file_info_href, file_name_prefixes_ref => [$case_id], @@ -183,12 +176,12 @@ sub analysis_star_caller { my ( $recipe_file_path, $recipe_info_path ) = setup_script( { active_parameter_href => $active_parameter_href, - core_number => $recipe_resource{core_number}, + core_number => $recipe{core_number}, directory_id => $case_id, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, - process_time => $recipe_resource{time}, + memory_allocation => $recipe{memory}, + process_time => $recipe{time}, recipe_directory => $recipe_name, recipe_name => $recipe_name, } @@ -219,7 +212,7 @@ sub analysis_star_caller { manifest_file_path => $manifest_file_path, outdir_path => $outdir_path_prefix, outfile_prefix => $outfile_name_prefix, - thread_number => $recipe_resource{core_number}, + thread_number => $recipe{core_number}, } ); say {$filehandle} $NEWLINE; @@ -227,7 +220,7 @@ sub analysis_star_caller { ## Close filehandleS close $filehandle or $log->logcroak(q{Could not close filehandle}); - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { ## Collect QC metadata info for later use set_recipe_outfile_in_sample_info( @@ -250,13 +243,13 @@ 
sub analysis_star_caller { submit_recipe( { - base_command => $profile_base_command, - case_id => $case_id, - dependency_method => q{sample_to_case}, - job_id_chain => $job_id_chain, - job_id_href => $job_id_href, - job_reservation_name => $active_parameter_href->{job_reservation_name}, - log => $log, + base_command => $profile_base_command, + case_id => $case_id, + dependency_method => q{sample_to_case}, + job_id_chain => $recipe{job_id_chain}, + job_id_href => $job_id_href, + job_reservation_name => $active_parameter_href->{job_reservation_name}, + log => $log, max_parallel_processes_count_href => $file_info_href->{max_parallel_processes_count}, recipe_file_path => $recipe_file_path, diff --git a/lib/MIP/Recipes/Analysis/Star_fusion.pm b/lib/MIP/Recipes/Analysis/Star_fusion.pm index 61904ee73..0417f4545 100644 --- a/lib/MIP/Recipes/Analysis/Star_fusion.pm +++ b/lib/MIP/Recipes/Analysis/Star_fusion.pm @@ -128,11 +128,11 @@ sub analysis_star_fusion { use MIP::File::Format::Star_fusion qw{ create_star_fusion_sample_file }; use MIP::Get::File qw{ get_io_files }; - use MIP::Get::Parameter qw{ get_recipe_attributes get_recipe_resources }; use MIP::Program::Gnu::Coreutils qw{ gnu_cp }; use MIP::Parse::File qw{ parse_io_outfiles }; use MIP::Program::Star_fusion qw{ star_fusion }; use MIP::Processmanagement::Processes qw{ submit_recipe }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Sample_info qw{ set_file_path_to_store set_recipe_outfile_in_sample_info }; use MIP::Script::Setup_script qw{ setup_script }; @@ -159,29 +159,22 @@ sub analysis_star_fusion { my @infile_paths = @{ $io{in}{file_paths} }; ## Build outfile_paths - my %recipe_attribute = get_recipe_attributes( - { - parameter_href => $parameter_href, - recipe_name => $recipe_name, - } - ); - my $outdir_path = - catdir( $active_parameter_href->{outdata_dir}, $sample_id, $recipe_name ); - my $outsample_name = $STAR_FUSION_PREFIX . 
$recipe_attribute{outfile_suffix}; - my @file_paths = catfile( $outdir_path, $outsample_name ); - my $recipe_mode = $active_parameter_href->{$recipe_name}; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); + my $outdir_path = catdir( $active_parameter_href->{outdata_dir}, $sample_id, $recipe_name ); + my $outsample_name = $STAR_FUSION_PREFIX . $recipe{outfile_suffix}; + my @file_paths = catfile( $outdir_path, $outsample_name ); %io = ( %io, parse_io_outfiles( { - chain_id => $recipe_attribute{chain}, + chain_id => $recipe{job_id_chain}, id => $sample_id, file_info_href => $file_info_href, file_paths_ref => \@file_paths, @@ -199,14 +192,14 @@ sub analysis_star_fusion { my ( $recipe_file_path, $recipe_info_path ) = setup_script( { active_parameter_href => $active_parameter_href, - core_number => $recipe_resource{core_number}, + core_number => $recipe{core_number}, directory_id => $sample_id, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, + memory_allocation => $recipe{memory}, recipe_directory => $recipe_name, recipe_name => $recipe_name, - process_time => $recipe_resource{time}, + process_time => $recipe{time}, temp_directory => $temp_directory, ulimit_n => $active_parameter_href->{star_ulimit_n}, } @@ -232,13 +225,12 @@ sub analysis_star_fusion { star_fusion( { - cpu => $recipe_resource{core_number}, + cpu => $recipe{core_number}, examine_coding_effect => 1, filehandle => $filehandle, fusion_inspector => q{inspect}, genome_lib_dir_path => $active_parameter_href->{star_fusion_genome_lib_dir}, - min_junction_reads => - $active_parameter_href->{star_fusion_min_junction_reads}, + min_junction_reads => $active_parameter_href->{star_fusion_min_junction_reads}, output_directory_path => $outdir_path, samples_file_path => $sample_files_path, } @@ -248,7 
+240,7 @@ sub analysis_star_fusion { ## Close filehandleS close $filehandle or $log->logcroak(q{Could not close filehandle}); - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { ## Collect QC metadata info for later use set_recipe_outfile_in_sample_info( { @@ -271,13 +263,13 @@ sub analysis_star_fusion { submit_recipe( { - base_command => $profile_base_command, - case_id => $case_id, - dependency_method => q{sample_to_island}, - job_id_chain => $recipe_attribute{chain}, - job_id_href => $job_id_href, - job_reservation_name => $active_parameter_href->{job_reservation_name}, - log => $log, + base_command => $profile_base_command, + case_id => $case_id, + dependency_method => q{sample_to_island}, + job_id_chain => $recipe{job_id_chain}, + job_id_href => $job_id_href, + job_reservation_name => $active_parameter_href->{job_reservation_name}, + log => $log, max_parallel_processes_count_href => $file_info_href->{max_parallel_processes_count}, recipe_file_path => $recipe_file_path, diff --git a/lib/MIP/Recipes/Analysis/Stringtie.pm b/lib/MIP/Recipes/Analysis/Stringtie.pm index 98d79c1c8..5e0110510 100644 --- a/lib/MIP/Recipes/Analysis/Stringtie.pm +++ b/lib/MIP/Recipes/Analysis/Stringtie.pm @@ -135,10 +135,10 @@ sub analysis_stringtie { check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; use MIP::Get::File qw{ get_io_files }; - use MIP::Get::Parameter qw{ get_recipe_attributes get_recipe_resources }; use MIP::Parse::File qw{ parse_io_outfiles }; use MIP::Processmanagement::Processes qw{ submit_recipe }; use MIP::Program::Stringtie qw{ stringtie }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Script::Setup_script qw{ setup_script }; use MIP::Sample_info qw{ set_file_path_to_store set_recipe_outfile_in_sample_info }; @@ -165,18 +165,11 @@ sub analysis_stringtie { my $infile_name = $infile_name_prefix . $infile_suffix; my $infile_path = $infile_path_prefix . 
$infile_suffix; - my %recipe_attribute = get_recipe_attributes( - { - parameter_href => $parameter_href, - recipe_name => $recipe_name, - } - ); - my $job_id_chain = $recipe_attribute{chain}; - my $recipe_mode = $active_parameter_href->{$recipe_name}; my $annotationfile_path = $active_parameter_href->{transcript_annotation}; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); @@ -187,7 +180,7 @@ sub analysis_stringtie { %io, parse_io_outfiles( { - chain_id => $job_id_chain, + chain_id => $recipe{job_id_chain}, file_info_href => $file_info_href, file_name_prefixes_ref => [$infile_name_prefix], id => $sample_id, @@ -211,12 +204,12 @@ sub analysis_stringtie { my ( $recipe_file_path, $recipe_info_path ) = setup_script( { active_parameter_href => $active_parameter_href, - core_number => $recipe_resource{core_number}, + core_number => $recipe{core_number}, directory_id => $sample_id, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, - process_time => $recipe_resource{time}, + memory_allocation => $recipe{memory}, + process_time => $recipe{time}, recipe_directory => $recipe_name, recipe_name => $recipe_name, temp_directory => $temp_directory, @@ -234,13 +227,13 @@ sub analysis_stringtie { . $outfile_suffix, filehandle => $filehandle, gene_abundance_outfile_path => $outfile_path_prefix . 
q{_gene_abound.txt}, - gtf_reference_path => $active_parameter_href->{transcript_annotation}, - infile_path => $infile_path, - junction_reads => $active_parameter_href->{stringtie_junction_reads}, - library_type => $active_parameter_href->{library_type}, - minimum_coverage => $active_parameter_href->{stringtie_minimum_coverage}, - outfile_path => $outfile_path, - threads => $recipe_resource{core_number}, + gtf_reference_path => $active_parameter_href->{transcript_annotation}, + infile_path => $infile_path, + junction_reads => $active_parameter_href->{stringtie_junction_reads}, + library_type => $active_parameter_href->{library_type}, + minimum_coverage => $active_parameter_href->{stringtie_minimum_coverage}, + outfile_path => $outfile_path, + threads => $recipe{core_number}, } ); say {$filehandle} $NEWLINE; @@ -248,7 +241,7 @@ sub analysis_stringtie { ## Close filehandle close $filehandle; - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { ## Collect QC metadata info for later use set_recipe_outfile_in_sample_info( @@ -273,13 +266,13 @@ sub analysis_stringtie { submit_recipe( { - base_command => $profile_base_command, - case_id => $case_id, - dependency_method => q{sample_to_sample}, - job_id_chain => $job_id_chain, - job_id_href => $job_id_href, - job_reservation_name => $active_parameter_href->{job_reservation_name}, - log => $log, + base_command => $profile_base_command, + case_id => $case_id, + dependency_method => q{sample_to_sample}, + job_id_chain => $recipe{job_id_chain}, + job_id_href => $job_id_href, + job_reservation_name => $active_parameter_href->{job_reservation_name}, + log => $log, max_parallel_processes_count_href => $file_info_href->{max_parallel_processes_count}, recipe_file_path => $recipe_file_path, diff --git a/lib/MIP/Recipes/Analysis/Sv_annotate.pm b/lib/MIP/Recipes/Analysis/Sv_annotate.pm index 3e5bf326a..cb98ae3b7 100644 --- a/lib/MIP/Recipes/Analysis/Sv_annotate.pm +++ b/lib/MIP/Recipes/Analysis/Sv_annotate.pm @@ -128,7 +128,6 @@ 
sub analysis_sv_annotate { check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; use MIP::Get::File qw{ get_io_files }; - use MIP::Get::Parameter qw{ get_recipe_attributes get_recipe_resources }; use MIP::Program::Gnu::Coreutils qw(gnu_mv); use MIP::Io::Read qw{ read_from_file }; use MIP::Parse::File qw{ parse_io_outfiles }; @@ -139,6 +138,7 @@ sub analysis_sv_annotate { use MIP::Program::Picardtools qw{ sort_vcf }; use MIP::Program::Svdb qw{ svdb_query }; use MIP::Program::Vcfanno qw{ vcfanno }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Sample_info qw{ set_recipe_outfile_in_sample_info }; use MIP::Script::Setup_script qw{ setup_script }; @@ -164,19 +164,12 @@ sub analysis_sv_annotate { my $infile_path = $infile_path_prefix . $infile_suffix; my $consensus_analysis_type = $parameter_href->{cache}{consensus_analysis_type}; - my $job_id_chain = get_recipe_attributes( - { - attribute => q{chain}, - parameter_href => $parameter_href, - recipe_name => $recipe_name, - } - ); - my $recipe_mode = $active_parameter_href->{$recipe_name}; - my $sequence_dict_file = catfile( $reference_dir, + my $sequence_dict_file = catfile( $reference_dir, $file_info_href->{human_genome_reference_name_prefix} . $DOT . 
q{dict} ); - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); @@ -185,7 +178,7 @@ sub analysis_sv_annotate { %io, parse_io_outfiles( { - chain_id => $job_id_chain, + chain_id => $recipe{job_id_chain}, id => $case_id, file_info_href => $file_info_href, file_name_prefixes_ref => [$infile_name_prefix], @@ -208,12 +201,12 @@ sub analysis_sv_annotate { my ( $recipe_file_path, $recipe_info_path ) = setup_script( { active_parameter_href => $active_parameter_href, - core_number => $recipe_resource{core_number}, + core_number => $recipe{core_number}, directory_id => $case_id, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, - process_time => $recipe_resource{time}, + memory_allocation => $recipe{memory}, + process_time => $recipe{time}, recipe_directory => $recipe_name, recipe_name => $recipe_name, temp_directory => $temp_directory, @@ -255,11 +248,7 @@ sub analysis_sv_annotate { if ($annotation_file_counter) { $svdb_infile_path = - $outfile_path_prefix - . $alt_file_tag - . $outfile_suffix - . $DOT - . $outfile_tracker; + $outfile_path_prefix . $alt_file_tag . $outfile_suffix . $DOT . $outfile_tracker; ## Increment now that infile has been set $outfile_tracker++; @@ -324,10 +313,9 @@ sub analysis_sv_annotate { { active_parameter_href => $active_parameter_href, filehandle => $filehandle, - infile_paths_ref => - [ $outfile_path_prefix . $alt_file_tag . $outfile_suffix ], - outfile => $outfile_path_prefix . $outfile_alt_file_tag . $outfile_suffix, - sequence_dict_file => $sequence_dict_file, + infile_paths_ref => [ $outfile_path_prefix . $alt_file_tag . $outfile_suffix ], + outfile => $outfile_path_prefix . $outfile_alt_file_tag . 
$outfile_suffix, + sequence_dict_file => $sequence_dict_file, } ); say {$filehandle} $NEWLINE; @@ -340,7 +328,7 @@ sub analysis_sv_annotate { ## Build the exclude filter command my $exclude_filter = _build_bcftools_filter( { - annotations_ref => \@svdb_query_annotations, + annotations_ref => \@svdb_query_annotations, fqf_bcftools_filter_threshold => $active_parameter_href->{fqf_bcftools_filter_threshold}, } @@ -352,8 +340,8 @@ sub analysis_sv_annotate { apply_filters_ref => [qw{ PASS }], exclude => $exclude_filter, filehandle => $filehandle, - infile_path => $outfile_path_prefix . $alt_file_tag . $outfile_suffix, - outfile_path => $outfile_path_prefix + infile_path => $outfile_path_prefix . $alt_file_tag . $outfile_suffix, + outfile_path => $outfile_path_prefix . $alt_file_tag . $UNDERSCORE . q{filt} . $outfile_suffix, @@ -372,9 +360,9 @@ sub analysis_sv_annotate { say {$filehandle} q{## Remove common variants}; vcfanno( { - filehandle => $filehandle, - infile_path => $outfile_path_prefix . $alt_file_tag . $outfile_suffix, - luafile_path => $active_parameter_href->{vcfanno_functions}, + filehandle => $filehandle, + infile_path => $outfile_path_prefix . $alt_file_tag . $outfile_suffix, + luafile_path => $active_parameter_href->{vcfanno_functions}, stderrfile_path_append => $stderrfile_path, toml_configfile_path => $active_parameter_href->{sv_vcfanno_config}, } @@ -402,7 +390,7 @@ sub analysis_sv_annotate { ## Build the exclude filter command my $exclude_filter = _build_bcftools_filter( { - annotations_ref => \@vcfanno_annotations, + annotations_ref => \@vcfanno_annotations, fqf_bcftools_filter_threshold => $active_parameter_href->{fqf_bcftools_filter_threshold}, } @@ -410,11 +398,11 @@ sub analysis_sv_annotate { bcftools_filter( { - exclude => $exclude_filter, - filehandle => $filehandle, - infile_path => $DASH, - outfile_path => $outfile_path_prefix . $alt_file_tag . 
$outfile_suffix, - output_type => q{v}, + exclude => $exclude_filter, + filehandle => $filehandle, + infile_path => $DASH, + outfile_path => $outfile_path_prefix . $alt_file_tag . $outfile_suffix, + output_type => q{v}, stderrfile_path_append => $stderrfile_path, } ); @@ -429,10 +417,9 @@ sub analysis_sv_annotate { { active_parameter_href => $active_parameter_href, filehandle => $filehandle, - infile_paths_ref => - [ $outfile_path_prefix . $alt_file_tag . $outfile_suffix ], - outfile => $outfile_path, - sequence_dict_file => $sequence_dict_file, + infile_paths_ref => [ $outfile_path_prefix . $alt_file_tag . $outfile_suffix ], + outfile => $outfile_path, + sequence_dict_file => $sequence_dict_file, } ); say {$filehandle} $NEWLINE; @@ -440,7 +427,7 @@ sub analysis_sv_annotate { close $filehandle or $log->logcroak(q{Could not close filehandle}); - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { set_recipe_outfile_in_sample_info( { @@ -452,13 +439,13 @@ sub analysis_sv_annotate { submit_recipe( { - base_command => $profile_base_command, - case_id => $case_id, - dependency_method => q{sample_to_case}, - job_id_chain => $job_id_chain, - job_id_href => $job_id_href, - job_reservation_name => $active_parameter_href->{job_reservation_name}, - log => $log, + base_command => $profile_base_command, + case_id => $case_id, + dependency_method => q{sample_to_case}, + job_id_chain => $recipe{job_id_chain}, + job_id_href => $job_id_href, + job_reservation_name => $active_parameter_href->{job_reservation_name}, + log => $log, max_parallel_processes_count_href => $file_info_href->{max_parallel_processes_count}, recipe_file_path => $recipe_file_path, diff --git a/lib/MIP/Recipes/Analysis/Sv_combinevariantcallsets.pm b/lib/MIP/Recipes/Analysis/Sv_combinevariantcallsets.pm index 379e1dd8a..b29bfd38e 100644 --- a/lib/MIP/Recipes/Analysis/Sv_combinevariantcallsets.pm +++ b/lib/MIP/Recipes/Analysis/Sv_combinevariantcallsets.pm @@ -128,7 +128,7 @@ sub 
analysis_sv_combinevariantcallsets { check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; use MIP::Get::File qw{ get_io_files }; - use MIP::Get::Parameter qw{ get_recipe_attributes get_recipe_resources }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Program::Gnu::Coreutils qw{ gnu_mv }; use MIP::Parse::File qw{ parse_io_outfiles }; use MIP::Processmanagement::Processes qw{ submit_recipe }; @@ -149,20 +149,11 @@ sub analysis_sv_combinevariantcallsets { my $log = Log::Log4perl->get_logger($LOG_NAME); ## Unpack parameters - my $job_id_chain = get_recipe_attributes( - { - attribute => q{chain}, - parameter_href => $parameter_href, - recipe_name => $recipe_name, - } - ); - - my $recipe_mode = $active_parameter_href->{$recipe_name}; my @structural_variant_callers; ## Only process active callers - foreach my $structural_variant_caller ( - @{ $parameter_href->{cache}{structural_variant_callers} } ) + foreach + my $structural_variant_caller ( @{ $parameter_href->{cache}{structural_variant_callers} } ) { if ( $active_parameter_href->{$structural_variant_caller} ) { @@ -170,9 +161,10 @@ sub analysis_sv_combinevariantcallsets { } } - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); @@ -180,7 +172,7 @@ sub analysis_sv_combinevariantcallsets { ## Set and get the io files per chain, id and stream my %io = parse_io_outfiles( { - chain_id => $job_id_chain, + chain_id => $recipe{job_id_chain}, id => $case_id, file_info_href => $file_info_href, file_name_prefixes_ref => [$case_id], @@ -203,12 +195,12 @@ sub analysis_sv_combinevariantcallsets { my ( $recipe_file_path, $recipe_info_path ) = setup_script( { active_parameter_href => $active_parameter_href, - core_number => $recipe_resource{core_number}, + core_number => $recipe{core_number}, directory_id => $case_id, filehandle => $filehandle, 
job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, - process_time => $recipe_resource{time}, + memory_allocation => $recipe{memory}, + process_time => $recipe{time}, recipe_directory => $recipe_name, recipe_name => $recipe_name, temp_directory => $temp_directory, @@ -344,7 +336,7 @@ sub analysis_sv_combinevariantcallsets { close $filehandle or $log->logcroak(q{Could not close filehandle}); - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { set_recipe_outfile_in_sample_info( { @@ -382,13 +374,13 @@ sub analysis_sv_combinevariantcallsets { submit_recipe( { - base_command => $profile_base_command, - case_id => $case_id, - dependency_method => q{sample_to_case}, - job_id_chain => $job_id_chain, - job_id_href => $job_id_href, - job_reservation_name => $active_parameter_href->{job_reservation_name}, - log => $log, + base_command => $profile_base_command, + case_id => $case_id, + dependency_method => q{sample_to_case}, + job_id_chain => $recipe{job_id_chain}, + job_id_href => $job_id_href, + job_reservation_name => $active_parameter_href->{job_reservation_name}, + log => $log, max_parallel_processes_count_href => $file_info_href->{max_parallel_processes_count}, parallel_chains_ref => \@parallel_chains, @@ -571,7 +563,7 @@ sub _preprocess_joint_callers_file { _add_to_parallel_chain( { - parallel_chains_ref => $parallel_chains_ref, + parallel_chains_ref => $parallel_chains_ref, structural_variant_caller_chain => $parameter_href->{$structural_variant_caller}{chain}, } @@ -588,10 +580,10 @@ sub _preprocess_joint_callers_file { say {$filehandle} q{## Split multiallelic variants}; bcftools_norm( { - filehandle => $filehandle, - infile_path => $infile_path, - multiallelic => q{-}, - outfile_path => $decompose_outfile_path, + filehandle => $filehandle, + infile_path => $infile_path, + multiallelic => q{-}, + outfile_path => $decompose_outfile_path, } ); say {$filehandle} $NEWLINE; @@ -711,12 +703,11 @@ sub _preprocess_single_callers_file { my 
$infile_suffix = $sample_io{$stream}{file_suffix}; my $infile_path = $infile_path_prefix . $infile_suffix; - push @{ $file_path_href->{$structural_variant_caller} }, - $infile_path . $DOT . q{gz}; + push @{ $file_path_href->{$structural_variant_caller} }, $infile_path . $DOT . q{gz}; _add_to_parallel_chain( { - parallel_chains_ref => $parallel_chains_ref, + parallel_chains_ref => $parallel_chains_ref, structural_variant_caller_chain => $parameter_href->{$structural_variant_caller}{chain}, } @@ -842,8 +833,7 @@ sub _merge_or_reformat_single_callers_file { if ( scalar @{ $active_parameter_href->{sample_ids} } > 1 ) { ## Merge all structural variant caller's vcf files per sample_id - say {$filehandle} - q{## Merge all structural variant caller's vcf files per sample_id}; + say {$filehandle} q{## Merge all structural variant caller's vcf files per sample_id}; bcftools_merge( { diff --git a/lib/MIP/Recipes/Analysis/Sv_reformat.pm b/lib/MIP/Recipes/Analysis/Sv_reformat.pm index 1e28fc36f..28eef0dcd 100644 --- a/lib/MIP/Recipes/Analysis/Sv_reformat.pm +++ b/lib/MIP/Recipes/Analysis/Sv_reformat.pm @@ -131,7 +131,7 @@ sub analysis_reformat_sv { use MIP::Analysis qw{ get_vcf_parser_analysis_suffix }; use MIP::Get::File qw{ get_io_files }; - use MIP::Get::Parameter qw{ get_recipe_attributes get_recipe_resources }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Program::Gnu::Software::Gnu_grep qw{ gnu_grep }; use MIP::Parse::File qw{ parse_io_outfiles }; use MIP::Processmanagement::Processes qw{ submit_recipe }; @@ -163,25 +163,17 @@ sub analysis_reformat_sv { my @infile_paths = @{ $io{in}{file_paths} }; my $consensus_analysis_type = $parameter_href->{cache}{consensus_analysis_type}; - my $job_id_chain = get_recipe_attributes( - { - attribute => q{chain}, - parameter_href => $parameter_href, - recipe_name => $recipe_name, - } - ); - my $recipe_mode = $active_parameter_href->{$recipe_name}; - my %recipe_resource = get_recipe_resources( + my %recipe = 
parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); my @vcfparser_analysis_types = get_vcf_parser_analysis_suffix( { - vcfparser_outfile_count => - $active_parameter_href->{sv_vcfparser_outfile_count}, + vcfparser_outfile_count => $active_parameter_href->{sv_vcfparser_outfile_count}, } ); @@ -194,7 +186,7 @@ sub analysis_reformat_sv { %io, parse_io_outfiles( { - chain_id => $job_id_chain, + chain_id => $recipe{job_id_chain}, id => $case_id, file_info_href => $file_info_href, file_name_prefix => $infile_name_prefix, @@ -218,12 +210,12 @@ sub analysis_reformat_sv { my ( $recipe_file_path, $recipe_info_path ) = setup_script( { active_parameter_href => $active_parameter_href, - core_number => $recipe_resource{core_number}, + core_number => $recipe{core_number}, directory_id => $case_id, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, - process_time => $recipe_resource{time}, + memory_allocation => $recipe{memory}, + process_time => $recipe{time}, recipe_directory => $recipe_name, recipe_name => $recipe_name, temp_directory => $temp_directory, @@ -254,8 +246,7 @@ sub analysis_reformat_sv { { filehandle => $filehandle, infile_paths_ref => [$infile_path], - java_jar => - catfile( $active_parameter_href->{picardtools_path}, q{picard.jar} ), + java_jar => catfile( $active_parameter_href->{picardtools_path}, q{picard.jar} ), java_use_large_pages => $active_parameter_href->{java_use_large_pages}, memory_allocation => q{Xmx} . $JAVA_MEMORY_ALLOCATION . q{g}, outfile_path => $outfile_paths[$infile_index], @@ -274,13 +265,10 @@ sub analysis_reformat_sv { ? 
q{sv_reformat_remove_genes_file_clinical} : q{sv_reformat_remove_genes_file_research}; - my $filter_file_path = catfile( $reference_dir, - $active_parameter_href->{sv_reformat_remove_genes_file} ); + my $filter_file_path = + catfile( $reference_dir, $active_parameter_href->{sv_reformat_remove_genes_file} ); my $filter_outfile_path = - $outfile_path_prefix - . $UNDERSCORE - . q{filtered} - . $outfile_suffixes[$infile_index]; + $outfile_path_prefix . $UNDERSCORE . q{filtered} . $outfile_suffixes[$infile_index]; ## Removes contig_names from contigs array if no male or other found gnu_grep( { @@ -293,7 +281,7 @@ sub analysis_reformat_sv { ); say {$filehandle} $NEWLINE; - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { ## Save filtered file set_recipe_metafile_in_sample_info( @@ -318,7 +306,7 @@ sub analysis_reformat_sv { } ); - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { my $outfile_path = $outfile_paths[$infile_index]; @@ -347,17 +335,17 @@ sub analysis_reformat_sv { close $filehandle or $log->logcroak(q{Could not close filehandle}); - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { submit_recipe( { - base_command => $profile_base_command, - case_id => $case_id, - dependency_method => q{sample_to_case}, - job_id_chain => $job_id_chain, - job_id_href => $job_id_href, - job_reservation_name => $active_parameter_href->{job_reservation_name}, - log => $log, + base_command => $profile_base_command, + case_id => $case_id, + dependency_method => q{sample_to_case}, + job_id_chain => $recipe{job_id_chain}, + job_id_href => $job_id_href, + job_reservation_name => $active_parameter_href->{job_reservation_name}, + log => $log, max_parallel_processes_count_href => $file_info_href->{max_parallel_processes_count}, recipe_file_path => $recipe_file_path, diff --git a/lib/MIP/Recipes/Analysis/Telomerecat.pm b/lib/MIP/Recipes/Analysis/Telomerecat.pm index 8394f13db..931a86c8c 100644 --- a/lib/MIP/Recipes/Analysis/Telomerecat.pm +++ 
b/lib/MIP/Recipes/Analysis/Telomerecat.pm @@ -111,10 +111,10 @@ sub analysis_telomerecat { check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; use MIP::Get::File qw{ get_io_files }; - use MIP::Get::Parameter qw{ get_recipe_attributes get_recipe_resources }; use MIP::Parse::File qw{ parse_io_outfiles }; use MIP::Program::Telomerecat qw{ telomerecat_bam2length }; use MIP::Processmanagement::Processes qw{ submit_recipe }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Sample_info qw{ get_pedigree_sample_id_attributes set_file_path_to_store set_recipe_outfile_in_sample_info }; use MIP::Script::Setup_script qw{ setup_script }; @@ -125,18 +125,10 @@ sub analysis_telomerecat { my $log = Log::Log4perl->get_logger($LOG_NAME); ## Unpack parameters - my $job_id_chain = get_recipe_attributes( - { - attribute => q{chain}, - parameter_href => $parameter_href, - recipe_name => $recipe_name, - } - ); - my $recipe_mode = $active_parameter_href->{$recipe_name}; - - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); @@ -179,7 +171,7 @@ sub analysis_telomerecat { ## Set and get the io files per chain, id and stream my %io = parse_io_outfiles( { - chain_id => $job_id_chain, + chain_id => $recipe{job_id_chain}, id => $case_id, file_info_href => $file_info_href, file_name_prefixes_ref => [$case_id], @@ -198,12 +190,12 @@ sub analysis_telomerecat { my ( $recipe_file_path, $recipe_info_path ) = setup_script( { active_parameter_href => $active_parameter_href, - core_number => $recipe_resource{core_number}, + core_number => $recipe{core_number}, directory_id => $case_id, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, - process_time => $recipe_resource{time}, + memory_allocation => $recipe{memory}, + process_time => $recipe{time}, recipe_directory => 
$recipe_name, recipe_name => $recipe_name, } @@ -218,7 +210,7 @@ sub analysis_telomerecat { filehandle => $filehandle, infile_paths_ref => \@infile_paths, outfile_path => $outfile_path, - processes => $recipe_resource{core_number}, + processes => $recipe{core_number}, temp_directory => $active_parameter_href->{temp_directory}, } ); @@ -236,7 +228,7 @@ sub analysis_telomerecat { ## Close filehandle close $filehandle or $log->logcroak(q{Could not close filehandle}); - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { ## Collect QC metadata info for later use set_recipe_outfile_in_sample_info( @@ -259,13 +251,13 @@ sub analysis_telomerecat { submit_recipe( { - base_command => $profile_base_command, - case_id => $case_id, - dependency_method => q{sample_to_case}, - job_id_chain => $job_id_chain, - job_id_href => $job_id_href, - job_reservation_name => $active_parameter_href->{job_reservation_name}, - log => $log, + base_command => $profile_base_command, + case_id => $case_id, + dependency_method => q{sample_to_case}, + job_id_chain => $recipe{job_id_chain}, + job_id_href => $job_id_href, + job_reservation_name => $active_parameter_href->{job_reservation_name}, + log => $log, max_parallel_processes_count_href => $file_info_href->{max_parallel_processes_count}, recipe_file_path => $recipe_file_path, diff --git a/lib/MIP/Recipes/Analysis/Tiddit.pm b/lib/MIP/Recipes/Analysis/Tiddit.pm index c1d238992..39752f272 100644 --- a/lib/MIP/Recipes/Analysis/Tiddit.pm +++ b/lib/MIP/Recipes/Analysis/Tiddit.pm @@ -124,11 +124,11 @@ sub analysis_tiddit { use MIP::Cluster qw{ get_core_number update_memory_allocation }; use MIP::Get::File qw{ get_io_files }; - use MIP::Get::Parameter qw{ get_recipe_attributes get_recipe_resources }; use MIP::Parse::File qw{ parse_io_outfiles }; use MIP::Processmanagement::Processes qw{ print_wait submit_recipe }; use MIP::Program::Svdb qw{ svdb_merge }; use MIP::Program::Tiddit qw{ tiddit_sv }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; 
use MIP::Sample_info qw{ set_recipe_outfile_in_sample_info }; use MIP::Script::Setup_script qw{ setup_script }; @@ -138,20 +138,14 @@ sub analysis_tiddit { my $log = Log::Log4perl->get_logger($LOG_NAME); ## Unpack parameters - my $job_id_chain = get_recipe_attributes( - { - attribute => q{chain}, - parameter_href => $parameter_href, - recipe_name => $recipe_name, - } - ); my $max_cores_per_node = $active_parameter_href->{max_cores_per_node}; my $modifier_core_number = scalar( @{ $active_parameter_href->{sample_ids} } ); - my $recipe_mode = $active_parameter_href->{$recipe_name}; - my %recipe_resource = get_recipe_resources( + + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); @@ -159,7 +153,7 @@ sub analysis_tiddit { ## Set and get the io files per chain, id and stream my %io = parse_io_outfiles( { - chain_id => $job_id_chain, + chain_id => $recipe{job_id_chain}, id => $case_id, file_info_href => $file_info_href, file_name_prefixes_ref => [$case_id], @@ -182,14 +176,14 @@ sub analysis_tiddit { { max_cores_per_node => $max_cores_per_node, modifier_core_number => $modifier_core_number, - recipe_core_number => $recipe_resource{core_number}, + recipe_core_number => $recipe{core_number}, } ); my $memory_allocation = update_memory_allocation( { node_ram_memory => $active_parameter_href->{node_ram_memory}, parallel_processes => $core_number, - process_memory_allocation => $recipe_resource{memory}, + process_memory_allocation => $recipe{memory}, } ); @@ -202,7 +196,7 @@ sub analysis_tiddit { filehandle => $filehandle, job_id_href => $job_id_href, memory_allocation => $memory_allocation, - process_time => $recipe_resource{time}, + process_time => $recipe{time}, recipe_directory => $recipe_name, recipe_name => $recipe_name, temp_directory => $temp_directory, @@ -215,9 +209,7 @@ sub analysis_tiddit { ## Collect infiles for all sample_ids to enable migration to 
temporary directory SAMPLE_ID: - while ( my ( $sample_id_index, $sample_id ) = - each @{ $active_parameter_href->{sample_ids} } ) - { + while ( my ( $sample_id_index, $sample_id ) = each @{ $active_parameter_href->{sample_ids} } ) { ## Get the io infiles per chain and id my %sample_io = get_io_files( @@ -233,9 +225,8 @@ sub analysis_tiddit { my $infile_suffix = $sample_io{in}{file_suffix}; my $infile_path = $infile_path_prefix . $infile_suffix; - $tiddit_sample_file_info{$sample_id}{in} = $infile_path; - $tiddit_sample_file_info{$sample_id}{out} = - $outfile_path_prefix . $UNDERSCORE . $sample_id; + $tiddit_sample_file_info{$sample_id}{in} = $infile_path; + $tiddit_sample_file_info{$sample_id}{out} = $outfile_path_prefix . $UNDERSCORE . $sample_id; } say {$filehandle} q{wait}, $NEWLINE; @@ -244,9 +235,7 @@ sub analysis_tiddit { ## Tiddit sv calling per sample id SAMPLE_ID: - while ( my ( $sample_id_index, $sample_id ) = - each @{ $active_parameter_href->{sample_ids} } ) - { + while ( my ( $sample_id_index, $sample_id ) = each @{ $active_parameter_href->{sample_ids} } ) { $process_batches_count = print_wait( { @@ -260,8 +249,8 @@ sub analysis_tiddit { ## Tiddit tiddit_sv( { - filehandle => $filehandle, - infile_path => $tiddit_sample_file_info{$sample_id}{in}, + filehandle => $filehandle, + infile_path => $tiddit_sample_file_info{$sample_id}{in}, minimum_number_supporting_pairs => $active_parameter_href->{tiddit_minimum_number_supporting_pairs}, outfile_path_prefix => $tiddit_sample_file_info{$sample_id}{out}, @@ -290,7 +279,7 @@ sub analysis_tiddit { close $filehandle; - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { set_recipe_outfile_in_sample_info( { @@ -302,13 +291,13 @@ sub analysis_tiddit { submit_recipe( { - base_command => $profile_base_command, - case_id => $case_id, - dependency_method => q{sample_to_case}, - job_id_chain => $job_id_chain, - job_id_href => $job_id_href, - job_reservation_name => $active_parameter_href->{job_reservation_name}, - 
log => $log, + base_command => $profile_base_command, + case_id => $case_id, + dependency_method => q{sample_to_case}, + job_id_chain => $recipe{job_id_chain}, + job_id_href => $job_id_href, + job_reservation_name => $active_parameter_href->{job_reservation_name}, + log => $log, max_parallel_processes_count_href => $file_info_href->{max_parallel_processes_count}, recipe_file_path => $recipe_file_path, diff --git a/lib/MIP/Recipes/Analysis/Tiddit_coverage.pm b/lib/MIP/Recipes/Analysis/Tiddit_coverage.pm index 6a34a7d8c..de7251328 100644 --- a/lib/MIP/Recipes/Analysis/Tiddit_coverage.pm +++ b/lib/MIP/Recipes/Analysis/Tiddit_coverage.pm @@ -121,12 +121,12 @@ sub analysis_tiddit_coverage { check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; use MIP::Get::File qw{ get_io_files }; - use MIP::Get::Parameter qw{ get_recipe_attributes get_recipe_resources }; use MIP::Parse::File qw{ parse_io_outfiles }; use MIP::Processmanagement::Processes qw{ submit_recipe }; use MIP::Program::Tiddit qw{ tiddit_coverage }; use MIP::Program::Ucsc qw{ ucsc_wig_to_big_wig }; use MIP::Reference qw{ write_contigs_size_file }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Sample_info qw{ set_file_path_to_store set_recipe_outfile_in_sample_info }; use MIP::Script::Setup_script qw{ setup_script }; @@ -151,17 +151,10 @@ sub analysis_tiddit_coverage { my $infile_suffix = $io{in}{file_suffix}; my $infile_path = $infile_path_prefix . 
$infile_suffix; - my $job_id_chain = get_recipe_attributes( - { - parameter_href => $parameter_href, - recipe_name => $recipe_name, - attribute => q{chain}, - } - ); - my $recipe_mode = $active_parameter_href->{$recipe_name}; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); @@ -170,7 +163,7 @@ sub analysis_tiddit_coverage { %io, parse_io_outfiles( { - chain_id => $job_id_chain, + chain_id => $recipe{job_id_chain}, id => $sample_id, file_info_href => $file_info_href, file_name_prefixes_ref => [$infile_name_prefix], @@ -195,12 +188,12 @@ sub analysis_tiddit_coverage { my ( $recipe_file_path, $recipe_info_path ) = setup_script( { active_parameter_href => $active_parameter_href, - core_number => $recipe_resource{core_number}, + core_number => $recipe{core_number}, directory_id => $sample_id, filehandle => $filehandle, - memory_allocation => $recipe_resource{memory}, + memory_allocation => $recipe{memory}, job_id_href => $job_id_href, - process_time => $recipe_resource{time}, + process_time => $recipe{time}, recipe_directory => $recipe_name, recipe_name => $recipe_name, } @@ -223,13 +216,11 @@ sub analysis_tiddit_coverage { say {$filehandle} $NEWLINE; ## Create chromosome name and size file - my $contigs_size_file_path = - catfile( $outdir_path, q{contigs_size_file} . $DOT . q{tsv} ); + my $contigs_size_file_path = catfile( $outdir_path, q{contigs_size_file} . $DOT . q{tsv} ); write_contigs_size_file( { - fai_file_path => $active_parameter_href->{human_genome_reference} - . $DOT . q{fai}, - outfile_path => $contigs_size_file_path, + fai_file_path => $active_parameter_href->{human_genome_reference} . $DOT . 
q{fai}, + outfile_path => $contigs_size_file_path, } ); @@ -248,7 +239,7 @@ sub analysis_tiddit_coverage { close $filehandle or $log->logcroak(q{Could not close filehandle}); - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { ## Collect QC metadata info for later use set_recipe_outfile_in_sample_info( @@ -273,13 +264,13 @@ sub analysis_tiddit_coverage { submit_recipe( { - base_command => $profile_base_command, - dependency_method => q{sample_to_sample}, - case_id => $case_id, - job_id_chain => $job_id_chain, - job_id_href => $job_id_href, - job_reservation_name => $active_parameter_href->{job_reservation_name}, - log => $log, + base_command => $profile_base_command, + dependency_method => q{sample_to_sample}, + case_id => $case_id, + job_id_chain => $recipe{job_id_chain}, + job_id_href => $job_id_href, + job_reservation_name => $active_parameter_href->{job_reservation_name}, + log => $log, max_parallel_processes_count_href => $file_info_href->{max_parallel_processes_count}, recipe_file_path => $recipe_file_path, diff --git a/lib/MIP/Recipes/Analysis/Trim_galore.pm b/lib/MIP/Recipes/Analysis/Trim_galore.pm index 565fda864..30be88d83 100644 --- a/lib/MIP/Recipes/Analysis/Trim_galore.pm +++ b/lib/MIP/Recipes/Analysis/Trim_galore.pm @@ -123,10 +123,10 @@ sub analysis_trim_galore { use MIP::Cluster qw{ update_memory_allocation }; use MIP::File_info qw{ get_sample_file_attribute }; use MIP::Get::File qw{ get_io_files }; - use MIP::Get::Parameter qw{ get_recipe_attributes get_recipe_resources }; use MIP::Program::Trim_galore qw{ trim_galore }; use MIP::Parse::File qw{ parse_io_outfiles }; use MIP::Processmanagement::Processes qw{ submit_recipe }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Sample_info qw{ set_recipe_outfile_in_sample_info }; use MIP::Script::Setup_script qw{ setup_script }; @@ -150,16 +150,7 @@ sub analysis_trim_galore { my @infile_names = @{ $io{in}{file_names} }; my @infile_name_prefixes = @{ $io{in}{file_name_prefixes} }; - my 
$job_id_chain = get_recipe_attributes( - { - parameter_href => $parameter_href, - recipe_name => $recipe_name, - attribute => q{chain}, - } - ); - my $recipe_mode = $active_parameter_href->{$recipe_name}; - - ## Construct outfiles +## Construct outfiles my $outsample_directory = catdir( $active_parameter_href->{outdata_dir}, $sample_id, $recipe_name ); @@ -172,12 +163,20 @@ sub analysis_trim_galore { } ); + my %recipe = parse_recipe_prerequisites( + { + active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, + recipe_name => $recipe_name, + } + ); + ## Set and get the io files per chain, id and stream %io = ( %io, parse_io_outfiles( { - chain_id => $job_id_chain, + chain_id => $recipe{job_id_chain}, id => $sample_id, file_info_href => $file_info_href, file_paths_ref => \@outfile_paths, @@ -202,13 +201,6 @@ sub analysis_trim_galore { } ); - my %recipe_resource = get_recipe_resources( - { - active_parameter_href => $active_parameter_href, - recipe_name => $recipe_name, - } - ); - my $parallel_processes = scalar @{ $file_info_sample{no_direction_infile_prefixes} }; my ( $process_core_number, $recipe_core_number ) = _get_cores_for_trimgalore( { @@ -221,7 +213,7 @@ sub analysis_trim_galore { { node_ram_memory => $active_parameter_href->{node_ram_memory}, parallel_processes => $recipe_core_number, - process_memory_allocation => $recipe_resource{memory}, + process_memory_allocation => $recipe{memory}, } ); @@ -234,7 +226,7 @@ sub analysis_trim_galore { filehandle => $filehandle, job_id_href => $job_id_href, memory_allocation => $memory_allocation, - process_time => $recipe_resource{time}, + process_time => $recipe{time}, recipe_directory => $recipe_name, recipe_name => $recipe_name, } @@ -304,7 +296,7 @@ sub analysis_trim_galore { ## Close filehandleS close $filehandle or $log->logcroak(q{Could not close filehandle}); - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { ## Outfiles my $outfile_path = $outfile_paths[0]; @@ -338,13 
+330,13 @@ sub analysis_trim_galore { submit_recipe( { - base_command => $profile_base_command, - case_id => $case_id, - dependency_method => q{sample_to_sample}, - job_id_chain => $job_id_chain, - job_id_href => $job_id_href, - job_reservation_name => $active_parameter_href->{job_reservation_name}, - log => $log, + base_command => $profile_base_command, + case_id => $case_id, + dependency_method => q{sample_to_sample}, + job_id_chain => $recipe{job_id_chain}, + job_id_href => $job_id_href, + job_reservation_name => $active_parameter_href->{job_reservation_name}, + log => $log, max_parallel_processes_count_href => $file_info_href->{max_parallel_processes_count}, recipe_file_path => $recipe_file_path, @@ -428,8 +420,7 @@ sub _construct_trim_galore_outfile_paths { ); my $outfile_path_prefix = - catfile( $outsample_directory, - ${$infile_name_prefixes_ref}[$paired_end_tracker] ); + catfile( $outsample_directory, ${$infile_name_prefixes_ref}[$paired_end_tracker] ); ## The suffixes differs depending on whether the reads are paired or not if ( $sequence_run_type eq q{paired-end} ) { @@ -440,8 +431,7 @@ sub _construct_trim_galore_outfile_paths { $paired_end_tracker++; $outfile_path_prefix = - catfile( $outsample_directory, - ${$infile_name_prefixes_ref}[$paired_end_tracker] ); + catfile( $outsample_directory, ${$infile_name_prefixes_ref}[$paired_end_tracker] ); push @outfile_paths, $outfile_path_prefix . q{_val_2.fq.gz}; } @@ -488,8 +478,7 @@ sub _get_cores_for_trimgalore { ## Currently (Trim galore v0.6.5) the way to calculate the core argument to trim galore: ## Always three cores for overhead (1 for trim galore and 2 for cutadapt) ## the rest are splitted between the three processe (read, write and cutadapt). 
- my $core_argument = - floor( ( $max_cores_per_node / $parallel_processes - $THREE ) / $THREE ); + my $core_argument = floor( ( $max_cores_per_node / $parallel_processes - $THREE ) / $THREE ); my $recipe_core_number = ( $core_argument * $THREE + $THREE ) * $parallel_processes; ## Only supply core argument if more than 1 diff --git a/lib/MIP/Recipes/Analysis/Upd.pm b/lib/MIP/Recipes/Analysis/Upd.pm index 8742a9407..69db25c3d 100644 --- a/lib/MIP/Recipes/Analysis/Upd.pm +++ b/lib/MIP/Recipes/Analysis/Upd.pm @@ -122,7 +122,7 @@ sub analysis_upd { use MIP::File::Path qw{ remove_file_path_suffix }; use MIP::Get::File qw{ get_io_files }; - use MIP::Get::Parameter qw{ get_recipe_attributes get_recipe_resources }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Parse::File qw{ parse_io_outfiles }; use MIP::Pedigree qw{ is_sample_proband_in_trio }; use MIP::Processmanagement::Processes qw{ submit_recipe }; @@ -166,17 +166,10 @@ sub analysis_upd { my $infile_path_prefix = $io{in}{file_path_prefix}; my $infile_path = $infile_path_prefix . 
q{.vcf.gz}; - my $job_id_chain = get_recipe_attributes( - { - attribute => q{chain}, - parameter_href => $parameter_href, - recipe_name => $recipe_name, - } - ); - my $recipe_mode = $active_parameter_href->{$recipe_name}; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); @@ -193,7 +186,7 @@ sub analysis_upd { %io, parse_io_outfiles( { - chain_id => $job_id_chain, + chain_id => $recipe{job_id_chain}, id => $sample_id, file_info_href => $file_info_href, file_name_prefix => $infile_name_prefix =~ s/$case_id/$sample_id/xmsr, @@ -215,12 +208,12 @@ sub analysis_upd { my ( $recipe_file_path, $recipe_info_path ) = setup_script( { active_parameter_href => $active_parameter_href, - core_number => $recipe_resource{core_number}, + core_number => $recipe{core_number}, directory_id => $sample_id, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, - process_time => $recipe_resource{time}, + memory_allocation => $recipe{memory}, + process_time => $recipe{time}, recipe_directory => $recipe_name, recipe_name => $recipe_name, } @@ -231,19 +224,16 @@ sub analysis_upd { say {$filehandle} q{## } . $recipe_name; ## Create chromosome name and size file - my $contigs_size_file_path = - catfile( $outdir_path, q{contigs_size_file} . $DOT . q{tsv} ); + my $contigs_size_file_path = catfile( $outdir_path, q{contigs_size_file} . $DOT . q{tsv} ); write_contigs_size_file( { - fai_file_path => $active_parameter_href->{human_genome_reference} - . $DOT . q{fai}, - outfile_path => $contigs_size_file_path, + fai_file_path => $active_parameter_href->{human_genome_reference} . $DOT . 
q{fai}, + outfile_path => $contigs_size_file_path, } ); ## Get family hash - my %family_member_id = - get_family_member_id( { sample_info_href => $sample_info_href } ); + my %family_member_id = get_family_member_id( { sample_info_href => $sample_info_href } ); CALL_TYPE: foreach my $call_type (@call_types) { @@ -290,7 +280,7 @@ sub analysis_upd { ## Close filehandleS close $filehandle or $log->logcroak(q{Could not close filehandle}); - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { ## Collect QC metadata info for later use set_recipe_outfile_in_sample_info( @@ -325,13 +315,13 @@ sub analysis_upd { submit_recipe( { - base_command => $profile_base_command, - case_id => $case_id, - dependency_method => q{case_to_sample}, - job_id_chain => $job_id_chain, - job_id_href => $job_id_href, - job_reservation_name => $active_parameter_href->{job_reservation_name}, - log => $log, + base_command => $profile_base_command, + case_id => $case_id, + dependency_method => q{case_to_sample}, + job_id_chain => $recipe{job_id_chain}, + job_id_href => $job_id_href, + job_reservation_name => $active_parameter_href->{job_reservation_name}, + log => $log, max_parallel_processes_count_href => $file_info_href->{max_parallel_processes_count}, recipe_file_path => $recipe_file_path, diff --git a/lib/MIP/Recipes/Analysis/Varg.pm b/lib/MIP/Recipes/Analysis/Varg.pm index 7d4108ba1..1c6df3607 100644 --- a/lib/MIP/Recipes/Analysis/Varg.pm +++ b/lib/MIP/Recipes/Analysis/Varg.pm @@ -113,7 +113,7 @@ sub analysis_varg { check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; use MIP::Get::File qw{ get_io_files }; - use MIP::Get::Parameter qw{ get_recipe_attributes get_recipe_resources }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Parse::File qw{ parse_io_outfiles }; use MIP::Program::Varg qw{ varg_compare }; use MIP::Processmanagement::Processes qw{ submit_recipe }; @@ -149,17 +149,10 @@ sub analysis_varg { my $infile_path_sv = $io{out}{file_path_href}{selected} . 
$DOT . q{gz}; my @contigs_size_ordered = @{ $file_info_href->{contigs_size_ordered} }; - my $job_id_chain = get_recipe_attributes( - { - attribute => q{chain}, - parameter_href => $parameter_href, - recipe_name => $recipe_name, - } - ); - my $recipe_mode = $active_parameter_href->{$recipe_name}; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); @@ -169,7 +162,7 @@ sub analysis_varg { %io, parse_io_outfiles( { - chain_id => $job_id_chain, + chain_id => $recipe{job_id_chain}, id => $case_id, file_info_href => $file_info_href, file_name_prefix => $infile_name_prefix, @@ -191,12 +184,12 @@ sub analysis_varg { my ( $recipe_file_path, $recipe_info_path ) = setup_script( { active_parameter_href => $active_parameter_href, - core_number => $recipe_resource{core_number}, + core_number => $recipe{core_number}, directory_id => $case_id, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, - process_time => $recipe_resource{time}, + memory_allocation => $recipe{memory}, + process_time => $recipe{time}, recipe_directory => $recipe_name, recipe_name => $recipe_name, } @@ -234,7 +227,7 @@ sub analysis_varg { ## Close filehandles close $filehandle or $log->logcroak(q{Could not close filehandle}); - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { ## Collect QC metadata info for later use set_recipe_outfile_in_sample_info( @@ -247,13 +240,13 @@ sub analysis_varg { submit_recipe( { - base_command => $profile_base_command, - case_id => $case_id, - dependency_method => q{case_to_island}, - job_id_chain => $job_id_chain, - job_id_href => $job_id_href, - job_reservation_name => $active_parameter_href->{job_reservation_name}, - log => $log, + base_command => $profile_base_command, + case_id => $case_id, + dependency_method => q{case_to_island}, + job_id_chain => 
$recipe{job_id_chain}, + job_id_href => $job_id_href, + job_reservation_name => $active_parameter_href->{job_reservation_name}, + log => $log, max_parallel_processes_count_href => $file_info_href->{max_parallel_processes_count}, recipe_file_path => $recipe_file_path, diff --git a/lib/MIP/Recipes/Analysis/Variant_annotation.pm b/lib/MIP/Recipes/Analysis/Variant_annotation.pm index 20b857445..6661a7f20 100644 --- a/lib/MIP/Recipes/Analysis/Variant_annotation.pm +++ b/lib/MIP/Recipes/Analysis/Variant_annotation.pm @@ -116,12 +116,12 @@ sub analysis_variant_annotation { check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; use MIP::Get::File qw{ get_io_files }; - use MIP::Get::Parameter qw{ get_recipe_attributes get_recipe_resources }; use MIP::Parse::File qw{ parse_io_outfiles }; use MIP::Program::Bcftools qw{ bcftools_concat bcftools_index bcftools_view }; use MIP::Program::Vcfanno qw{ vcfanno }; use MIP::Processmanagement::Processes qw{ submit_recipe }; use MIP::Recipes::Analysis::Xargs qw{ xargs_command }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Sample_info qw{ set_recipe_outfile_in_sample_info }; use MIP::Script::Setup_script qw{ setup_script }; @@ -145,17 +145,11 @@ sub analysis_variant_annotation { my %infile_path = %{ $io{in}{file_path_href} }; my @contigs_size_ordered = @{ $file_info_href->{contigs_size_ordered} }; - my $job_id_chain = get_recipe_attributes( - { - attribute => q{chain}, - parameter_href => $parameter_href, - recipe_name => $recipe_name, - } - ); - my $recipe_mode = $active_parameter_href->{$recipe_name}; - my %recipe_resource = get_recipe_resources( + + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); @@ -165,7 +159,7 @@ sub analysis_variant_annotation { %io, parse_io_outfiles( { - chain_id => $job_id_chain, + chain_id => $recipe{job_id_chain}, id => $case_id, file_info_href => $file_info_href, 
file_name_prefix => $infile_name_prefix, @@ -190,12 +184,12 @@ sub analysis_variant_annotation { my ( $recipe_file_path, $recipe_info_path ) = setup_script( { active_parameter_href => $active_parameter_href, - core_number => $recipe_resource{core_number}, + core_number => $recipe{core_number}, directory_id => $case_id, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, - process_time => $recipe_resource{time}, + memory_allocation => $recipe{memory}, + process_time => $recipe{time}, recipe_directory => $recipe_name, recipe_name => $recipe_name, } @@ -208,7 +202,7 @@ sub analysis_variant_annotation { ## Create file commands for xargs my ( $xargs_file_counter, $xargs_file_path_prefix ) = xargs_command( { - core_number => $recipe_resource{core_number}, + core_number => $recipe{core_number}, filehandle => $filehandle, file_path => $recipe_file_path, recipe_info_path => $recipe_info_path, @@ -252,7 +246,7 @@ sub analysis_variant_annotation { ## Create file commands for xargs ( $xargs_file_counter, $xargs_file_path_prefix ) = xargs_command( { - core_number => $recipe_resource{core_number}, + core_number => $recipe{core_number}, filehandle => $filehandle, file_path => $recipe_file_path, recipe_info_path => $recipe_info_path, @@ -289,7 +283,7 @@ sub analysis_variant_annotation { output_type => q{z}, outfile_path => $concat_outfile_path, rm_dups => 0, - threads => $recipe_resource{core_number} - 1, + threads => $recipe{core_number} - 1, } ); say {$filehandle} $NEWLINE; @@ -316,7 +310,7 @@ sub analysis_variant_annotation { ## Close filehandleS close $filehandle or $log->logcroak(q{Could not close filehandle}); - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { ## Collect QC metadata info for later use set_recipe_outfile_in_sample_info( @@ -332,7 +326,7 @@ sub analysis_variant_annotation { base_command => $profile_base_command, case_id => $case_id, dependency_method => q{sample_to_case}, - job_id_chain => 
$job_id_chain, + job_id_chain => $recipe{job_id_chain}, job_id_href => $job_id_href, job_reservation_name => $active_parameter_href->{job_reservation_name}, log => $log, @@ -431,11 +425,11 @@ sub analysis_variant_annotation_panel { check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; use MIP::Get::File qw{ get_io_files }; - use MIP::Get::Parameter qw{ get_recipe_attributes get_recipe_resources }; use MIP::Parse::File qw{ parse_io_outfiles }; use MIP::Program::Bcftools qw{ bcftools_index bcftools_view }; use MIP::Program::Vcfanno qw{ vcfanno }; use MIP::Processmanagement::Processes qw{ submit_recipe }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Sample_info qw{ set_recipe_outfile_in_sample_info }; use MIP::Script::Setup_script qw{ setup_script }; @@ -458,17 +452,10 @@ sub analysis_variant_annotation_panel { my $infile_name_prefix = $io{in}{file_name_prefix}; my $infile_path = $io{in}{file_path}; - my $job_id_chain = get_recipe_attributes( - { - attribute => q{chain}, - parameter_href => $parameter_href, - recipe_name => $recipe_name, - } - ); - my $recipe_mode = $active_parameter_href->{$recipe_name}; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); @@ -478,7 +465,7 @@ sub analysis_variant_annotation_panel { %io, parse_io_outfiles( { - chain_id => $job_id_chain, + chain_id => $recipe{job_id_chain}, id => $case_id, file_info_href => $file_info_href, file_name_prefixes_ref => [$infile_name_prefix], @@ -499,12 +486,12 @@ sub analysis_variant_annotation_panel { my ( $recipe_file_path, $recipe_info_path ) = setup_script( { active_parameter_href => $active_parameter_href, - core_number => $recipe_resource{core_number}, + core_number => $recipe{core_number}, directory_id => $case_id, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => 
$recipe_resource{memory}, - process_time => $recipe_resource{time}, + memory_allocation => $recipe{memory}, + process_time => $recipe{time}, recipe_directory => $recipe_name, recipe_name => $recipe_name, } @@ -558,7 +545,7 @@ sub analysis_variant_annotation_panel { ## Close filehandleS close $filehandle or $log->logcroak(q{Could not close filehandle}); - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { ## Collect QC metadata info for later use set_recipe_outfile_in_sample_info( @@ -574,7 +561,7 @@ sub analysis_variant_annotation_panel { base_command => $profile_base_command, case_id => $case_id, dependency_method => q{sample_to_case}, - job_id_chain => $job_id_chain, + job_id_chain => $recipe{job_id_chain}, job_id_href => $job_id_href, job_reservation_name => $active_parameter_href->{job_reservation_name}, log => $log, diff --git a/lib/MIP/Recipes/Analysis/Vcf2cytosure.pm b/lib/MIP/Recipes/Analysis/Vcf2cytosure.pm index 85cd20548..b560e7728 100644 --- a/lib/MIP/Recipes/Analysis/Vcf2cytosure.pm +++ b/lib/MIP/Recipes/Analysis/Vcf2cytosure.pm @@ -128,7 +128,7 @@ sub analysis_vcf2cytosure { use MIP::Cluster qw{ get_core_number update_memory_allocation }; use MIP::Get::File qw{ get_io_files }; - use MIP::Get::Parameter qw{ get_recipe_attributes get_recipe_resources }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Parse::File qw{ parse_io_outfiles }; use MIP::Processmanagement::Processes qw{ print_wait submit_recipe }; use MIP::Program::Bcftools qw{ bcftools_view bcftools_rename_vcf_samples }; @@ -144,17 +144,10 @@ sub analysis_vcf2cytosure { my $log = Log::Log4perl->get_logger($LOG_NAME); ## Unpack parameters - my $job_id_chain = get_recipe_attributes( - { - attribute => q{chain}, - parameter_href => $parameter_href, - recipe_name => $recipe_name, - } - ); - my $recipe_mode = $active_parameter_href->{$recipe_name}; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => 
$active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); @@ -164,7 +157,7 @@ sub analysis_vcf2cytosure { ## Set and get the io files per chain, id and stream my %io = parse_io_outfiles( { - chain_id => $job_id_chain, + chain_id => $recipe{job_id_chain}, id => $case_id, file_info_href => $file_info_href, outdata_dir => $active_parameter_href->{outdata_dir}, @@ -189,7 +182,7 @@ sub analysis_vcf2cytosure { { max_cores_per_node => $active_parameter_href->{max_cores_per_node}, modifier_core_number => scalar @{ $active_parameter_href->{sample_ids} }, - recipe_core_number => $recipe_resource{core_number}, + recipe_core_number => $recipe{core_number}, } ); @@ -198,7 +191,7 @@ sub analysis_vcf2cytosure { { node_ram_memory => $active_parameter_href->{node_ram_memory}, parallel_processes => $core_number, - process_memory_allocation => $recipe_resource{memory}, + process_memory_allocation => $recipe{memory}, } ); @@ -211,7 +204,7 @@ sub analysis_vcf2cytosure { filehandle => $filehandle, job_id_href => $job_id_href, memory_allocation => $memory_allocation, - process_time => $recipe_resource{time}, + process_time => $recipe{time}, recipe_directory => $recipe_name, recipe_name => $recipe_name, temp_directory => $temp_directory, @@ -242,9 +235,7 @@ sub analysis_vcf2cytosure { my %recipe_tag_keys = ( gatk_baserecalibration => q{out}, ); my $process_batches_count = 1; - while ( my ( $sample_id_index, $sample_id ) = - each @{ $active_parameter_href->{sample_ids} } ) - { + while ( my ( $sample_id_index, $sample_id ) = each @{ $active_parameter_href->{sample_ids} } ) { PROGRAM_TAG: while ( my ( $recipe_tag, $stream ) = each %recipe_tag_keys ) { @@ -273,9 +264,7 @@ sub analysis_vcf2cytosure { say {$filehandle} q{## Creating coverage file with tiddit -cov for samples}; SAMPLE_ID: - while ( my ( $sample_id_index, $sample_id ) = - each @{ $active_parameter_href->{sample_ids} } ) - { + while ( my ( $sample_id_index, $sample_id ) = each @{ 
$active_parameter_href->{sample_ids} } ) { my $tiddit_cov_file_path = $outfile_path_prefix . $UNDERSCORE . q{tiddit} . $UNDERSCORE . $sample_id; @@ -301,15 +290,9 @@ sub analysis_vcf2cytosure { say {$filehandle} q{## Using bcftools_view to extract SVs for samples} . $NEWLINE; SAMPLE_ID: - while ( my ( $sample_id_index, $sample_id ) = - each @{ $active_parameter_href->{sample_ids} } ) - { + while ( my ( $sample_id_index, $sample_id ) = each @{ $active_parameter_href->{sample_ids} } ) { my $bcftools_outfile_path = - $outfile_path_prefix - . $UNDERSCORE - . q{filtered} - . $UNDERSCORE - . $sample_id . q{.vcf}; + $outfile_path_prefix . $UNDERSCORE . q{filtered} . $UNDERSCORE . $sample_id . q{.vcf}; ## Store sample_id vcf file for use downstream $vcf2cytosure_file_info{$sample_id}{in}{q{.vcf}} = @@ -349,10 +332,10 @@ sub analysis_vcf2cytosure { bcftools_rename_vcf_samples( { - create_sample_file => 1, - filehandle => $filehandle, - infile => $vcf2cytosure_file_info{$sample_id}{in}{q{.vcf}}, - index => 0, + create_sample_file => 1, + filehandle => $filehandle, + infile => $vcf2cytosure_file_info{$sample_id}{in}{q{.vcf}}, + index => 0, outfile_path_prefix => $vcf2cytosure_file_info{$sample_id}{in}{$sample_display_name}, output_type => q{v}, @@ -363,18 +346,15 @@ sub analysis_vcf2cytosure { ## Exhange for new display name vcf $vcf2cytosure_file_info{$sample_id}{in}{q{.vcf}} = - $vcf2cytosure_file_info{$sample_id}{in}{$sample_display_name} - . $DOT . q{vcf}; + $vcf2cytosure_file_info{$sample_id}{in}{$sample_display_name} . $DOT . q{vcf}; } } - say {$filehandle} - q{## Converting sample's SV VCF file into cytosure, using Vcf2cytosure} . $NEWLINE; + say {$filehandle} q{## Converting sample's SV VCF file into cytosure, using Vcf2cytosure} + . 
$NEWLINE; SAMPLE_ID: - while ( my ( $sample_id_index, $sample_id ) = - each @{ $active_parameter_href->{sample_ids} } ) - { + while ( my ( $sample_id_index, $sample_id ) = each @{ $active_parameter_href->{sample_ids} } ) { # Get parameter my $sample_id_sex = get_pedigree_sample_id_attributes( @@ -405,7 +385,7 @@ sub analysis_vcf2cytosure { ); say {$filehandle} $AMPERSAND . $SPACE . $NEWLINE; - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { set_recipe_outfile_in_sample_info( { @@ -430,17 +410,17 @@ sub analysis_vcf2cytosure { } say {$filehandle} q{wait}, $NEWLINE; - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { submit_recipe( { - base_command => $profile_base_command, - case_id => $case_id, - dependency_method => q{case_to_island}, - job_id_chain => $job_id_chain, - job_id_href => $job_id_href, - job_reservation_name => $active_parameter_href->{job_reservation_name}, - log => $log, + base_command => $profile_base_command, + case_id => $case_id, + dependency_method => q{case_to_island}, + job_id_chain => $recipe{job_id_chain}, + job_id_href => $job_id_href, + job_reservation_name => $active_parameter_href->{job_reservation_name}, + log => $log, max_parallel_processes_count_href => $file_info_href->{max_parallel_processes_count}, recipe_file_path => $recipe_file_path, diff --git a/lib/MIP/Recipes/Analysis/Vcf_ase_reformat.pm b/lib/MIP/Recipes/Analysis/Vcf_ase_reformat.pm index afac1d8bd..16e2419a3 100644 --- a/lib/MIP/Recipes/Analysis/Vcf_ase_reformat.pm +++ b/lib/MIP/Recipes/Analysis/Vcf_ase_reformat.pm @@ -137,7 +137,7 @@ sub analysis_vcf_ase_reformat { check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; use MIP::Get::File qw{ get_io_files }; - use MIP::Get::Parameter qw{ get_recipe_attributes get_recipe_resources }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Parse::File qw{ parse_io_outfiles }; use MIP::Processmanagement::Processes qw{ submit_recipe }; use MIP::Program::Bcftools qw{ bcftools_view }; @@ -151,17 
+151,10 @@ sub analysis_vcf_ase_reformat { my $log = Log::Log4perl->get_logger($LOG_NAME); ## Unpack parameters - my $job_id_chain = get_recipe_attributes( - { - attribute => q{chain}, - parameter_href => $parameter_href, - recipe_name => $recipe_name, - } - ); - my $recipe_mode = $active_parameter_href->{$recipe_name}; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); @@ -170,7 +163,7 @@ sub analysis_vcf_ase_reformat { ## Set input files to user specified vcf file set_io_files( { - chain_id => $job_id_chain, + chain_id => $recipe{job_id_chain}, id => $case_id, file_paths_ref => [ $active_parameter_href->{dna_vcf_file} ], file_info_href => $file_info_href, @@ -198,7 +191,7 @@ sub analysis_vcf_ase_reformat { %io, parse_io_outfiles( { - chain_id => $job_id_chain, + chain_id => $recipe{job_id_chain}, id => $sample_id, file_info_href => $file_info_href, file_name_prefixes_ref => [$sample_id], @@ -219,12 +212,12 @@ sub analysis_vcf_ase_reformat { my ( $recipe_file_path, $recipe_info_path ) = setup_script( { active_parameter_href => $active_parameter_href, - core_number => $recipe_resource{core_number}, + core_number => $recipe{core_number}, directory_id => $sample_id, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, - process_time => $recipe_resource{time}, + memory_allocation => $recipe{memory}, + process_time => $recipe{time}, recipe_directory => $recipe_name, recipe_name => $recipe_name, } @@ -249,7 +242,7 @@ sub analysis_vcf_ase_reformat { ## Close filehandleS close $filehandle or $log->logcroak(q{Could not close filehandle}); - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { ## Collect QC metadata info for later use set_recipe_outfile_in_sample_info( @@ -263,12 +256,12 @@ sub analysis_vcf_ase_reformat { submit_recipe( { - base_command => 
$profile_base_command, - case_id => $case_id, - dependency_method => q{sample_to_sample}, - job_id_chain => $job_id_chain, - job_id_href => $job_id_href, - log => $log, + base_command => $profile_base_command, + case_id => $case_id, + dependency_method => q{sample_to_sample}, + job_id_chain => $recipe{job_id_chain}, + job_id_href => $job_id_href, + log => $log, max_parallel_processes_count_href => $file_info_href->{max_parallel_processes_count}, recipe_file_path => $recipe_file_path, diff --git a/lib/MIP/Recipes/Analysis/Vcf_rerun_reformat.pm b/lib/MIP/Recipes/Analysis/Vcf_rerun_reformat.pm index c3a33181e..cd7f2f12b 100644 --- a/lib/MIP/Recipes/Analysis/Vcf_rerun_reformat.pm +++ b/lib/MIP/Recipes/Analysis/Vcf_rerun_reformat.pm @@ -120,11 +120,11 @@ sub analysis_vcf_rerun_reformat_sv { check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; use MIP::Get::File qw{ get_io_files }; - use MIP::Get::Parameter qw{ get_recipe_attributes get_recipe_resources }; use MIP::Parse::File qw{ parse_io_outfiles }; use MIP::Processmanagement::Processes qw{ submit_recipe }; use MIP::Program::Bcftools qw{ bcftools_view }; use MIP::Program::Htslib qw{ htslib_bgzip htslib_tabix }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Script::Setup_script qw{ setup_script }; use MIP::Set::File qw{ set_io_files }; @@ -134,17 +134,10 @@ sub analysis_vcf_rerun_reformat_sv { my $log = Log::Log4perl->get_logger($LOG_NAME); ## Unpack parameters - my $job_id_chain = get_recipe_attributes( - { - parameter_href => $parameter_href, - recipe_name => $recipe_name, - attribute => q{chain}, - } - ); - my $recipe_mode = $active_parameter_href->{$recipe_name}; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); @@ -153,7 +146,7 @@ sub analysis_vcf_rerun_reformat_sv { ## Set input files to user specified vcf file set_io_files( 
{ - chain_id => $job_id_chain, + chain_id => $recipe{job_id_chain}, id => $case_id, file_paths_ref => [ $active_parameter_href->{sv_vcf_rerun_file} ], file_info_href => $file_info_href, @@ -181,7 +174,7 @@ sub analysis_vcf_rerun_reformat_sv { %io, parse_io_outfiles( { - chain_id => $job_id_chain, + chain_id => $recipe{job_id_chain}, id => $case_id, file_info_href => $file_info_href, file_name_prefixes_ref => [$case_id], @@ -204,12 +197,12 @@ sub analysis_vcf_rerun_reformat_sv { my ($recipe_file_path) = setup_script( { active_parameter_href => $active_parameter_href, - core_number => $recipe_resource{core_number}, + core_number => $recipe{core_number}, directory_id => $case_id, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, - process_time => $recipe_resource{time}, + memory_allocation => $recipe{memory}, + process_time => $recipe{time}, recipe_directory => $recipe_name, recipe_name => $recipe_name, } @@ -232,17 +225,17 @@ sub analysis_vcf_rerun_reformat_sv { ## Close filehandleS close $filehandle or $log->logcroak(q{Could not close filehandle}); - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { submit_recipe( { - base_command => $profile_base_command, - case_id => $case_id, - dependency_method => q{sample_to_case}, - job_id_chain => $job_id_chain, - job_id_href => $job_id_href, - job_reservation_name => $active_parameter_href->{job_reservation_name}, - log => $log, + base_command => $profile_base_command, + case_id => $case_id, + dependency_method => q{sample_to_case}, + job_id_chain => $recipe{job_id_chain}, + job_id_href => $job_id_href, + job_reservation_name => $active_parameter_href->{job_reservation_name}, + log => $log, max_parallel_processes_count_href => $file_info_href->{max_parallel_processes_count}, recipe_file_path => $recipe_file_path, @@ -345,11 +338,11 @@ sub analysis_vcf_rerun_reformat { check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; use MIP::Get::File qw{ 
get_io_files }; - use MIP::Get::Parameter qw{ get_recipe_attributes get_recipe_resources }; use MIP::Parse::File qw{ parse_io_outfiles }; use MIP::Processmanagement::Processes qw{ submit_recipe }; use MIP::Program::Bcftools qw{ bcftools_view }; use MIP::Program::Htslib qw{ htslib_bgzip htslib_tabix }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Script::Setup_script qw{ setup_script }; ### PREPROCESSING: @@ -358,17 +351,10 @@ sub analysis_vcf_rerun_reformat { my $log = Log::Log4perl->get_logger($LOG_NAME); ## Unpack parameters - my $job_id_chain = get_recipe_attributes( - { - attribute => q{chain}, - parameter_href => $parameter_href, - recipe_name => $recipe_name, - } - ); - my $recipe_mode = $active_parameter_href->{$recipe_name}; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); @@ -377,7 +363,7 @@ sub analysis_vcf_rerun_reformat { ## Set input files to user specified vcf file set_io_files( { - chain_id => $job_id_chain, + chain_id => $recipe{job_id_chain}, id => $case_id, file_paths_ref => [ $active_parameter_href->{vcf_rerun_file} ], file_info_href => $file_info_href, @@ -405,7 +391,7 @@ sub analysis_vcf_rerun_reformat { %io, parse_io_outfiles( { - chain_id => $job_id_chain, + chain_id => $recipe{job_id_chain}, id => $case_id, file_info_href => $file_info_href, file_name_prefixes_ref => [$case_id], @@ -428,12 +414,12 @@ sub analysis_vcf_rerun_reformat { my ($recipe_file_path) = setup_script( { active_parameter_href => $active_parameter_href, - core_number => $recipe_resource{core_number}, + core_number => $recipe{core_number}, directory_id => $case_id, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, - process_time => $recipe_resource{time}, + memory_allocation => $recipe{memory}, + process_time => $recipe{time}, recipe_directory 
=> $recipe_name, recipe_name => $recipe_name, } @@ -456,17 +442,17 @@ sub analysis_vcf_rerun_reformat { ## Close filehandleS close $filehandle or $log->logcroak(q{Could not close filehandle}); - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { submit_recipe( { - base_command => $profile_base_command, - case_id => $case_id, - dependency_method => q{sample_to_case}, - job_id_chain => $job_id_chain, - job_id_href => $job_id_href, - job_reservation_name => $active_parameter_href->{job_reservation_name}, - log => $log, + base_command => $profile_base_command, + case_id => $case_id, + dependency_method => q{sample_to_case}, + job_id_chain => $recipe{job_id_chain}, + job_id_href => $job_id_href, + job_reservation_name => $active_parameter_href->{job_reservation_name}, + log => $log, max_parallel_processes_count_href => $file_info_href->{max_parallel_processes_count}, recipe_file_path => $recipe_file_path, diff --git a/lib/MIP/Recipes/Analysis/Vep.pm b/lib/MIP/Recipes/Analysis/Vep.pm index 83dd06478..b74fc64de 100644 --- a/lib/MIP/Recipes/Analysis/Vep.pm +++ b/lib/MIP/Recipes/Analysis/Vep.pm @@ -146,7 +146,7 @@ sub analysis_vep_wgs { use MIP::Cluster qw{ get_core_number update_memory_allocation }; use MIP::Get::File qw{ get_io_files }; - use MIP::Get::Parameter qw{ get_recipe_attributes get_recipe_resources }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Parse::File qw{ parse_io_outfiles }; use MIP::Processmanagement::Processes qw{ submit_recipe }; use MIP::Program::Vep qw{ variant_effect_predictor }; @@ -181,21 +181,14 @@ sub analysis_vep_wgs { my @contigs_size_ordered = @{ $file_info_href->{contigs_size_ordered} }; my $genome_reference_version = $file_info_href->{human_genome_reference_version}; - my $job_id_chain = get_recipe_attributes( - { - parameter_href => $parameter_href, - recipe_name => $recipe_name, - attribute => q{chain}, - } - ); - my $recipe_mode = $active_parameter_href->{$recipe_name}; - my %recipe_resource = get_recipe_resources( 
+ my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); - my $core_number = $recipe_resource{core_number}; + my $core_number = $recipe{core_number}; my $xargs_file_path_prefix; ## Set and get the io files per chain, id and stream @@ -203,7 +196,7 @@ sub analysis_vep_wgs { %io, parse_io_outfiles( { - chain_id => $job_id_chain, + chain_id => $recipe{job_id_chain}, id => $case_id, file_info_href => $file_info_href, file_name_prefix => $infile_name_prefix, @@ -241,7 +234,7 @@ sub analysis_vep_wgs { { node_ram_memory => $active_parameter_href->{node_ram_memory}, parallel_processes => $parallel_processes, - process_memory_allocation => $recipe_resource{memory}, + process_memory_allocation => $recipe{memory}, } ); @@ -254,7 +247,7 @@ sub analysis_vep_wgs { filehandle => $filehandle, job_id_href => $job_id_href, memory_allocation => $memory_allocation, - process_time => $recipe_resource{time}, + process_time => $recipe{time}, recipe_directory => $recipe_name, recipe_name => $recipe_name, temp_directory => $temp_directory, @@ -364,7 +357,7 @@ sub analysis_vep_wgs { close $xargsfilehandle or $log->logcroak(q{Could not close xargsfilehandle}); - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { ## Collect QC metadata info for later use set_recipe_metafile_in_sample_info( @@ -390,7 +383,7 @@ sub analysis_vep_wgs { base_command => $profile_base_command, case_id => $case_id, dependency_method => q{sample_to_case}, - job_id_chain => $job_id_chain, + job_id_chain => $recipe{job_id_chain}, job_id_href => $job_id_href, job_reservation_name => $active_parameter_href->{job_reservation_name}, log => $log, @@ -507,7 +500,7 @@ sub analysis_vep_sv_wes { use MIP::Cluster qw{ get_core_number update_memory_allocation }; use MIP::Get::File qw{ get_io_files }; - use MIP::Get::Parameter qw{ get_recipe_attributes get_recipe_resources }; + use MIP::Recipe qw{ 
parse_recipe_prerequisites }; use MIP::Parse::File qw{ parse_io_outfiles }; use MIP::Processmanagement::Processes qw{ submit_recipe }; use MIP::Program::Vep qw{ variant_effect_predictor }; @@ -541,21 +534,14 @@ sub analysis_vep_sv_wes { my $consensus_analysis_type = $parameter_href->{cache}{consensus_analysis_type}; my $genome_reference_version = $file_info_href->{human_genome_reference_version}; - my $job_id_chain = get_recipe_attributes( - { - parameter_href => $parameter_href, - recipe_name => $recipe_name, - attribute => q{chain}, - } - ); - my $recipe_mode = $active_parameter_href->{$recipe_name}; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); - my $core_number = $recipe_resource{core_number}; + my $core_number = $recipe{core_number}; my $xargs_file_path_prefix; ## Set and get the io files per chain, id and stream @@ -563,7 +549,7 @@ sub analysis_vep_sv_wes { %io, parse_io_outfiles( { - chain_id => $job_id_chain, + chain_id => $recipe{job_id_chain}, id => $case_id, file_info_href => $file_info_href, outdata_dir => $active_parameter_href->{outdata_dir}, @@ -597,7 +583,7 @@ sub analysis_vep_sv_wes { { node_ram_memory => $active_parameter_href->{node_ram_memory}, parallel_processes => $core_number, - process_memory_allocation => $recipe_resource{memory}, + process_memory_allocation => $recipe{memory}, } ); @@ -610,7 +596,7 @@ sub analysis_vep_sv_wes { filehandle => $filehandle, job_id_href => $job_id_href, memory_allocation => $memory_allocation, - process_time => $recipe_resource{time}, + process_time => $recipe{time}, recipe_directory => $recipe_name, recipe_name => $recipe_name, temp_directory => $temp_directory, @@ -697,7 +683,7 @@ sub analysis_vep_sv_wes { close $filehandle; - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { ## Collect QC metadata info for later use 
set_recipe_outfile_in_sample_info( @@ -712,7 +698,7 @@ sub analysis_vep_sv_wes { base_command => $profile_base_command, case_id => $case_id, dependency_method => q{sample_to_case}, - job_id_chain => $job_id_chain, + job_id_chain => $recipe{job_id_chain}, job_id_href => $job_id_href, job_reservation_name => $active_parameter_href->{job_reservation_name}, log => $log, @@ -828,7 +814,7 @@ sub analysis_vep_sv_wgs { use MIP::Cluster qw{ get_core_number update_memory_allocation }; use MIP::Get::File qw{ get_io_files }; - use MIP::Get::Parameter qw{ get_recipe_attributes get_recipe_resources }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Parse::File qw{ parse_io_outfiles }; use MIP::Processmanagement::Processes qw{ submit_recipe }; use MIP::Program::Vep qw{ variant_effect_predictor }; @@ -865,17 +851,10 @@ sub analysis_vep_sv_wgs { my $consensus_analysis_type = $parameter_href->{cache}{consensus_analysis_type}; my $genome_reference_version = $file_info_href->{human_genome_reference_version}; - my $job_id_chain = get_recipe_attributes( - { - parameter_href => $parameter_href, - recipe_name => $recipe_name, - attribute => q{chain}, - } - ); - my $recipe_mode = $active_parameter_href->{$recipe_name}; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); @@ -886,7 +865,7 @@ sub analysis_vep_sv_wgs { %io, parse_io_outfiles( { - chain_id => $job_id_chain, + chain_id => $recipe{job_id_chain}, id => $case_id, file_info_href => $file_info_href, outdata_dir => $active_parameter_href->{outdata_dir}, @@ -913,7 +892,7 @@ sub analysis_vep_sv_wgs { { max_cores_per_node => $active_parameter_href->{max_cores_per_node}, modifier_core_number => scalar @{ $file_info_href->{contigs_size_ordered} }, - recipe_core_number => $recipe_resource{core_number}, + recipe_core_number => $recipe{core_number}, } ); @@ -925,7 +904,7 @@ 
sub analysis_vep_sv_wgs { { node_ram_memory => $active_parameter_href->{node_ram_memory}, parallel_processes => $parallel_processes, - process_memory_allocation => $recipe_resource{memory}, + process_memory_allocation => $recipe{memory}, } ); @@ -938,7 +917,7 @@ sub analysis_vep_sv_wgs { filehandle => $filehandle, job_id_href => $job_id_href, memory_allocation => $memory_allocation, - process_time => $recipe_resource{time}, + process_time => $recipe{time}, recipe_directory => $recipe_name, recipe_name => $recipe_name, temp_directory => $temp_directory, @@ -1090,7 +1069,7 @@ sub analysis_vep_sv_wgs { close $xargsfilehandle; close $filehandle; - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { ## Collect QC metadata info for later use set_recipe_outfile_in_sample_info( @@ -1105,7 +1084,7 @@ sub analysis_vep_sv_wgs { base_command => $profile_base_command, case_id => $case_id, dependency_method => q{sample_to_case}, - job_id_chain => $job_id_chain, + job_id_chain => $recipe{job_id_chain}, job_id_href => $job_id_href, job_reservation_name => $active_parameter_href->{job_reservation_name}, log => $log, @@ -1209,7 +1188,7 @@ sub analysis_vep { check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; use MIP::Get::File qw{ get_io_files }; - use MIP::Get::Parameter qw{ get_recipe_attributes get_recipe_resources }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::List qw{ get_splitted_lists }; use MIP::Parse::File qw{ parse_io_outfiles }; use MIP::Processmanagement::Processes qw{ submit_recipe }; @@ -1242,17 +1221,10 @@ sub analysis_vep { my $consensus_analysis_type = $parameter_href->{cache}{consensus_analysis_type}; my $genome_reference_version = $file_info_href->{human_genome_reference_version}; - my $job_id_chain = get_recipe_attributes( - { - parameter_href => $parameter_href, - recipe_name => $recipe_name, - attribute => q{chain}, - } - ); - my $recipe_mode = $active_parameter_href->{$recipe_name}; - my %recipe_resource = 
get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); @@ -1260,7 +1232,7 @@ sub analysis_vep { ## Set and get the io files per chain, id and stream %io = parse_io_outfiles( { - chain_id => $job_id_chain, + chain_id => $recipe{job_id_chain}, id => $case_id, file_info_href => $file_info_href, file_name_prefixes_ref => [$infile_name_prefix], @@ -1282,12 +1254,12 @@ sub analysis_vep { my ( $recipe_file_path, $recipe_info_path ) = setup_script( { active_parameter_href => $active_parameter_href, - core_number => $recipe_resource{core_number}, + core_number => $recipe{core_number}, directory_id => $case_id, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, - process_time => $recipe_resource{time}, + memory_allocation => $recipe{memory}, + process_time => $recipe{time}, recipe_directory => $recipe_name, recipe_name => $recipe_name, } @@ -1399,7 +1371,7 @@ sub analysis_vep { close $filehandle or $log->logcroak(q{Could not close filehandle}); - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { ## Collect QC metadata info for later use set_recipe_outfile_in_sample_info( @@ -1428,7 +1400,7 @@ sub analysis_vep { base_command => $profile_base_command, case_id => $case_id, dependency_method => q{sample_to_case}, - job_id_chain => $job_id_chain, + job_id_chain => $recipe{job_id_chain}, job_id_href => $job_id_href, job_reservation_name => $active_parameter_href->{job_reservation_name}, log => $log, diff --git a/lib/MIP/Recipes/Analysis/Vt.pm b/lib/MIP/Recipes/Analysis/Vt.pm index a50fe4f2c..342aec257 100644 --- a/lib/MIP/Recipes/Analysis/Vt.pm +++ b/lib/MIP/Recipes/Analysis/Vt.pm @@ -132,7 +132,6 @@ sub analysis_vt { check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; use MIP::Get::File qw{ get_io_files }; - use MIP::Get::Parameter qw{ get_recipe_attributes 
get_recipe_resources }; use MIP::Language::Perl qw{ perl_nae_oneliners }; use MIP::Program::Gnu::Coreutils qw{ gnu_mv }; use MIP::Parse::File qw{ parse_io_outfiles }; @@ -140,6 +139,7 @@ sub analysis_vt { use MIP::Sample_info qw{ set_recipe_outfile_in_sample_info }; use MIP::Recipes::Analysis::Vt_core qw{ analysis_vt_core_rio}; use MIP::Recipes::Analysis::Xargs qw{ xargs_command }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Script::Setup_script qw{ setup_script }; ### PREPROCESSING: @@ -163,28 +163,21 @@ sub analysis_vt { my %infile_path = %{ $io{in}{file_path_href} }; my @contigs_size_ordered = @{ $file_info_href->{contigs_size_ordered} }; - my $job_id_chain = get_recipe_attributes( - { - parameter_href => $parameter_href, - recipe_name => $recipe_name, - attribute => q{chain}, - } - ); - my $recipe_mode = $active_parameter_href->{$recipe_name}; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); - my $core_number = $recipe_resource{core_number}; + my $core_number = $recipe{core_number}; ## Set and get the io files per chain, id and stream %io = ( %io, parse_io_outfiles( { - chain_id => $job_id_chain, + chain_id => $recipe{job_id_chain}, id => $case_id, file_info_href => $file_info_href, file_name_prefix => $infile_name_prefix, @@ -214,8 +207,8 @@ sub analysis_vt { directory_id => $case_id, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, - process_time => $recipe_resource{time}, + memory_allocation => $recipe{memory}, + process_time => $recipe{time}, recipe_directory => $recipe_name, recipe_name => $recipe_name, temp_directory => $temp_directory, @@ -268,7 +261,7 @@ q{## vt - Decompose (split multi allelic records into single records) and/or nor ); if ( $contig_index == 0 - && $recipe_mode == 1 ) + && $recipe{mode} == 1 ) { ## Split 
to enable submission to sample_info QC later @@ -276,8 +269,8 @@ q{## vt - Decompose (split multi allelic records into single records) and/or nor splitpath($xargs_file_path_prefix); ## Collect QC metadata info for later use - my $qc_vt_outfile_path = catfile( $directory, - $stderr_file_xargs . $DOT . $contig . $DOT . q{stderr.txt} ); + my $qc_vt_outfile_path = + catfile( $directory, $stderr_file_xargs . $DOT . $contig . $DOT . q{stderr.txt} ); set_recipe_outfile_in_sample_info( { path => $qc_vt_outfile_path, @@ -326,17 +319,17 @@ q{## vt - Decompose (split multi allelic records into single records) and/or nor close $xargsfilehandle or $log->logcroak(q{Could not close xargsfilehandle}); - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { submit_recipe( { - base_command => $profile_base_command, - case_id => $case_id, - dependency_method => q{sample_to_case}, - job_id_chain => $job_id_chain, - job_id_href => $job_id_href, - job_reservation_name => $active_parameter_href->{job_reservation_name}, - log => $log, + base_command => $profile_base_command, + case_id => $case_id, + dependency_method => q{sample_to_case}, + job_id_chain => $recipe{job_id_chain}, + job_id_href => $job_id_href, + job_reservation_name => $active_parameter_href->{job_reservation_name}, + log => $log, max_parallel_processes_count_href => $file_info_href->{max_parallel_processes_count}, recipe_file_path => $recipe_file_path, @@ -442,13 +435,13 @@ sub analysis_vt_panel { check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; use MIP::Get::File qw{ get_io_files }; - use MIP::Get::Parameter qw{ get_recipe_attributes get_recipe_resources }; use MIP::Language::Perl qw{ perl_nae_oneliners }; use MIP::Program::Gnu::Coreutils qw{ gnu_mv }; use MIP::Parse::File qw{ parse_io_outfiles }; use MIP::Processmanagement::Processes qw{ submit_recipe }; - use MIP::Sample_info qw{ set_recipe_outfile_in_sample_info }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use 
MIP::Recipes::Analysis::Vt_core qw{ analysis_vt_core_rio}; + use MIP::Sample_info qw{ set_recipe_outfile_in_sample_info }; use MIP::Script::Setup_script qw{ setup_script }; ### PREPROCESSING: @@ -471,17 +464,10 @@ sub analysis_vt_panel { my $infile_name_prefix = $io{in}{file_name_prefix}; my $infile_path = $io{in}{file_path}; - my $job_id_chain = get_recipe_attributes( - { - parameter_href => $parameter_href, - recipe_name => $recipe_name, - attribute => q{chain}, - } - ); - my $recipe_mode = $active_parameter_href->{$recipe_name}; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); @@ -491,7 +477,7 @@ sub analysis_vt_panel { %io, parse_io_outfiles( { - chain_id => $job_id_chain, + chain_id => $recipe{job_id_chain}, id => $case_id, file_info_href => $file_info_href, file_name_prefixes_ref => [$infile_name_prefix], @@ -515,12 +501,12 @@ sub analysis_vt_panel { my ( $recipe_file_path, $recipe_info_path ) = setup_script( { active_parameter_href => $active_parameter_href, - core_number => $recipe_resource{core_number}, + core_number => $recipe{core_number}, directory_id => $case_id, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, - process_time => $recipe_resource{time}, + memory_allocation => $recipe{memory}, + process_time => $recipe{time}, recipe_directory => $recipe_name, recipe_name => $recipe_name, temp_directory => $temp_directory, @@ -553,8 +539,7 @@ q{## vt - Decompose (split multi allelic records into single records) and/or nor ## Remove decomposed '*' entries if ( $active_parameter_href->{vt_missing_alt_allele} ) { - my $removed_outfile_path = - $outfile_path_prefix . $UNDERSCORE . q{nostar} . $outfile_suffix; + my $removed_outfile_path = $outfile_path_prefix . $UNDERSCORE . q{nostar} . 
$outfile_suffix; perl_nae_oneliners( { filehandle => $filehandle, @@ -578,7 +563,7 @@ q{## vt - Decompose (split multi allelic records into single records) and/or nor close $filehandle; - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { set_recipe_outfile_in_sample_info( { @@ -590,13 +575,13 @@ q{## vt - Decompose (split multi allelic records into single records) and/or nor submit_recipe( { - base_command => $profile_base_command, - case_id => $case_id, - dependency_method => q{sample_to_case}, - job_id_chain => $job_id_chain, - job_id_href => $job_id_href, - job_reservation_name => $active_parameter_href->{job_reservation_name}, - log => $log, + base_command => $profile_base_command, + case_id => $case_id, + dependency_method => q{sample_to_case}, + job_id_chain => $recipe{job_id_chain}, + job_id_href => $job_id_href, + job_reservation_name => $active_parameter_href->{job_reservation_name}, + log => $log, max_parallel_processes_count_href => $file_info_href->{max_parallel_processes_count}, recipe_file_path => $recipe_file_path, diff --git a/lib/MIP/Recipes/Analysis/Vt_core.pm b/lib/MIP/Recipes/Analysis/Vt_core.pm index 827114861..79e0181ad 100644 --- a/lib/MIP/Recipes/Analysis/Vt_core.pm +++ b/lib/MIP/Recipes/Analysis/Vt_core.pm @@ -111,7 +111,7 @@ sub analysis_vt_core { store => \$build_gatk_index, strict_type => 1, }, - cmd_break => { default => $NEWLINE x 2, store => \$cmd_break, strict_type => 1, }, + cmd_break => { default => $NEWLINE x 2, store => \$cmd_break, strict_type => 1, }, core_number => { allow => qr/ \A \d+ \z /xsm, default => 1, @@ -166,7 +166,7 @@ sub analysis_vt_core { store => \$outfile_path, strict_type => 1, }, - parameter_href => { default => {}, strict_type => 1, store => \$parameter_href, }, + parameter_href => { default => {}, strict_type => 1, store => \$parameter_href, }, profile_base_command => { default => q{sbatch}, store => \$profile_base_command, @@ -174,9 +174,8 @@ sub analysis_vt_core { }, recipe_directory => { default => 
q{vt_core}, store => \$recipe_directory, strict_type => 1, }, - recipe_name => - { default => q{vt_core}, store => \$recipe_name, strict_type => 1, }, - tabix => { + recipe_name => { default => q{vt_core}, store => \$recipe_name, strict_type => 1, }, + tabix => { allow => [ undef, 0, 1 ], default => 0, store => \$tabix, @@ -188,13 +187,11 @@ sub analysis_vt_core { store => \$uniq, strict_type => 1, }, - xargs_file_path_prefix => - { store => \$xargs_file_path_prefix, strict_type => 1, }, + xargs_file_path_prefix => { store => \$xargs_file_path_prefix, strict_type => 1, }, }; check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; - use MIP::Get::Parameter qw{ get_recipe_attributes get_recipe_resources }; use MIP::Program::Gnu::Coreutils qw{ gnu_mv }; use MIP::Program::Gnu::Software::Gnu_sed qw{ gnu_sed }; use MIP::Processmanagement::Processes qw{ submit_recipe }; @@ -202,6 +199,7 @@ sub analysis_vt_core { use MIP::Program::Htslib qw{ htslib_bgzip htslib_tabix }; use MIP::Program::Bcftools qw{ bcftools_view bcftools_index }; use MIP::Program::Vt qw{ vt_decompose vt_normalize vt_uniq }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Script::Setup_script qw{ setup_script }; ### PREPROCESSING: @@ -213,17 +211,10 @@ sub analysis_vt_core { my $log = Log::Log4perl->get_logger($LOG_NAME); ## Set MIP recipe name - my $job_id_chain = get_recipe_attributes( - { - parameter_href => $parameter_href, - recipe_name => $recipe_name, - attribute => q{chain}, - } - ); - my $recipe_mode = $active_parameter_href->{$recipe_name}; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); @@ -238,12 +229,12 @@ sub analysis_vt_core { my ( $recipe_file_path, $recipe_info_path ) = setup_script( { active_parameter_href => $active_parameter_href, - core_number => $recipe_resource{core_number}, + core_number => 
$recipe{core_number}, directory_id => $case_id, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, - process_time => $recipe_resource{time}, + memory_allocation => $recipe{memory}, + process_time => $recipe{time}, recipe_name => $recipe_name, recipe_directory => $recipe_directory, } @@ -258,8 +249,7 @@ sub analysis_vt_core { if ( defined $xargs_file_path_prefix && defined $contig ) { ## Redirect xargs output to program specific stderr file - $stderrfile_path = - $xargs_file_path_prefix . $DOT . $contig . $DOT . q{stderr.txt}; + $stderrfile_path = $xargs_file_path_prefix . $DOT . $contig . $DOT . q{stderr.txt}; } ## Initate processing @@ -414,7 +404,7 @@ sub analysis_vt_core { close $filehandle; - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { submit_recipe( { @@ -505,7 +495,7 @@ sub analysis_vt_core_rio { strict_type => 1, store => \$bgzip }, - cmd_break => { default => $NEWLINE x 2, strict_type => 1, store => \$cmd_break }, + cmd_break => { default => $NEWLINE x 2, strict_type => 1, store => \$cmd_break }, contig => { strict_type => 1, store => \$contig }, core_number => { default => 1, @@ -560,9 +550,8 @@ sub analysis_vt_core_rio { strict_type => 1, store => \$outfile_path, }, - recipe_directory => - { default => q{vt}, strict_type => 1, store => \$recipe_directory }, - recipe_name => { default => q{vt}, strict_type => 1, store => \$recipe_name }, + recipe_directory => { default => q{vt}, strict_type => 1, store => \$recipe_directory }, + recipe_name => { default => q{vt}, strict_type => 1, store => \$recipe_name }, xargs_file_path_prefix => { strict_type => 1, store => \$xargs_file_path_prefix }, tabix => { default => 0, @@ -582,8 +571,7 @@ sub analysis_vt_core_rio { use MIP::Program::Gnu::Coreutils qw{ gnu_mv }; use MIP::Program::Gnu::Software::Gnu_sed qw{ gnu_sed }; - use MIP::Processmanagement::Slurm_processes - qw{ slurm_submit_job_no_dependency_add_to_samples }; + use 
MIP::Processmanagement::Slurm_processes qw{ slurm_submit_job_no_dependency_add_to_samples }; use MIP::Program::Htslib qw{ htslib_bgzip htslib_tabix }; use MIP::Program::Bcftools qw{ bcftools_view bcftools_index }; use MIP::Program::Vt qw{ vt_decompose vt_normalize vt_uniq }; @@ -595,9 +583,6 @@ sub analysis_vt_core_rio { ## Retrieve logger object my $log = Log::Log4perl->get_logger($LOG_NAME); - ## Set MIP recipe name - my $recipe_mode = $active_parameter_href->{$recipe_name}; - my $recipe_info_path; ## Generate a random integer between 0-10,000. @@ -612,8 +597,7 @@ sub analysis_vt_core_rio { if ( defined $xargs_file_path_prefix && defined $contig ) { ## Redirect xargs output to program specific stderr file - $stderrfile_path = - $xargs_file_path_prefix . $DOT . $contig . $DOT . q{stderr.txt}; + $stderrfile_path = $xargs_file_path_prefix . $DOT . $contig . $DOT . q{stderr.txt}; } ## Initate processing diff --git a/lib/MIP/Recipes/Build/Bwa_prerequisites.pm b/lib/MIP/Recipes/Build/Bwa_prerequisites.pm index 2c56e959d..eebb4a2f4 100644 --- a/lib/MIP/Recipes/Build/Bwa_prerequisites.pm +++ b/lib/MIP/Recipes/Build/Bwa_prerequisites.pm @@ -141,10 +141,10 @@ sub build_bwa_prerequisites { check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; - use MIP::Get::Parameter qw{ get_recipe_resources }; use MIP::Language::Shell qw{ check_exist_and_move_file }; use MIP::Processmanagement::Processes qw{ submit_recipe }; use MIP::Program::Bwa qw{ bwa_index }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Recipes::Build::Human_genome_prerequisites qw{ build_human_genome_prerequisites }; use MIP::Script::Setup_script qw{ setup_script }; @@ -153,11 +153,10 @@ sub build_bwa_prerequisites { Readonly my $PROCESSING_TIME => 3; ## Unpack parameters - my $job_id_chain = $parameter_href->{$recipe_name}{chain}; - my $recipe_mode = $active_parameter_href->{$recipe_name}; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { 
active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => q{bwa_mem}, } ); @@ -179,7 +178,7 @@ sub build_bwa_prerequisites { process_time => $PROCESSING_TIME, recipe_directory => $recipe_name, recipe_name => $recipe_name, - source_environment_commands_ref => $recipe_resource{load_env_ref}, + source_environment_commands_ref => $recipe{load_env_ref}, } ); @@ -243,7 +242,7 @@ sub build_bwa_prerequisites { close $filehandle or $log->logcroak(q{Could not close filehandle}); - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { submit_recipe( { @@ -252,7 +251,7 @@ sub build_bwa_prerequisites { base_command => $profile_base_command, case_id => $case_id, job_id_href => $job_id_href, log => $log, - job_id_chain => $job_id_chain, + job_id_chain => $recipe{job_id_chain}, recipe_file_path => $recipe_file_path, sample_ids_ref => \@{ $active_parameter_href->{sample_ids} }, submission_profile => $active_parameter_href->{submission_profile}, @@ -375,10 +374,10 @@ sub build_bwa_mem2_prerequisites { check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; - use MIP::Get::Parameter qw{ get_recipe_resources }; use MIP::Language::Shell qw{ check_exist_and_move_file }; use MIP::Processmanagement::Processes qw{ submit_recipe }; use MIP::Program::Bwa qw{ bwa_mem2_index }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Recipes::Build::Human_genome_prerequisites qw{ build_human_genome_prerequisites }; use MIP::Script::Setup_script qw{ setup_script }; @@ -388,12 +387,11 @@ sub build_bwa_mem2_prerequisites { Readonly my $PROCESSING_TIME => 3; ## Unpack parameters - my $job_id_chain = $parameter_href->{$recipe_name}{chain}; - my $recipe_mode = $active_parameter_href->{$recipe_name}; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, - recipe_name => q{bwa_mem2}, + parameter_href => $parameter_href, + recipe_name => q{bwa_mem2}, } ); @@ -415,7 +413,7 @@ sub 
build_bwa_mem2_prerequisites { process_time => $PROCESSING_TIME, recipe_directory => $recipe_name, recipe_name => $recipe_name, - source_environment_commands_ref => $recipe_resource{load_env_ref}, + source_environment_commands_ref => $recipe{load_env_ref}, } ); @@ -478,7 +476,7 @@ sub build_bwa_mem2_prerequisites { close $filehandle or $log->logcroak(q{Could not close filehandle}); - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { submit_recipe( { @@ -487,7 +485,7 @@ sub build_bwa_mem2_prerequisites { case_id => $case_id, job_id_href => $job_id_href, log => $log, - job_id_chain => $job_id_chain, + job_id_chain => $recipe{job_id_chain}, recipe_file_path => $recipe_file_path, sample_ids_ref => \@{ $active_parameter_href->{sample_ids} }, submission_profile => $active_parameter_href->{submission_profile}, diff --git a/lib/MIP/Recipes/Build/Capture_file_prerequisites.pm b/lib/MIP/Recipes/Build/Capture_file_prerequisites.pm index 0b3f76081..c7a570640 100644 --- a/lib/MIP/Recipes/Build/Capture_file_prerequisites.pm +++ b/lib/MIP/Recipes/Build/Capture_file_prerequisites.pm @@ -132,12 +132,12 @@ sub build_capture_file_prerequisites { check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; - use MIP::Get::Parameter qw{ get_recipe_resources }; use MIP::Program::Gnu::Coreutils qw{ gnu_rm gnu_cat gnu_ln }; use MIP::Language::Shell qw{ check_exist_and_move_file }; use MIP::Processmanagement::Processes qw{ submit_recipe }; use MIP::Program::Picardtools qw{ picardtools_createsequencedictionary }; use MIP::Program::Picardtools qw{ picardtools_intervallisttools }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Script::Setup_script qw{ setup_script }; ## Constants @@ -150,11 +150,11 @@ sub build_capture_file_prerequisites { ## Unpack parameters my $interval_list_suffix = $parameter_build_suffixes_ref->[0]; my $padded_interval_list_suffix = $parameter_build_suffixes_ref->[1]; - my $recipe_mode = $active_parameter_href->{$recipe_name}; - my 
%recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, - recipe_name => q{mip}, + parameter_href => $parameter_href, + recipe_name => $recipe_name, } ); my $referencefile_path = $active_parameter_href->{human_genome_reference}; @@ -176,7 +176,7 @@ sub build_capture_file_prerequisites { job_id_href => $job_id_href, recipe_directory => $recipe_name, recipe_name => $recipe_name, - source_environment_commands_ref => $recipe_resource{load_env_ref}, + source_environment_commands_ref => $recipe{load_env_ref}, } ); } @@ -332,7 +332,7 @@ sub build_capture_file_prerequisites { close $filehandle; - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { submit_recipe( { diff --git a/lib/MIP/Recipes/Build/Human_genome_prerequisites.pm b/lib/MIP/Recipes/Build/Human_genome_prerequisites.pm index 6e521169d..a83b28441 100644 --- a/lib/MIP/Recipes/Build/Human_genome_prerequisites.pm +++ b/lib/MIP/Recipes/Build/Human_genome_prerequisites.pm @@ -143,7 +143,6 @@ sub build_human_genome_prerequisites { check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; - use MIP::Get::Parameter qw{ get_recipe_resources }; use MIP::Program::Gnu::Coreutils qw{ gnu_rm gnu_ln }; use MIP::Language::Java qw{ java_core }; use MIP::Language::Shell qw{ check_exist_and_move_file }; @@ -151,6 +150,7 @@ sub build_human_genome_prerequisites { use MIP::Program::Samtools qw{ samtools_faidx }; use MIP::Program::Picardtools qw{ picardtools_createsequencedictionary }; use MIP::Processmanagement::Processes qw{ submit_recipe }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Recipes::Build::Capture_file_prerequisites qw{ build_capture_file_prerequisites }; use MIP::Script::Setup_script qw{ setup_script }; @@ -161,11 +161,11 @@ sub build_human_genome_prerequisites { my $submit_switch; ## Unpack parameters - my $recipe_mode = $active_parameter_href->{$recipe_name}; - my %recipe_resource = get_recipe_resources( + my 
%recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, - recipe_name => q{mip}, + parameter_href => $parameter_href, + recipe_name => $recipe_name, } ); @@ -189,7 +189,7 @@ sub build_human_genome_prerequisites { directory_id => $case_id, recipe_name => $recipe_name, recipe_directory => $recipe_name, - source_environment_commands_ref => $recipe_resource{load_env_ref}, + source_environment_commands_ref => $recipe{load_env_ref}, } ); } @@ -352,7 +352,7 @@ sub build_human_genome_prerequisites { close $filehandle; - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { submit_recipe( { diff --git a/lib/MIP/Recipes/Build/Rtg_prerequisites.pm b/lib/MIP/Recipes/Build/Rtg_prerequisites.pm index 55a026107..a119684de 100644 --- a/lib/MIP/Recipes/Build/Rtg_prerequisites.pm +++ b/lib/MIP/Recipes/Build/Rtg_prerequisites.pm @@ -141,10 +141,10 @@ sub build_rtg_prerequisites { check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; - use MIP::Get::Parameter qw{ get_recipe_resources }; use MIP::Language::Shell qw{ check_exist_and_move_file }; use MIP::Processmanagement::Processes qw{ submit_recipe }; use MIP::Program::Rtg qw{ rtg_format }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Recipes::Build::Human_genome_prerequisites qw{ build_human_genome_prerequisites }; use MIP::Script::Setup_script qw{ setup_script }; @@ -153,11 +153,10 @@ sub build_rtg_prerequisites { Readonly my $PROCESSING_TIME => 3; ## Unpack parameters - my $job_id_chain = $parameter_href->{$recipe_name}{chain}; - my $recipe_mode = $active_parameter_href->{$recipe_name}; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => q{mip}, } ); @@ -180,7 +179,7 @@ sub build_rtg_prerequisites { process_time => $PROCESSING_TIME, recipe_directory => $recipe_name, recipe_name => $recipe_name, - source_environment_commands_ref 
=> $recipe_resource{load_env_ref}, + source_environment_commands_ref => $recipe{load_env_ref}, } ); @@ -243,7 +242,7 @@ sub build_rtg_prerequisites { close $filehandle or $log->logcroak(q{Could not close filehandle}); - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { submit_recipe( { @@ -252,7 +251,7 @@ sub build_rtg_prerequisites { case_id => $case_id, job_id_href => $job_id_href, log => $log, - job_id_chain => $job_id_chain, + job_id_chain => $recipe{job_id_chain}, recipe_file_path => $recipe_file_path, sample_ids_ref => \@{ $active_parameter_href->{sample_ids} }, submission_profile => $active_parameter_href->{submission_profile}, diff --git a/lib/MIP/Recipes/Build/Salmon_quant_prerequisites.pm b/lib/MIP/Recipes/Build/Salmon_quant_prerequisites.pm index 4d2301046..77c129e59 100644 --- a/lib/MIP/Recipes/Build/Salmon_quant_prerequisites.pm +++ b/lib/MIP/Recipes/Build/Salmon_quant_prerequisites.pm @@ -141,12 +141,12 @@ sub build_salmon_quant_prerequisites { check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; - use MIP::Get::Parameter qw{ get_recipe_resources }; use MIP::Program::Gnu::Coreutils qw{ gnu_mkdir }; use MIP::Language::Shell qw{ check_exist_and_move_file }; use MIP::Processmanagement::Processes qw{ submit_recipe }; use MIP::Program::Star_fusion qw{ star_fusion_gtf_file_to_feature_seqs }; use MIP::Program::Salmon qw{ salmon_index }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Script::Setup_script qw{ setup_script }; ## Constants @@ -154,14 +154,11 @@ sub build_salmon_quant_prerequisites { Readonly my $NUMBER_OF_CORES => $active_parameter_href->{max_cores_per_node}; Readonly my $PROCESSING_TIME => 5; - ## Set recipe mode - my $recipe_mode = $active_parameter_href->{$recipe_name}; - - ## Unpack parameters - my $job_id_chain = $parameter_href->{$recipe_name}{chain}; - my %recipe_resource = get_recipe_resources( +## Unpack parameters + my %recipe = parse_recipe_prerequisites( { active_parameter_href => 
$active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); @@ -181,11 +178,11 @@ sub build_salmon_quant_prerequisites { directory_id => $case_id, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, + memory_allocation => $recipe{memory}, recipe_directory => $recipe_name, recipe_name => $recipe_name, process_time => $PROCESSING_TIME, - source_environment_commands_ref => $recipe_resource{load_env_ref}, + source_environment_commands_ref => $recipe{load_env_ref}, } ); @@ -248,7 +245,7 @@ sub build_salmon_quant_prerequisites { close $filehandle or $log->logcroak(q{Could not close filehandle}); - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { submit_recipe( { @@ -257,7 +254,7 @@ sub build_salmon_quant_prerequisites { case_id => $case_id, job_id_href => $job_id_href, log => $log, - job_id_chain => $job_id_chain, + job_id_chain => $recipe{job_id_chain}, recipe_file_path => $recipe_file_path, sample_ids_ref => \@{ $active_parameter_href->{sample_ids} }, submission_profile => $active_parameter_href->{submission_profile}, diff --git a/lib/MIP/Recipes/Build/Star_fusion_prerequisites.pm b/lib/MIP/Recipes/Build/Star_fusion_prerequisites.pm index 8813d4f15..fc858c265 100644 --- a/lib/MIP/Recipes/Build/Star_fusion_prerequisites.pm +++ b/lib/MIP/Recipes/Build/Star_fusion_prerequisites.pm @@ -141,11 +141,11 @@ sub build_star_fusion_prerequisites { check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; - use MIP::Get::Parameter qw{ get_recipe_resources }; use MIP::Program::Gnu::Coreutils qw{ gnu_mkdir }; use MIP::Language::Shell qw{ check_exist_and_move_file }; use MIP::Processmanagement::Processes qw{ submit_recipe }; use MIP::Program::Star_fusion qw{ star_fusion_prep_genome_lib }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Script::Setup_script qw{ setup_script }; ## Constants @@ -154,11 +154,10 @@ sub build_star_fusion_prerequisites { Readonly my 
$PROCESSING_TIME => 30; ## Unpack parameters - my $job_id_chain = $parameter_href->{$recipe_name}{chain}; - my $recipe_mode = $active_parameter_href->{$recipe_name}; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); @@ -178,11 +177,11 @@ sub build_star_fusion_prerequisites { directory_id => $case_id, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, + memory_allocation => $recipe{memory}, recipe_directory => $recipe_name, recipe_name => $recipe_name, process_time => $PROCESSING_TIME, - source_environment_commands_ref => $recipe_resource{load_env_ref}, + source_environment_commands_ref => $recipe{load_env_ref}, } ); @@ -241,7 +240,7 @@ sub build_star_fusion_prerequisites { close $filehandle or $log->logcroak(q{Could not close filehandle}); - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { submit_recipe( { @@ -250,7 +249,7 @@ sub build_star_fusion_prerequisites { case_id => $case_id, job_id_href => $job_id_href, log => $log, - job_id_chain => $job_id_chain, + job_id_chain => $recipe{job_id_chain}, recipe_file_path => $recipe_file_path, sample_ids_ref => \@{ $active_parameter_href->{sample_ids} }, submission_profile => $active_parameter_href->{submission_profile}, diff --git a/lib/MIP/Recipes/Build/Star_prerequisites.pm b/lib/MIP/Recipes/Build/Star_prerequisites.pm index 71330ddd7..a9b49d829 100644 --- a/lib/MIP/Recipes/Build/Star_prerequisites.pm +++ b/lib/MIP/Recipes/Build/Star_prerequisites.pm @@ -141,11 +141,11 @@ sub build_star_prerequisites { check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; - use MIP::Get::Parameter qw{ get_recipe_resources }; use MIP::Program::Gnu::Coreutils qw{ gnu_mkdir }; use MIP::Language::Shell qw{ check_exist_and_move_file }; use MIP::Processmanagement::Processes qw{ submit_recipe }; use 
MIP::Program::Star qw{ star_genome_generate }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Script::Setup_script qw{ setup_script }; ## Constants @@ -154,14 +154,11 @@ sub build_star_prerequisites { Readonly my $PROCESSING_TIME => 3; Readonly my $READ_LENGTH => 150; - ## Set recipe mode - my $recipe_mode = $active_parameter_href->{$recipe_name}; - - ## Unpack parameters - my $job_id_chain = $parameter_href->{$recipe_name}{chain}; - my %recipe_resource = get_recipe_resources( +## Unpack parameters + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, + parameter_href => $parameter_href, recipe_name => $recipe_name, } ); @@ -181,11 +178,11 @@ sub build_star_prerequisites { directory_id => $case_id, core_number => $NUMBER_OF_CORES, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, + memory_allocation => $recipe{memory}, recipe_directory => $recipe_name, recipe_name => $recipe_name, process_time => $PROCESSING_TIME, - source_environment_commands_ref => $recipe_resource{load_env_ref}, + source_environment_commands_ref => $recipe{load_env_ref}, } ); @@ -238,7 +235,7 @@ sub build_star_prerequisites { close $filehandle or $log->logcroak(q{Could not close filehandle}); - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { submit_recipe( { @@ -247,7 +244,7 @@ sub build_star_prerequisites { case_id => $case_id, job_id_href => $job_id_href, log => $log, - job_id_chain => $job_id_chain, + job_id_chain => $recipe{job_id_chain}, recipe_file_path => $recipe_file_path, sample_ids_ref => \@{ $active_parameter_href->{sample_ids} }, submission_profile => $active_parameter_href->{submission_profile}, diff --git a/lib/MIP/Recipes/Build/Transcript_annotation_prerequisites.pm b/lib/MIP/Recipes/Build/Transcript_annotation_prerequisites.pm index 07e991e91..bcfcaa6aa 100644 --- a/lib/MIP/Recipes/Build/Transcript_annotation_prerequisites.pm +++ 
b/lib/MIP/Recipes/Build/Transcript_annotation_prerequisites.pm @@ -132,10 +132,10 @@ sub build_transcript_annotation_prerequisites { check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; - use MIP::Get::Parameter qw{ get_recipe_resources }; use MIP::Language::Shell qw{ check_exist_and_move_file }; use MIP::Processmanagement::Processes qw{ submit_recipe }; use MIP::Program::Ucsc qw{ ucsc_gtf_to_genepred }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Script::Setup_script qw{ setup_script }; ## Constants @@ -145,11 +145,12 @@ sub build_transcript_annotation_prerequisites { my $submit_switch; ## Unpack parameters - my $recipe_mode = $active_parameter_href->{$recipe_name}; - my %recipe_resource = get_recipe_resources( + + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, - recipe_name => q{mip}, + parameter_href => $parameter_href, + recipe_name => $recipe_name, } ); @@ -175,7 +176,7 @@ sub build_transcript_annotation_prerequisites { job_id_href => $job_id_href, recipe_directory => $recipe_name, recipe_name => $recipe_name, - source_environment_commands_ref => $recipe_resource{load_env_ref}, + source_environment_commands_ref => $recipe{load_env_ref}, } ); } @@ -233,7 +234,7 @@ sub build_transcript_annotation_prerequisites { close $filehandle; - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { submit_recipe( { diff --git a/lib/MIP/Recipes/Download/1000g_indels.pm b/lib/MIP/Recipes/Download/1000g_indels.pm index ca34a2d59..2b6f4c9b1 100644 --- a/lib/MIP/Recipes/Download/1000g_indels.pm +++ b/lib/MIP/Recipes/Download/1000g_indels.pm @@ -117,7 +117,7 @@ sub download_1000g_indels { check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; - use MIP::Get::Parameter qw{ get_recipe_resources }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Recipes::Download::Get_reference qw{ get_reference }; use MIP::Script::Setup_script qw{ setup_script }; use 
MIP::Processmanagement::Slurm_processes qw{ slurm_submit_job_no_dependency_dead_end }; @@ -131,17 +131,14 @@ sub download_1000g_indels { my $reference_dir = $active_parameter_href->{reference_dir}; my @reference_genome_versions = @{ $active_parameter_href->{reference_genome_versions} }; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, recipe_name => $recipe_name, } ); - ## Set recipe mode - my $recipe_mode = $active_parameter_href->{$recipe_name}; - - ## Filehandle(s) +## Filehandle(s) # Create anonymous filehandle my $filehandle = IO::Handle->new(); @@ -149,19 +146,19 @@ sub download_1000g_indels { my ( $recipe_file_path, $recipe_info_path ) = setup_script( { active_parameter_href => $active_parameter_href, - core_number => $recipe_resource{core_number}, + core_number => $recipe{core_number}, directory_id => q{mip_download}, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, + memory_allocation => $recipe{memory}, outdata_dir => $reference_dir, outscript_dir => $reference_dir, - process_time => $recipe_resource{time}, + process_time => $recipe{time}, recipe_data_directory_path => $active_parameter_href->{reference_dir}, recipe_directory => $recipe_name . $UNDERSCORE . 
$reference_version, recipe_name => $recipe_name, temp_directory => $temp_directory, - source_environment_commands_ref => $recipe_resource{load_env_ref}, + source_environment_commands_ref => $recipe{load_env_ref}, } ); @@ -183,7 +180,7 @@ sub download_1000g_indels { ## Close filehandleS close $filehandle or $log->logcroak(q{Could not close filehandle}); - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { ## No upstream or downstream dependencies slurm_submit_job_no_dependency_dead_end( diff --git a/lib/MIP/Recipes/Download/1000g_omni.pm b/lib/MIP/Recipes/Download/1000g_omni.pm index 107d1237e..299a22cd6 100644 --- a/lib/MIP/Recipes/Download/1000g_omni.pm +++ b/lib/MIP/Recipes/Download/1000g_omni.pm @@ -117,7 +117,7 @@ sub download_1000g_omni { check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; - use MIP::Get::Parameter qw{ get_recipe_resources }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Recipes::Download::Get_reference qw{ get_reference }; use MIP::Script::Setup_script qw{ setup_script }; use MIP::Processmanagement::Slurm_processes qw{ slurm_submit_job_no_dependency_dead_end }; @@ -131,17 +131,14 @@ sub download_1000g_omni { my $reference_dir = $active_parameter_href->{reference_dir}; my @reference_genome_versions = @{ $active_parameter_href->{reference_genome_versions} }; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, recipe_name => $recipe_name, } ); - ## Set recipe mode - my $recipe_mode = $active_parameter_href->{$recipe_name}; - - ## Filehandle(s) +## Filehandle(s) # Create anonymous filehandle my $filehandle = IO::Handle->new(); @@ -149,19 +146,19 @@ sub download_1000g_omni { my ( $recipe_file_path, $recipe_info_path ) = setup_script( { active_parameter_href => $active_parameter_href, - core_number => $recipe_resource{core_number}, + core_number => $recipe{core_number}, directory_id => q{mip_download}, filehandle => 
$filehandle, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, + memory_allocation => $recipe{memory}, outdata_dir => $reference_dir, outscript_dir => $reference_dir, - process_time => $recipe_resource{time}, + process_time => $recipe{time}, recipe_data_directory_path => $active_parameter_href->{reference_dir}, recipe_directory => $recipe_name . $UNDERSCORE . $reference_version, recipe_name => $recipe_name, temp_directory => $temp_directory, - source_environment_commands_ref => $recipe_resource{load_env_ref}, + source_environment_commands_ref => $recipe{load_env_ref}, } ); @@ -183,7 +180,7 @@ sub download_1000g_omni { ## Close filehandleS close $filehandle or $log->logcroak(q{Could not close filehandle}); - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { ## No upstream or downstream dependencies slurm_submit_job_no_dependency_dead_end( diff --git a/lib/MIP/Recipes/Download/1000g_sites.pm b/lib/MIP/Recipes/Download/1000g_sites.pm index 5ee8093a5..e333b8c73 100644 --- a/lib/MIP/Recipes/Download/1000g_sites.pm +++ b/lib/MIP/Recipes/Download/1000g_sites.pm @@ -117,10 +117,10 @@ sub download_1000g_sites { check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; - use MIP::Get::Parameter qw{ get_recipe_resources }; + use MIP::Processmanagement::Slurm_processes qw{ slurm_submit_job_no_dependency_dead_end }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Recipes::Download::Get_reference qw{ get_reference }; use MIP::Script::Setup_script qw{ setup_script }; - use MIP::Processmanagement::Slurm_processes qw{ slurm_submit_job_no_dependency_dead_end }; ### PREPROCESSING: @@ -131,17 +131,14 @@ sub download_1000g_sites { my $reference_dir = $active_parameter_href->{reference_dir}; my @reference_genome_versions = @{ $active_parameter_href->{reference_genome_versions} }; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, recipe_name 
=> $recipe_name, } ); - ## Set recipe mode - my $recipe_mode = $active_parameter_href->{$recipe_name}; - - ## Filehandle(s) +## Filehandle(s) # Create anonymous filehandle my $filehandle = IO::Handle->new(); @@ -149,19 +146,19 @@ sub download_1000g_sites { my ( $recipe_file_path, $recipe_info_path ) = setup_script( { active_parameter_href => $active_parameter_href, - core_number => $recipe_resource{core_number}, + core_number => $recipe{core_number}, directory_id => q{mip_download}, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, + memory_allocation => $recipe{memory}, outdata_dir => $reference_dir, outscript_dir => $reference_dir, - process_time => $recipe_resource{time}, + process_time => $recipe{time}, recipe_data_directory_path => $active_parameter_href->{reference_dir}, recipe_directory => $recipe_name . $UNDERSCORE . $reference_version, recipe_name => $recipe_name, temp_directory => $temp_directory, - source_environment_commands_ref => $recipe_resource{load_env_ref}, + source_environment_commands_ref => $recipe{load_env_ref}, } ); @@ -183,7 +180,7 @@ sub download_1000g_sites { ## Close filehandleS close $filehandle or $log->logcroak(q{Could not close filehandle}); - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { ## No upstream or downstream dependencies slurm_submit_job_no_dependency_dead_end( diff --git a/lib/MIP/Recipes/Download/1000g_snps.pm b/lib/MIP/Recipes/Download/1000g_snps.pm index 1adb51670..aa995fbec 100644 --- a/lib/MIP/Recipes/Download/1000g_snps.pm +++ b/lib/MIP/Recipes/Download/1000g_snps.pm @@ -117,7 +117,7 @@ sub download_1000g_snps { check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; - use MIP::Get::Parameter qw{ get_recipe_resources }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Recipes::Download::Get_reference qw{ get_reference }; use MIP::Script::Setup_script qw{ setup_script }; use MIP::Processmanagement::Slurm_processes qw{ 
slurm_submit_job_no_dependency_dead_end }; @@ -131,17 +131,14 @@ sub download_1000g_snps { my $reference_dir = $active_parameter_href->{reference_dir}; my @reference_genome_versions = @{ $active_parameter_href->{reference_genome_versions} }; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, recipe_name => $recipe_name, } ); - ## Set recipe mode - my $recipe_mode = $active_parameter_href->{$recipe_name}; - - ## Filehandle(s) +## Filehandle(s) # Create anonymous filehandle my $filehandle = IO::Handle->new(); @@ -149,19 +146,19 @@ sub download_1000g_snps { my ( $recipe_file_path, $recipe_info_path ) = setup_script( { active_parameter_href => $active_parameter_href, - core_number => $recipe_resource{core_number}, + core_number => $recipe{core_number}, directory_id => q{mip_download}, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, + memory_allocation => $recipe{memory}, outdata_dir => $reference_dir, outscript_dir => $reference_dir, - process_time => $recipe_resource{time}, + process_time => $recipe{time}, recipe_data_directory_path => $active_parameter_href->{reference_dir}, recipe_directory => $recipe_name . $UNDERSCORE . 
$reference_version, recipe_name => $recipe_name, temp_directory => $temp_directory, - source_environment_commands_ref => $recipe_resource{load_env_ref} + source_environment_commands_ref => $recipe{load_env_ref} } ); @@ -183,7 +180,7 @@ sub download_1000g_snps { ## Close filehandleS close $filehandle or $log->logcroak(q{Could not close filehandle}); - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { ## No upstream or downstream dependencies slurm_submit_job_no_dependency_dead_end( diff --git a/lib/MIP/Recipes/Download/Cadd_offline_annotations.pm b/lib/MIP/Recipes/Download/Cadd_offline_annotations.pm index edf25dc9e..5d1166563 100644 --- a/lib/MIP/Recipes/Download/Cadd_offline_annotations.pm +++ b/lib/MIP/Recipes/Download/Cadd_offline_annotations.pm @@ -117,11 +117,11 @@ sub download_cadd_offline_annotations { check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; - use MIP::Get::Parameter qw{ get_recipe_resources }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Program::Gnu::Coreutils qw{ gnu_mkdir gnu_mv gnu_rm_and_echo }; + use MIP::Processmanagement::Slurm_processes qw{ slurm_submit_job_no_dependency_dead_end }; use MIP::Recipes::Download::Get_reference qw{ get_reference }; use MIP::Script::Setup_script qw{ setup_script }; - use MIP::Processmanagement::Slurm_processes qw{ slurm_submit_job_no_dependency_dead_end }; ### PREPROCESSING: @@ -131,17 +131,14 @@ sub download_cadd_offline_annotations { ## Unpack parameters my $reference_dir = $active_parameter_href->{reference_dir}; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, recipe_name => $recipe_name, } ); - ## Set recipe mode - my $recipe_mode = $active_parameter_href->{$recipe_name}; - - ## Filehandle(s) +## Filehandle(s) # Create anonymous filehandle my $filehandle = IO::Handle->new(); @@ -149,18 +146,18 @@ sub download_cadd_offline_annotations { my ( $recipe_file_path, 
$recipe_info_path ) = setup_script( { active_parameter_href => $active_parameter_href, - core_number => $recipe_resource{core_number}, + core_number => $recipe{core_number}, directory_id => q{mip_download}, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, + memory_allocation => $recipe{memory}, outdata_dir => $reference_dir, outscript_dir => $reference_dir, - process_time => $recipe_resource{time}, + process_time => $recipe{time}, recipe_data_directory_path => $active_parameter_href->{reference_dir}, recipe_directory => $recipe_name . $UNDERSCORE . $reference_version, recipe_name => $recipe_name, - source_environment_commands_ref => $recipe_resource{load_env_ref}, + source_environment_commands_ref => $recipe{load_env_ref}, } ); @@ -208,7 +205,7 @@ sub download_cadd_offline_annotations { ## Close filehandleS close $filehandle or $log->logcroak(q{Could not close filehandle}); - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { ## No upstream or downstream dependencies slurm_submit_job_no_dependency_dead_end( diff --git a/lib/MIP/Recipes/Download/Cadd_to_vcf_header.pm b/lib/MIP/Recipes/Download/Cadd_to_vcf_header.pm index 8afbe1bc5..6a1a70fdd 100644 --- a/lib/MIP/Recipes/Download/Cadd_to_vcf_header.pm +++ b/lib/MIP/Recipes/Download/Cadd_to_vcf_header.pm @@ -117,10 +117,10 @@ sub download_cadd_to_vcf_header { check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; - use MIP::Get::Parameter qw{ get_recipe_resources }; + use MIP::Processmanagement::Slurm_processes qw{ slurm_submit_job_no_dependency_dead_end }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Recipes::Download::Get_reference qw{ get_reference }; use MIP::Script::Setup_script qw{ setup_script }; - use MIP::Processmanagement::Slurm_processes qw{ slurm_submit_job_no_dependency_dead_end }; ### PREPROCESSING: @@ -130,17 +130,14 @@ sub download_cadd_to_vcf_header { ## Unpack parameters my $reference_dir = 
$active_parameter_href->{reference_dir}; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, recipe_name => $recipe_name, } ); - ## Set recipe mode - my $recipe_mode = $active_parameter_href->{$recipe_name}; - - ## Filehandle(s) +## Filehandle(s) # Create anonymous filehandle my $filehandle = IO::Handle->new(); @@ -148,18 +145,18 @@ sub download_cadd_to_vcf_header { my ( $recipe_file_path, $recipe_info_path ) = setup_script( { active_parameter_href => $active_parameter_href, - core_number => $recipe_resource{core_number}, + core_number => $recipe{core_number}, directory_id => q{mip_download}, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, + memory_allocation => $recipe{memory}, outdata_dir => $reference_dir, outscript_dir => $reference_dir, - process_time => $recipe_resource{time}, + process_time => $recipe{time}, recipe_data_directory_path => $active_parameter_href->{reference_dir}, recipe_directory => $recipe_name . $UNDERSCORE . 
$reference_version, recipe_name => $recipe_name, - source_environment_commands_ref => $recipe_resource{load_env_ref}, + source_environment_commands_ref => $recipe{load_env_ref}, } ); @@ -181,7 +178,7 @@ sub download_cadd_to_vcf_header { ## Close filehandleS close $filehandle or $log->logcroak(q{Could not close filehandle}); - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { ## No upstream or downstream dependencies slurm_submit_job_no_dependency_dead_end( diff --git a/lib/MIP/Recipes/Download/Cadd_whole_genome_snvs.pm b/lib/MIP/Recipes/Download/Cadd_whole_genome_snvs.pm index d3e1e6adb..fa5491904 100644 --- a/lib/MIP/Recipes/Download/Cadd_whole_genome_snvs.pm +++ b/lib/MIP/Recipes/Download/Cadd_whole_genome_snvs.pm @@ -117,7 +117,7 @@ sub download_cadd_whole_genome_snvs { check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; - use MIP::Get::Parameter qw{ get_recipe_resources }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Recipes::Download::Get_reference qw{ get_reference }; use MIP::Script::Setup_script qw{ setup_script }; use MIP::Processmanagement::Slurm_processes qw{ slurm_submit_job_no_dependency_dead_end }; @@ -130,17 +130,14 @@ sub download_cadd_whole_genome_snvs { ## Unpack parameters my $reference_dir = $active_parameter_href->{reference_dir}; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, recipe_name => $recipe_name, } ); - ## Set recipe mode - my $recipe_mode = $active_parameter_href->{$recipe_name}; - - ## Filehandle(s) +## Filehandle(s) # Create anonymous filehandle my $filehandle = IO::Handle->new(); @@ -148,18 +145,18 @@ sub download_cadd_whole_genome_snvs { my ( $recipe_file_path, $recipe_info_path ) = setup_script( { active_parameter_href => $active_parameter_href, - core_number => $recipe_resource{core_number}, + core_number => $recipe{core_number}, directory_id => q{mip_download}, filehandle => $filehandle, 
job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, + memory_allocation => $recipe{memory}, outdata_dir => $reference_dir, outscript_dir => $reference_dir, - process_time => $recipe_resource{time}, + process_time => $recipe{time}, recipe_data_directory_path => $active_parameter_href->{reference_dir}, recipe_directory => $recipe_name . $UNDERSCORE . $reference_version, recipe_name => $recipe_name, - source_environment_commands_ref => $recipe_resource{load_env_ref}, + source_environment_commands_ref => $recipe{load_env_ref}, } ); @@ -181,7 +178,7 @@ sub download_cadd_whole_genome_snvs { ## Close filehandleS close $filehandle or $log->logcroak(q{Could not close filehandle}); - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { ## No upstream or downstream dependencies slurm_submit_job_no_dependency_dead_end( diff --git a/lib/MIP/Recipes/Download/Chromograph_cytoband.pm b/lib/MIP/Recipes/Download/Chromograph_cytoband.pm index 37ad6eeff..1d23bdd99 100644 --- a/lib/MIP/Recipes/Download/Chromograph_cytoband.pm +++ b/lib/MIP/Recipes/Download/Chromograph_cytoband.pm @@ -116,10 +116,10 @@ sub download_chromograph_cytoband { check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; - use MIP::Get::Parameter qw{ get_recipe_resources }; + use MIP::Processmanagement::Slurm_processes qw{ slurm_submit_job_no_dependency_dead_end }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Recipes::Download::Get_reference qw{ get_reference }; use MIP::Script::Setup_script qw{ setup_script }; - use MIP::Processmanagement::Slurm_processes qw{ slurm_submit_job_no_dependency_dead_end }; ### PREPROCESSING: @@ -129,17 +129,14 @@ sub download_chromograph_cytoband { ## Unpack parameters my $reference_dir = $active_parameter_href->{reference_dir}; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, recipe_name => $recipe_name, } ); - ## Set recipe mode - my 
$recipe_mode = $active_parameter_href->{$recipe_name}; - - ## Filehandle(s) +## Filehandle(s) # Create anonymous filehandle my $filehandle = IO::Handle->new(); @@ -147,18 +144,18 @@ sub download_chromograph_cytoband { my ( $recipe_file_path, $recipe_info_path ) = setup_script( { active_parameter_href => $active_parameter_href, - core_number => $recipe_resource{core_number}, + core_number => $recipe{core_number}, directory_id => q{mip_download}, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, + memory_allocation => $recipe{memory}, outdata_dir => $reference_dir, outscript_dir => $reference_dir, - process_time => $recipe_resource{time}, + process_time => $recipe{time}, recipe_data_directory_path => $active_parameter_href->{reference_dir}, recipe_directory => $recipe_name . $UNDERSCORE . $reference_version, recipe_name => $recipe_name, - source_environment_commands_ref => $recipe_resource{load_env_ref}, + source_environment_commands_ref => $recipe{load_env_ref}, } ); @@ -180,7 +177,7 @@ sub download_chromograph_cytoband { ## Close filehandleS close $filehandle or $log->logcroak(q{Could not close filehandle}); - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { ## No upstream or downstream dependencies slurm_submit_job_no_dependency_dead_end( diff --git a/lib/MIP/Recipes/Download/Clinvar.pm b/lib/MIP/Recipes/Download/Clinvar.pm index abd0a00f8..d27bfbb36 100644 --- a/lib/MIP/Recipes/Download/Clinvar.pm +++ b/lib/MIP/Recipes/Download/Clinvar.pm @@ -118,13 +118,13 @@ sub download_clinvar { check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; - use MIP::Get::Parameter qw{ get_recipe_resources }; use MIP::Program::Gnu::Coreutils qw{ gnu_rm }; use MIP::Program::Htslib qw{ htslib_bgzip htslib_tabix }; use MIP::Program::Bcftools qw{ bcftools_annotate }; + use MIP::Processmanagement::Slurm_processes qw{ slurm_submit_job_no_dependency_dead_end }; use MIP::Recipes::Download::Get_reference qw{ 
get_reference }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Script::Setup_script qw{ setup_script }; - use MIP::Processmanagement::Slurm_processes qw{ slurm_submit_job_no_dependency_dead_end }; ### PREPROCESSING: @@ -134,17 +134,14 @@ sub download_clinvar { ## Unpack parameters my $reference_dir = $active_parameter_href->{reference_dir}; my @reference_genome_versions = @{ $active_parameter_href->{reference_genome_versions} }; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, recipe_name => $recipe_name, } ); - ## Set recipe mode - my $recipe_mode = $active_parameter_href->{$recipe_name}; - - ## Filehandle(s) +## Filehandle(s) # Create anonymous filehandle my $filehandle = IO::Handle->new(); @@ -152,19 +149,19 @@ sub download_clinvar { my ( $recipe_file_path, $recipe_info_path ) = setup_script( { active_parameter_href => $active_parameter_href, - core_number => $recipe_resource{core_number}, + core_number => $recipe{core_number}, directory_id => q{mip_download}, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, + memory_allocation => $recipe{memory}, outdata_dir => $reference_dir, outscript_dir => $reference_dir, - process_time => $recipe_resource{time}, + process_time => $recipe{time}, recipe_data_directory_path => $active_parameter_href->{reference_dir}, recipe_directory => $recipe_name . $UNDERSCORE . 
$reference_version, recipe_name => $recipe_name, temp_directory => $temp_directory, - source_environment_commands_ref => $recipe_resource{load_env_ref}, + source_environment_commands_ref => $recipe{load_env_ref}, } ); @@ -251,7 +248,7 @@ sub download_clinvar { ## Close filehandleS close $filehandle or $log->logcroak(q{Could not close filehandle}); - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { ## No upstream or downstream dependencies slurm_submit_job_no_dependency_dead_end( diff --git a/lib/MIP/Recipes/Download/Ctat_resource_lib.pm b/lib/MIP/Recipes/Download/Ctat_resource_lib.pm index f2140c6df..309bbd977 100644 --- a/lib/MIP/Recipes/Download/Ctat_resource_lib.pm +++ b/lib/MIP/Recipes/Download/Ctat_resource_lib.pm @@ -117,11 +117,11 @@ sub download_ctat_resource_lib { check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; - use MIP::Get::Parameter qw{ get_recipe_resources }; + use MIP::Processmanagement::Slurm_processes qw{ slurm_submit_job_no_dependency_dead_end }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Program::Gnu::Coreutils qw{ gnu_mv }; use MIP::Recipes::Download::Get_reference qw{ get_reference }; use MIP::Script::Setup_script qw{ setup_script }; - use MIP::Processmanagement::Slurm_processes qw{ slurm_submit_job_no_dependency_dead_end }; ### PREPROCESSING: @@ -131,17 +131,14 @@ sub download_ctat_resource_lib { ## Unpack parameters my $reference_dir = $active_parameter_href->{reference_dir}; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, recipe_name => $recipe_name, } ); - ## Set recipe mode - my $recipe_mode = $active_parameter_href->{$recipe_name}; - - ## Filehandle(s) +## Filehandle(s) # Create anonymous filehandle my $filehandle = IO::Handle->new(); @@ -149,18 +146,18 @@ sub download_ctat_resource_lib { my ( $recipe_file_path, $recipe_info_path ) = setup_script( { active_parameter_href => $active_parameter_href, - 
core_number => $recipe_resource{core_number}, + core_number => $recipe{core_number}, directory_id => q{mip_download}, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, + memory_allocation => $recipe{memory}, outdata_dir => $reference_dir, outscript_dir => $reference_dir, - process_time => $recipe_resource{time}, + process_time => $recipe{time}, recipe_data_directory_path => $active_parameter_href->{reference_dir}, recipe_directory => $recipe_name . $UNDERSCORE . $reference_version, recipe_name => $recipe_name, - source_environment_commands_ref => $recipe_resource{load_env_ref}, + source_environment_commands_ref => $recipe{load_env_ref}, } ); @@ -195,7 +192,7 @@ sub download_ctat_resource_lib { ## Close filehandleS close $filehandle or $log->logcroak(q{Could not close filehandle}); - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { ## No upstream or downstream dependencies slurm_submit_job_no_dependency_dead_end( diff --git a/lib/MIP/Recipes/Download/Dbnsfp.pm b/lib/MIP/Recipes/Download/Dbnsfp.pm index 4b0d45621..715084dc4 100644 --- a/lib/MIP/Recipes/Download/Dbnsfp.pm +++ b/lib/MIP/Recipes/Download/Dbnsfp.pm @@ -123,14 +123,14 @@ sub download_dbnsfp { check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; - use MIP::Get::Parameter qw{ get_recipe_resources }; use MIP::Program::Gnu::Coreutils qw{ gnu_cat gnu_head gnu_sort }; use MIP::Program::Gnu::Software::Gnu_grep qw{ gnu_grep }; use MIP::Language::Awk qw{ awk }; use MIP::Program::Htslib qw{ htslib_bgzip htslib_tabix }; + use MIP::Processmanagement::Slurm_processes qw{ slurm_submit_job_no_dependency_dead_end }; use MIP::Recipes::Download::Get_reference qw{ get_reference }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Script::Setup_script qw{ setup_script }; - use MIP::Processmanagement::Slurm_processes qw{ slurm_submit_job_no_dependency_dead_end }; ### PREPROCESSING: @@ -140,17 +140,14 @@ sub download_dbnsfp { ## Unpack 
parameters my $reference_dir = $active_parameter_href->{reference_dir}; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, recipe_name => $recipe_name, } ); - ## Set recipe mode - my $recipe_mode = $active_parameter_href->{$recipe_name}; - - ## Filehandle(s) +## Filehandle(s) # Create anonymous filehandle my $filehandle = IO::Handle->new(); @@ -158,18 +155,18 @@ sub download_dbnsfp { my ( $recipe_file_path, $recipe_info_path ) = setup_script( { active_parameter_href => $active_parameter_href, - core_number => $recipe_resource{core_number}, + core_number => $recipe{core_number}, directory_id => q{mip_download}, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, + memory_allocation => $recipe{memory}, outdata_dir => $reference_dir, outscript_dir => $reference_dir, - process_time => $recipe_resource{time}, + process_time => $recipe{time}, recipe_data_directory_path => $active_parameter_href->{reference_dir}, recipe_directory => $recipe_name . $UNDERSCORE . 
$reference_version, recipe_name => $recipe_name, - source_environment_commands_ref => $recipe_resource{load_env_ref}, + source_environment_commands_ref => $recipe{load_env_ref}, temp_directory => $temp_directory, } ); @@ -230,7 +227,7 @@ sub download_dbnsfp { ## Close filehandleS close $filehandle or $log->logcroak(q{Could not close filehandle}); - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { ## No upstream or downstream dependencies slurm_submit_job_no_dependency_dead_end( diff --git a/lib/MIP/Recipes/Download/Dbsnp.pm b/lib/MIP/Recipes/Download/Dbsnp.pm index 8c2012fbf..5b5d3e443 100644 --- a/lib/MIP/Recipes/Download/Dbsnp.pm +++ b/lib/MIP/Recipes/Download/Dbsnp.pm @@ -117,7 +117,7 @@ sub download_dbsnp { check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; - use MIP::Get::Parameter qw{ get_recipe_resources }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Recipes::Download::Get_reference qw{ get_reference }; use MIP::Script::Setup_script qw{ setup_script }; use MIP::Processmanagement::Slurm_processes qw{ slurm_submit_job_no_dependency_dead_end }; @@ -131,17 +131,14 @@ sub download_dbsnp { my $reference_dir = $active_parameter_href->{reference_dir}; my @reference_genome_versions = @{ $active_parameter_href->{reference_genome_versions} }; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, recipe_name => $recipe_name, } ); - ## Set recipe mode - my $recipe_mode = $active_parameter_href->{$recipe_name}; - - ## Filehandle(s) +## Filehandle(s) # Create anonymous filehandle my $filehandle = IO::Handle->new(); @@ -149,19 +146,19 @@ sub download_dbsnp { my ( $recipe_file_path, $recipe_info_path ) = setup_script( { active_parameter_href => $active_parameter_href, - core_number => $recipe_resource{core_number}, + core_number => $recipe{core_number}, directory_id => q{mip_download}, filehandle => $filehandle, job_id_href => $job_id_href, - 
memory_allocation => $recipe_resource{memory}, + memory_allocation => $recipe{memory}, outdata_dir => $reference_dir, outscript_dir => $reference_dir, - process_time => $recipe_resource{time}, + process_time => $recipe{time}, recipe_data_directory_path => $active_parameter_href->{reference_dir}, recipe_directory => $recipe_name . $UNDERSCORE . $reference_version, recipe_name => $recipe_name, temp_directory => $temp_directory, - source_environment_commands_ref => $recipe_resource{load_env_ref}, + source_environment_commands_ref => $recipe{load_env_ref}, } ); @@ -183,7 +180,7 @@ sub download_dbsnp { ## Close filehandleS close $filehandle or $log->logcroak(q{Could not close filehandle}); - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { ## No upstream or downstream dependencies slurm_submit_job_no_dependency_dead_end( diff --git a/lib/MIP/Recipes/Download/Delly_exclude.pm b/lib/MIP/Recipes/Download/Delly_exclude.pm index 3436a66fc..9ea0a47ae 100644 --- a/lib/MIP/Recipes/Download/Delly_exclude.pm +++ b/lib/MIP/Recipes/Download/Delly_exclude.pm @@ -117,10 +117,10 @@ sub download_delly_exclude { check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; - use MIP::Get::Parameter qw{ get_recipe_resources }; + use MIP::Processmanagement::Slurm_processes qw{ slurm_submit_job_no_dependency_dead_end }; use MIP::Recipes::Download::Get_reference qw{ get_reference }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Script::Setup_script qw{ setup_script }; - use MIP::Processmanagement::Slurm_processes qw{ slurm_submit_job_no_dependency_dead_end }; ### PREPROCESSING: @@ -130,17 +130,14 @@ sub download_delly_exclude { ## Unpack parameters my $reference_dir = $active_parameter_href->{reference_dir}; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, recipe_name => $recipe_name, } ); - ## Set recipe mode - my $recipe_mode = $active_parameter_href->{$recipe_name}; - 
- ## Filehandle(s) +## Filehandle(s) # Create anonymous filehandle my $filehandle = IO::Handle->new(); @@ -148,18 +145,18 @@ sub download_delly_exclude { my ( $recipe_file_path, $recipe_info_path ) = setup_script( { active_parameter_href => $active_parameter_href, - core_number => $recipe_resource{core_number}, + core_number => $recipe{core_number}, directory_id => q{mip_download}, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, + memory_allocation => $recipe{memory}, outdata_dir => $reference_dir, outscript_dir => $reference_dir, - process_time => $recipe_resource{time}, + process_time => $recipe{time}, recipe_data_directory_path => $active_parameter_href->{reference_dir}, recipe_directory => $recipe_name . $UNDERSCORE . $reference_version, recipe_name => $recipe_name, - source_environment_commands_ref => $recipe_resource{load_env_ref}, + source_environment_commands_ref => $recipe{load_env_ref}, } ); @@ -181,7 +178,7 @@ sub download_delly_exclude { ## Close filehandleS close $filehandle or $log->logcroak(q{Could not close filehandle}); - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { ## No upstream or downstream dependencies slurm_submit_job_no_dependency_dead_end( diff --git a/lib/MIP/Recipes/Download/Expansionhunter.pm b/lib/MIP/Recipes/Download/Expansionhunter.pm index 58a8e8c49..a87a08272 100644 --- a/lib/MIP/Recipes/Download/Expansionhunter.pm +++ b/lib/MIP/Recipes/Download/Expansionhunter.pm @@ -117,10 +117,10 @@ sub download_expansionhunter { check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; - use MIP::Get::Parameter qw{ get_recipe_resources }; + use MIP::Processmanagement::Slurm_processes qw{ slurm_submit_job_no_dependency_dead_end }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Recipes::Download::Get_reference qw{ get_reference }; use MIP::Script::Setup_script qw{ setup_script }; - use MIP::Processmanagement::Slurm_processes qw{ 
slurm_submit_job_no_dependency_dead_end }; ### PREPROCESSING: @@ -130,17 +130,14 @@ sub download_expansionhunter { ## Unpack parameters my $reference_dir = $active_parameter_href->{reference_dir}; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, recipe_name => $recipe_name, } ); - ## Set recipe mode - my $recipe_mode = $active_parameter_href->{$recipe_name}; - - ## Filehandle(s) +## Filehandle(s) # Create anonymous filehandle my $filehandle = IO::Handle->new(); @@ -148,19 +145,19 @@ sub download_expansionhunter { my ( $recipe_file_path, $recipe_info_path ) = setup_script( { active_parameter_href => $active_parameter_href, - core_number => $recipe_resource{core_number}, + core_number => $recipe{core_number}, directory_id => q{mip_download}, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, + memory_allocation => $recipe{memory}, outdata_dir => $reference_dir, outscript_dir => $reference_dir, - process_time => $recipe_resource{time}, + process_time => $recipe{time}, recipe_data_directory_path => $active_parameter_href->{reference_dir}, recipe_directory => $recipe_name . $UNDERSCORE . 
$reference_version, recipe_name => $recipe_name, - source_environment_commands_ref => $recipe_resource{load_env_ref}, + source_environment_commands_ref => $recipe{load_env_ref}, } ); @@ -182,7 +179,7 @@ sub download_expansionhunter { ## Close filehandleS close $filehandle or $log->logcroak(q{Could not close filehandle}); - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { ## No upstream or downstream dependencies slurm_submit_job_no_dependency_dead_end( diff --git a/lib/MIP/Recipes/Download/Gatk_mitochondrial_ref.pm b/lib/MIP/Recipes/Download/Gatk_mitochondrial_ref.pm index 4ac44e401..a0e140b5e 100644 --- a/lib/MIP/Recipes/Download/Gatk_mitochondrial_ref.pm +++ b/lib/MIP/Recipes/Download/Gatk_mitochondrial_ref.pm @@ -117,10 +117,10 @@ sub download_gatk_mitochondrial_ref { check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; - use MIP::Get::Parameter qw{ get_recipe_resources }; + use MIP::Processmanagement::Slurm_processes qw{ slurm_submit_job_no_dependency_dead_end }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Recipes::Download::Get_reference qw{ get_reference }; use MIP::Script::Setup_script qw{ setup_script }; - use MIP::Processmanagement::Slurm_processes qw{ slurm_submit_job_no_dependency_dead_end }; ### PREPROCESSING: @@ -130,17 +130,14 @@ sub download_gatk_mitochondrial_ref { ## Unpack parameters my $reference_dir = $active_parameter_href->{reference_dir}; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, recipe_name => $recipe_name, } ); - ## Set recipe mode - my $recipe_mode = $active_parameter_href->{$recipe_name}; - - ## Filehandle(s) +## Filehandle(s) # Create anonymous filehandle my $filehandle = IO::Handle->new(); @@ -148,18 +145,18 @@ sub download_gatk_mitochondrial_ref { my ( $recipe_file_path, $recipe_info_path ) = setup_script( { active_parameter_href => $active_parameter_href, - core_number => 
$recipe_resource{core_number}, + core_number => $recipe{core_number}, directory_id => q{mip_download}, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, + memory_allocation => $recipe{memory}, outdata_dir => $reference_dir, outscript_dir => $reference_dir, - process_time => $recipe_resource{time}, + process_time => $recipe{time}, recipe_data_directory_path => $active_parameter_href->{reference_dir}, recipe_directory => $recipe_name . $UNDERSCORE . $reference_version, recipe_name => $recipe_name, - source_environment_commands_ref => $recipe_resource{load_env_ref}, + source_environment_commands_ref => $recipe{load_env_ref}, } ); @@ -181,7 +178,7 @@ sub download_gatk_mitochondrial_ref { ## Close filehandleS close $filehandle or $log->logcroak(q{Could not close filehandle}); - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { ## No upstream or downstream dependencies slurm_submit_job_no_dependency_dead_end( diff --git a/lib/MIP/Recipes/Download/Genbank_haplogroup.pm b/lib/MIP/Recipes/Download/Genbank_haplogroup.pm index 0a3d616aa..664aaf1fe 100644 --- a/lib/MIP/Recipes/Download/Genbank_haplogroup.pm +++ b/lib/MIP/Recipes/Download/Genbank_haplogroup.pm @@ -117,10 +117,10 @@ sub download_genbank_haplogroup { check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; - use MIP::Get::Parameter qw{ get_recipe_resources }; + use MIP::Processmanagement::Slurm_processes qw{ slurm_submit_job_no_dependency_dead_end }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Recipes::Download::Get_reference qw{ get_reference }; use MIP::Script::Setup_script qw{ setup_script }; - use MIP::Processmanagement::Slurm_processes qw{ slurm_submit_job_no_dependency_dead_end }; ### PREPROCESSING: @@ -130,17 +130,14 @@ sub download_genbank_haplogroup { ## Unpack parameters my $reference_dir = $active_parameter_href->{reference_dir}; - my %recipe_resource = get_recipe_resources( + my %recipe = 
parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, recipe_name => $recipe_name, } ); - ## Set recipe mode - my $recipe_mode = $active_parameter_href->{$recipe_name}; - - ## Filehandle(s) +## Filehandle(s) # Create anonymous filehandle my $filehandle = IO::Handle->new(); @@ -148,18 +145,18 @@ sub download_genbank_haplogroup { my ( $recipe_file_path, $recipe_info_path ) = setup_script( { active_parameter_href => $active_parameter_href, - core_number => $recipe_resource{core_number}, + core_number => $recipe{core_number}, directory_id => q{mip_download}, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, + memory_allocation => $recipe{memory}, outdata_dir => $reference_dir, outscript_dir => $reference_dir, - process_time => $recipe_resource{time}, + process_time => $recipe{time}, recipe_data_directory_path => $active_parameter_href->{reference_dir}, recipe_directory => $recipe_name . $UNDERSCORE . $reference_version, recipe_name => $recipe_name, - source_environment_commands_ref => $recipe_resource{load_env_ref}, + source_environment_commands_ref => $recipe{load_env_ref}, } ); @@ -181,7 +178,7 @@ sub download_genbank_haplogroup { ## Close filehandleS close $filehandle or $log->logcroak(q{Could not close filehandle}); - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { ## No upstream or downstream dependencies slurm_submit_job_no_dependency_dead_end( diff --git a/lib/MIP/Recipes/Download/Gencode_annotation.pm b/lib/MIP/Recipes/Download/Gencode_annotation.pm index f50baee63..fe7cd7ba0 100644 --- a/lib/MIP/Recipes/Download/Gencode_annotation.pm +++ b/lib/MIP/Recipes/Download/Gencode_annotation.pm @@ -117,11 +117,11 @@ sub download_gencode_annotation { check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; - use MIP::Get::Parameter qw{ get_recipe_resources }; + use MIP::Processmanagement::Slurm_processes qw{ slurm_submit_job_no_dependency_dead_end }; + use MIP::Recipe 
qw{ parse_recipe_prerequisites }; use MIP::Program::Gtf2bed qw{ gtf2bed }; use MIP::Recipes::Download::Get_reference qw{ get_reference }; use MIP::Script::Setup_script qw{ setup_script }; - use MIP::Processmanagement::Slurm_processes qw{ slurm_submit_job_no_dependency_dead_end }; ### PREPROCESSING: @@ -131,17 +131,14 @@ sub download_gencode_annotation { ## Unpack parameters my $reference_dir = $active_parameter_href->{reference_dir}; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, recipe_name => $recipe_name, } ); - ## Set recipe mode - my $recipe_mode = $active_parameter_href->{$recipe_name}; - - ## Filehandle(s) +## Filehandle(s) # Create anonymous filehandle my $filehandle = IO::Handle->new(); @@ -149,18 +146,18 @@ sub download_gencode_annotation { my ( $recipe_file_path, $recipe_info_path ) = setup_script( { active_parameter_href => $active_parameter_href, - core_number => $recipe_resource{core_number}, + core_number => $recipe{core_number}, directory_id => q{mip_download}, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, + memory_allocation => $recipe{memory}, outdata_dir => $reference_dir, outscript_dir => $reference_dir, - process_time => $recipe_resource{time}, + process_time => $recipe{time}, recipe_data_directory_path => $active_parameter_href->{reference_dir}, recipe_directory => $recipe_name . $UNDERSCORE . 
$reference_version, recipe_name => $recipe_name, - source_environment_commands_ref => $recipe_resource{load_env_ref}, + source_environment_commands_ref => $recipe{load_env_ref}, } ); @@ -223,7 +220,7 @@ sub download_gencode_annotation { ## Close filehandleS close $filehandle or $log->logcroak(q{Could not close filehandle}); - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { ## No upstream or downstream dependencies slurm_submit_job_no_dependency_dead_end( diff --git a/lib/MIP/Recipes/Download/Genomic_superdups.pm b/lib/MIP/Recipes/Download/Genomic_superdups.pm index d4ec56b1d..f74335d61 100644 --- a/lib/MIP/Recipes/Download/Genomic_superdups.pm +++ b/lib/MIP/Recipes/Download/Genomic_superdups.pm @@ -121,15 +121,14 @@ sub download_genomic_superdups { check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; - use MIP::Get::Parameter qw{ get_recipe_resources }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Program::Gnu::Coreutils qw{ gnu_cut gnu_sort gnu_uniq }; use MIP::Program::Gnu::Software::Gnu_grep qw{ gnu_grep }; use MIP::File::Path qw{ remove_file_path_suffix }; use MIP::Program::Htslib qw{ htslib_bgzip htslib_tabix }; use MIP::Recipes::Download::Get_reference qw{ get_reference }; use MIP::Script::Setup_script qw{ setup_script }; - use MIP::Processmanagement::Slurm_processes - qw{ slurm_submit_job_no_dependency_dead_end }; + use MIP::Processmanagement::Slurm_processes qw{ slurm_submit_job_no_dependency_dead_end }; ### PREPROCESSING: @@ -139,36 +138,33 @@ sub download_genomic_superdups { ## Unpack parameters my $reference_dir = $active_parameter_href->{reference_dir}; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, recipe_name => $recipe_name, } ); - ## Set recipe mode - my $recipe_mode = $active_parameter_href->{$recipe_name}; - - ## Filehandle(s) +## Filehandle(s) # Create anonymous filehandle my $filehandle = IO::Handle->new(); ## 
Creates recipe directories (info & data & script), recipe script filenames and writes sbatch header my ( $recipe_file_path, $recipe_info_path ) = setup_script( { - active_parameter_href => $active_parameter_href, - core_number => $recipe_resource{core_number}, - directory_id => q{mip_download}, - filehandle => $filehandle, - job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, - outdata_dir => $reference_dir, - outscript_dir => $reference_dir, - process_time => $recipe_resource{time}, - recipe_data_directory_path => $active_parameter_href->{reference_dir}, - recipe_directory => $recipe_name . $UNDERSCORE . $reference_version, - recipe_name => $recipe_name, - source_environment_commands_ref => $recipe_resource{load_env_ref}, + active_parameter_href => $active_parameter_href, + core_number => $recipe{core_number}, + directory_id => q{mip_download}, + filehandle => $filehandle, + job_id_href => $job_id_href, + memory_allocation => $recipe{memory}, + outdata_dir => $reference_dir, + outscript_dir => $reference_dir, + process_time => $recipe{time}, + recipe_data_directory_path => $active_parameter_href->{reference_dir}, + recipe_directory => $recipe_name . $UNDERSCORE . $reference_version, + recipe_name => $recipe_name, + source_environment_commands_ref => $recipe{load_env_ref}, } ); @@ -198,9 +194,7 @@ sub download_genomic_superdups { ## Build reformated outfile my $reformated_outfile = join $UNDERSCORE, - ( - $genome_version, $recipe_name, q{reformated}, q{-} . $reference_version . q{-.bed} - ); + ( $genome_version, $recipe_name, q{reformated}, q{-} . $reference_version . 
q{-.bed} ); my $reformated_outfile_path = catfile( $reference_dir, $reformated_outfile ); if ( $genome_version eq $GENOME_BUILD_VERSION_37 ) { @@ -291,7 +285,7 @@ sub download_genomic_superdups { ## Close filehandleS close $filehandle or $log->logcroak(q{Could not close filehandle}); - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { ## No upstream or downstream dependencies slurm_submit_job_no_dependency_dead_end( diff --git a/lib/MIP/Recipes/Download/Giab.pm b/lib/MIP/Recipes/Download/Giab.pm index 48b18c5a1..7aa8c552d 100644 --- a/lib/MIP/Recipes/Download/Giab.pm +++ b/lib/MIP/Recipes/Download/Giab.pm @@ -117,10 +117,10 @@ sub download_giab { check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; - use MIP::Get::Parameter qw{ get_recipe_resources }; + use MIP::Processmanagement::Slurm_processes qw{ slurm_submit_job_no_dependency_dead_end }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Recipes::Download::Get_reference qw{ get_reference }; use MIP::Script::Setup_script qw{ setup_script }; - use MIP::Processmanagement::Slurm_processes qw{ slurm_submit_job_no_dependency_dead_end }; ### PREPROCESSING: @@ -130,17 +130,14 @@ sub download_giab { ## Unpack parameters my $reference_dir = $active_parameter_href->{reference_dir}; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, recipe_name => $recipe_name, } ); - ## Set recipe mode - my $recipe_mode = $active_parameter_href->{$recipe_name}; - - ## Filehandle(s) +## Filehandle(s) # Create anonymous filehandle my $filehandle = IO::Handle->new(); @@ -148,18 +145,18 @@ sub download_giab { my ( $recipe_file_path, $recipe_info_path ) = setup_script( { active_parameter_href => $active_parameter_href, - core_number => $recipe_resource{core_number}, + core_number => $recipe{core_number}, directory_id => q{mip_download}, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => 
$recipe_resource{memory}, + memory_allocation => $recipe{memory}, outdata_dir => $reference_dir, outscript_dir => $reference_dir, - process_time => $recipe_resource{time}, + process_time => $recipe{time}, recipe_data_directory_path => $active_parameter_href->{reference_dir}, recipe_directory => $recipe_name . $UNDERSCORE . $reference_version, recipe_name => $recipe_name, - source_environment_commands_ref => $recipe_resource{load_env_ref}, + source_environment_commands_ref => $recipe{load_env_ref}, } ); @@ -181,7 +178,7 @@ sub download_giab { ## Close filehandleS close $filehandle or $log->logcroak(q{Could not close filehandle}); - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { ## No upstream or downstream dependencies slurm_submit_job_no_dependency_dead_end( diff --git a/lib/MIP/Recipes/Download/Gnomad.pm b/lib/MIP/Recipes/Download/Gnomad.pm index 46a4f1bf5..1697ae31c 100644 --- a/lib/MIP/Recipes/Download/Gnomad.pm +++ b/lib/MIP/Recipes/Download/Gnomad.pm @@ -17,8 +17,7 @@ use autodie qw{ :all }; use Readonly; ## MIPs lib/ -use MIP::Constants - qw{ $DASH $DOT $FORWARD_SLASH $NEWLINE $PIPE $SINGLE_QUOTE $SPACE $UNDERSCORE }; +use MIP::Constants qw{ $DASH $DOT $FORWARD_SLASH $NEWLINE $PIPE $SINGLE_QUOTE $SPACE $UNDERSCORE }; BEGIN { @@ -118,11 +117,10 @@ sub download_gnomad { check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; - use MIP::Get::Parameter qw{ get_recipe_resources }; - use MIP::Processmanagement::Slurm_processes - qw{ slurm_submit_job_no_dependency_dead_end }; + use MIP::Processmanagement::Slurm_processes qw{ slurm_submit_job_no_dependency_dead_end }; use MIP::Program::Bcftools qw{ bcftools_index }; use MIP::Recipes::Download::Get_reference qw{ get_reference }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Script::Setup_script qw{ setup_script }; ### PREPROCESSING: @@ -133,36 +131,33 @@ sub download_gnomad { ## Unpack parameters my $reference_dir = $active_parameter_href->{reference_dir}; - my %recipe_resource = 
get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, recipe_name => $recipe_name, } ); - ## Set recipe mode - my $recipe_mode = $active_parameter_href->{$recipe_name}; - - ## Filehandle(s) +## Filehandle(s) # Create anonymous filehandle my $filehandle = IO::Handle->new(); ## Creates recipe directories (info & data & script), recipe script filenames and writes sbatch header my ( $recipe_file_path, $recipe_info_path ) = setup_script( { - active_parameter_href => $active_parameter_href, - core_number => $recipe_resource{core_number}, - directory_id => q{mip_download}, - filehandle => $filehandle, - job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, - outdata_dir => $reference_dir, - outscript_dir => $reference_dir, - process_time => $recipe_resource{time}, - recipe_data_directory_path => $active_parameter_href->{reference_dir}, - recipe_directory => $recipe_name . $UNDERSCORE . $reference_version, - recipe_name => $recipe_name, - source_environment_commands_ref => $recipe_resource{load_env_ref}, + active_parameter_href => $active_parameter_href, + core_number => $recipe{core_number}, + directory_id => q{mip_download}, + filehandle => $filehandle, + job_id_href => $job_id_href, + memory_allocation => $recipe{memory}, + outdata_dir => $reference_dir, + outscript_dir => $reference_dir, + process_time => $recipe{time}, + recipe_data_directory_path => $active_parameter_href->{reference_dir}, + recipe_directory => $recipe_name . $UNDERSCORE . $reference_version, + recipe_name => $recipe_name, + source_environment_commands_ref => $recipe{load_env_ref}, } ); @@ -182,10 +177,7 @@ sub download_gnomad { ); my $reformated_outfile = join $UNDERSCORE, - ( - $genome_version, $recipe_name, q{reformated}, - $DASH . $reference_version . q{-.vcf.gz} - ); + ( $genome_version, $recipe_name, q{reformated}, $DASH . $reference_version . 
q{-.vcf.gz} ); my $reformated_outfile_path = catfile( $reference_dir, $reformated_outfile ); my %gnomad_post_processing = ( @@ -250,7 +242,7 @@ sub download_gnomad { ## Close filehandle close $filehandle or $log->logcroak(q{Could not close filehandle}); - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { ## No upstream or downstream dependencies slurm_submit_job_no_dependency_dead_end( @@ -412,8 +404,7 @@ sub _annotate { ## Annotate ## Only include sites for which at least one of the info keys are above zero - my $include_record = join $SPACE . $PIPE x 2 . $SPACE, - map { $_ . q{>0} } @{$info_keys_ref}; + my $include_record = join $SPACE . $PIPE x 2 . $SPACE, map { $_ . q{>0} } @{$info_keys_ref}; bcftools_annotate( { filehandle => $filehandle, @@ -489,8 +480,7 @@ sub _annotate_and_calculate_afpopmax { ## Annotate ## Only include sites for which at least one of the info keys are above zero - my $include_record = join $SPACE . $PIPE x 2 . $SPACE, - map { $_ . q{>0} } @{$info_keys_ref}; + my $include_record = join $SPACE . $PIPE x 2 . $SPACE, map { $_ . q{>0} } @{$info_keys_ref}; bcftools_annotate( { filehandle => $filehandle, @@ -516,8 +506,8 @@ sub _annotate_and_calculate_afpopmax { ## Generate a random integer between 0-10,000. my $random_integer = int rand $MAX_RANDOM_NUMBER; - my $toml_path = catfile( $recipe_dir_path, - $random_integer . $UNDERSCORE . q{calculate_afpopmax.toml} ); + my $toml_path = + catfile( $recipe_dir_path, $random_integer . $UNDERSCORE . 
q{calculate_afpopmax.toml} ); write_toml( { data_href => $postannotation, diff --git a/lib/MIP/Recipes/Download/Gnomad_pli_per_gene.pm b/lib/MIP/Recipes/Download/Gnomad_pli_per_gene.pm index 3cfd3c46e..c09af1500 100644 --- a/lib/MIP/Recipes/Download/Gnomad_pli_per_gene.pm +++ b/lib/MIP/Recipes/Download/Gnomad_pli_per_gene.pm @@ -117,12 +117,12 @@ sub download_gnomad_pli_per_gene { check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; - use MIP::Get::Parameter qw{ get_recipe_resources }; use MIP::Program::Gnu::Coreutils qw{ gnu_cat gnu_cut }; use MIP::Program::Gnu::Software::Gnu_grep qw{ gnu_grep }; + use MIP::Processmanagement::Slurm_processes qw{ slurm_submit_job_no_dependency_dead_end }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Recipes::Download::Get_reference qw{ get_reference }; use MIP::Script::Setup_script qw{ setup_script }; - use MIP::Processmanagement::Slurm_processes qw{ slurm_submit_job_no_dependency_dead_end }; ## Constants Readonly my $HGNC_SYMBOL_COL_NR => 1; @@ -136,17 +136,14 @@ sub download_gnomad_pli_per_gene { ## Unpack parameters my $reference_dir = $active_parameter_href->{reference_dir}; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, recipe_name => $recipe_name, } ); - ## Set recipe mode - my $recipe_mode = $active_parameter_href->{$recipe_name}; - - ## Filehandle(s) +## Filehandle(s) # Create anonymous filehandle my $filehandle = IO::Handle->new(); @@ -154,18 +151,18 @@ sub download_gnomad_pli_per_gene { my ( $recipe_file_path, $recipe_info_path ) = setup_script( { active_parameter_href => $active_parameter_href, - core_number => $recipe_resource{core_number}, + core_number => $recipe{core_number}, directory_id => q{mip_download}, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, + memory_allocation => $recipe{memory}, outdata_dir => $reference_dir, outscript_dir 
=> $reference_dir, - process_time => $recipe_resource{time}, + process_time => $recipe{time}, recipe_data_directory_path => $active_parameter_href->{reference_dir}, recipe_directory => $recipe_name . $UNDERSCORE . $reference_version, recipe_name => $recipe_name, - source_environment_commands_ref => $recipe_resource{load_env_ref}, + source_environment_commands_ref => $recipe{load_env_ref}, } ); @@ -228,7 +225,7 @@ sub download_gnomad_pli_per_gene { ## Close filehandleS close $filehandle or $log->logcroak(q{Could not close filehandle}); - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { ## No upstream or downstream dependencies slurm_submit_job_no_dependency_dead_end( diff --git a/lib/MIP/Recipes/Download/Hapmap.pm b/lib/MIP/Recipes/Download/Hapmap.pm index d17a31a6e..35e944128 100644 --- a/lib/MIP/Recipes/Download/Hapmap.pm +++ b/lib/MIP/Recipes/Download/Hapmap.pm @@ -117,7 +117,7 @@ sub download_hapmap { check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; - use MIP::Get::Parameter qw{ get_recipe_resources }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Recipes::Download::Get_reference qw{ get_reference }; use MIP::Script::Setup_script qw{ setup_script }; use MIP::Processmanagement::Slurm_processes qw{ slurm_submit_job_no_dependency_dead_end }; @@ -131,17 +131,14 @@ sub download_hapmap { my $reference_dir = $active_parameter_href->{reference_dir}; my @reference_genome_versions = @{ $active_parameter_href->{reference_genome_versions} }; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, recipe_name => $recipe_name, } ); - ## Set recipe mode - my $recipe_mode = $active_parameter_href->{$recipe_name}; - - ## Filehandle(s) +## Filehandle(s) # Create anonymous filehandle my $filehandle = IO::Handle->new(); @@ -149,19 +146,19 @@ sub download_hapmap { my ( $recipe_file_path, $recipe_info_path ) = setup_script( { active_parameter_href => 
$active_parameter_href, - core_number => $recipe_resource{core_number}, + core_number => $recipe{core_number}, directory_id => q{mip_download}, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, + memory_allocation => $recipe{memory}, outdata_dir => $reference_dir, outscript_dir => $reference_dir, - process_time => $recipe_resource{time}, + process_time => $recipe{time}, recipe_data_directory_path => $active_parameter_href->{reference_dir}, recipe_directory => $recipe_name . $UNDERSCORE . $reference_version, recipe_name => $recipe_name, temp_directory => $temp_directory, - source_environment_commands_ref => $recipe_resource{load_env_ref}, + source_environment_commands_ref => $recipe{load_env_ref}, } ); @@ -183,7 +180,7 @@ sub download_hapmap { ## Close filehandleS close $filehandle or $log->logcroak(q{Could not close filehandle}); - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { ## No upstream or downstream dependencies slurm_submit_job_no_dependency_dead_end( diff --git a/lib/MIP/Recipes/Download/Human_reference.pm b/lib/MIP/Recipes/Download/Human_reference.pm index b3988beb8..59b80096e 100644 --- a/lib/MIP/Recipes/Download/Human_reference.pm +++ b/lib/MIP/Recipes/Download/Human_reference.pm @@ -116,12 +116,11 @@ sub download_human_reference { check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; - use MIP::Get::Parameter qw{ get_recipe_resources }; use MIP::File::Path qw{ remove_file_path_suffix }; - use MIP::Processmanagement::Slurm_processes - qw{ slurm_submit_job_no_dependency_dead_end }; + use MIP::Processmanagement::Slurm_processes qw{ slurm_submit_job_no_dependency_dead_end }; use MIP::Program::Samtools qw{ samtools_faidx }; use MIP::Recipes::Download::Get_reference qw{ get_reference }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Script::Setup_script qw{ setup_script }; ### PREPROCESSING: @@ -129,13 +128,9 @@ sub download_human_reference { ## Retrieve logger 
object my $log = Log::Log4perl->get_logger( uc q{mip_download} ); - ## Set recipe mode - my $recipe_mode = $active_parameter_href->{$recipe_name}; - - ## Unpack parameters - my @reference_genome_versions = - @{ $active_parameter_href->{reference_genome_versions} }; - my %recipe_resource = get_recipe_resources( +## Unpack parameters + my @reference_genome_versions = @{ $active_parameter_href->{reference_genome_versions} }; + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, recipe_name => $recipe_name, @@ -149,20 +144,20 @@ sub download_human_reference { ## Creates recipe directories (info & data & script), recipe script filenames and writes sbatch header my ( $recipe_file_path, $recipe_info_path ) = setup_script( { - active_parameter_href => $active_parameter_href, - core_number => $recipe_resource{core_number}, - directory_id => q{mip_download}, - filehandle => $filehandle, - job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, - outdata_dir => $active_parameter_href->{reference_dir}, - outscript_dir => $active_parameter_href->{reference_dir}, - process_time => $recipe_resource{time}, - recipe_data_directory_path => $active_parameter_href->{reference_dir}, - recipe_directory => $recipe_name . $UNDERSCORE . $reference_version, - recipe_name => $recipe_name, - temp_directory => $temp_directory, - source_environment_commands_ref => $recipe_resource{load_env_ref}, + active_parameter_href => $active_parameter_href, + core_number => $recipe{core_number}, + directory_id => q{mip_download}, + filehandle => $filehandle, + job_id_href => $job_id_href, + memory_allocation => $recipe{memory}, + outdata_dir => $active_parameter_href->{reference_dir}, + outscript_dir => $active_parameter_href->{reference_dir}, + process_time => $recipe{time}, + recipe_data_directory_path => $active_parameter_href->{reference_dir}, + recipe_directory => $recipe_name . $UNDERSCORE . 
$reference_version, + recipe_name => $recipe_name, + temp_directory => $temp_directory, + source_environment_commands_ref => $recipe{load_env_ref}, } ); @@ -184,8 +179,7 @@ sub download_human_reference { my $outfile_path = catfile( $active_parameter_href->{reference_dir}, $reference_href->{outfile} ); my $outfile_no_gz = - remove_file_path_suffix( - { file_path => $outfile_path, file_suffixes_ref => [qw{ .gz }], } ) + remove_file_path_suffix( { file_path => $outfile_path, file_suffixes_ref => [qw{ .gz }], } ) // $outfile_path; samtools_faidx( { @@ -198,7 +192,7 @@ sub download_human_reference { ## Close filehandleS close $filehandle or $log->logcroak(q{Could not close filehandle}); - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { ## No upstream or downstream dependencies slurm_submit_job_no_dependency_dead_end( diff --git a/lib/MIP/Recipes/Download/Manta_call_regions.pm b/lib/MIP/Recipes/Download/Manta_call_regions.pm index 560ec2b7f..e78ff979c 100644 --- a/lib/MIP/Recipes/Download/Manta_call_regions.pm +++ b/lib/MIP/Recipes/Download/Manta_call_regions.pm @@ -117,10 +117,10 @@ sub download_manta_call_regions { check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; - use MIP::Get::Parameter qw{ get_recipe_resources }; + use MIP::Processmanagement::Slurm_processes qw{ slurm_submit_job_no_dependency_dead_end }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Recipes::Download::Get_reference qw{ get_reference }; use MIP::Script::Setup_script qw{ setup_script }; - use MIP::Processmanagement::Slurm_processes qw{ slurm_submit_job_no_dependency_dead_end }; ### PREPROCESSING: @@ -130,17 +130,14 @@ sub download_manta_call_regions { ## Unpack parameters my $reference_dir = $active_parameter_href->{reference_dir}; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, recipe_name => $recipe_name, } ); - ## Set recipe mode - my $recipe_mode = 
$active_parameter_href->{$recipe_name}; - - ## Filehandle(s) +## Filehandle(s) # Create anonymous filehandle my $filehandle = IO::Handle->new(); @@ -148,18 +145,18 @@ sub download_manta_call_regions { my ( $recipe_file_path, $recipe_info_path ) = setup_script( { active_parameter_href => $active_parameter_href, - core_number => $recipe_resource{core_number}, + core_number => $recipe{core_number}, directory_id => q{mip_download}, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, + memory_allocation => $recipe{memory}, outdata_dir => $reference_dir, outscript_dir => $reference_dir, - process_time => $recipe_resource{time}, + process_time => $recipe{time}, recipe_data_directory_path => $active_parameter_href->{reference_dir}, recipe_directory => $recipe_name . $UNDERSCORE . $reference_version, recipe_name => $recipe_name, - source_environment_commands_ref => $recipe_resource{load_env_ref}, + source_environment_commands_ref => $recipe{load_env_ref}, } ); @@ -181,7 +178,7 @@ sub download_manta_call_regions { ## Close filehandleS close $filehandle or $log->logcroak(q{Could not close filehandle}); - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { ## No upstream or downstream dependencies slurm_submit_job_no_dependency_dead_end( diff --git a/lib/MIP/Recipes/Download/Mills_and_1000g_indels.pm b/lib/MIP/Recipes/Download/Mills_and_1000g_indels.pm index ad75a89f3..6bbede171 100644 --- a/lib/MIP/Recipes/Download/Mills_and_1000g_indels.pm +++ b/lib/MIP/Recipes/Download/Mills_and_1000g_indels.pm @@ -3,6 +3,7 @@ package MIP::Recipes::Download::Mills_and_1000g_indels; use 5.026; use Carp; use charnames qw{ :full :short }; +use Cwd; use English qw{ -no_match_vars }; use File::Basename qw{ dirname }; use File::Spec::Functions qw{ catfile }; @@ -117,11 +118,10 @@ sub download_mills_and_1000g_indels { check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; - use Cwd; - use MIP::Get::Parameter qw{ 
get_recipe_resources }; + use MIP::Processmanagement::Slurm_processes qw{ slurm_submit_job_no_dependency_dead_end }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Recipes::Download::Get_reference qw{ get_reference }; use MIP::Script::Setup_script qw{ setup_script }; - use MIP::Processmanagement::Slurm_processes qw{ slurm_submit_job_no_dependency_dead_end }; ### PREPROCESSING: @@ -132,17 +132,14 @@ sub download_mills_and_1000g_indels { my $reference_dir = $active_parameter_href->{reference_dir}; my @reference_genome_versions = @{ $active_parameter_href->{reference_genome_versions} }; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, recipe_name => $recipe_name, } ); - ## Set recipe mode - my $recipe_mode = $active_parameter_href->{$recipe_name}; - - ## Filehandle(s) +## Filehandle(s) # Create anonymous filehandle my $filehandle = IO::Handle->new(); @@ -150,19 +147,19 @@ sub download_mills_and_1000g_indels { my ( $recipe_file_path, $recipe_info_path ) = setup_script( { active_parameter_href => $active_parameter_href, - core_number => $recipe_resource{core_number}, + core_number => $recipe{core_number}, directory_id => q{mip_download}, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, + memory_allocation => $recipe{memory}, outdata_dir => $reference_dir, outscript_dir => $reference_dir, - process_time => $recipe_resource{time}, + process_time => $recipe{time}, recipe_data_directory_path => $active_parameter_href->{reference_dir}, recipe_directory => $recipe_name . $UNDERSCORE . 
$reference_version, recipe_name => $recipe_name, temp_directory => $temp_directory, - source_environment_commands_ref => $recipe_resource{load_env_ref}, + source_environment_commands_ref => $recipe{load_env_ref}, } ); @@ -184,7 +181,7 @@ sub download_mills_and_1000g_indels { ## Close filehandleS close $filehandle or $log->logcroak(q{Could not close filehandle}); - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { ## No upstream or downstream dependencies slurm_submit_job_no_dependency_dead_end( diff --git a/lib/MIP/Recipes/Download/Pfam.pm b/lib/MIP/Recipes/Download/Pfam.pm index 9369c5506..19849e686 100644 --- a/lib/MIP/Recipes/Download/Pfam.pm +++ b/lib/MIP/Recipes/Download/Pfam.pm @@ -116,10 +116,9 @@ sub download_pfam { check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; - use MIP::Get::Parameter qw{ get_recipe_resources }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::File::Path qw{ remove_file_path_suffix }; - use MIP::Processmanagement::Slurm_processes - qw{ slurm_submit_job_no_dependency_dead_end }; + use MIP::Processmanagement::Slurm_processes qw{ slurm_submit_job_no_dependency_dead_end }; use MIP::Program::Hmmer qw{ hmmpress }; use MIP::Recipes::Download::Get_reference qw{ get_reference }; use MIP::Script::Setup_script qw{ setup_script }; @@ -132,36 +131,33 @@ sub download_pfam { ## Unpack parameters my $reference_dir = $active_parameter_href->{reference_dir}; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, recipe_name => $recipe_name, } ); - ## Set recipe mode - my $recipe_mode = $active_parameter_href->{$recipe_name}; - - ## Filehandle(s) +## Filehandle(s) # Create anonymous filehandle my $filehandle = IO::Handle->new(); ## Creates recipe directories (info & data & script), recipe script filenames and writes sbatch header my ( $recipe_file_path, $recipe_info_path ) = setup_script( { - active_parameter_href => 
$active_parameter_href, - core_number => $recipe_resource{core_number}, - directory_id => q{mip_download}, - filehandle => $filehandle, - job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, - outdata_dir => $reference_dir, - outscript_dir => $reference_dir, - process_time => $recipe_resource{time}, - recipe_data_directory_path => $active_parameter_href->{reference_dir}, - recipe_directory => $recipe_name . $UNDERSCORE . $reference_version, - recipe_name => $recipe_name, - source_environment_commands_ref => $recipe_resource{load_env_ref}, + active_parameter_href => $active_parameter_href, + core_number => $recipe{core_number}, + directory_id => q{mip_download}, + filehandle => $filehandle, + job_id_href => $job_id_href, + memory_allocation => $recipe{memory}, + outdata_dir => $reference_dir, + outscript_dir => $reference_dir, + process_time => $recipe{time}, + recipe_data_directory_path => $active_parameter_href->{reference_dir}, + recipe_directory => $recipe_name . $UNDERSCORE . 
$reference_version, + recipe_name => $recipe_name, + source_environment_commands_ref => $recipe{load_env_ref}, } ); @@ -201,7 +197,7 @@ sub download_pfam { ## Delete key so that the reference is only downloaded once $active_parameter_href->{reference_feature}{$recipe_name} = undef; - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { ## No upstream or downstream dependencies slurm_submit_job_no_dependency_dead_end( diff --git a/lib/MIP/Recipes/Download/Rank_model.pm b/lib/MIP/Recipes/Download/Rank_model.pm index ba43bb205..41cbf2c36 100644 --- a/lib/MIP/Recipes/Download/Rank_model.pm +++ b/lib/MIP/Recipes/Download/Rank_model.pm @@ -117,7 +117,7 @@ sub download_rank_model { check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; - use MIP::Get::Parameter qw{ get_recipe_resources }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Recipes::Download::Get_reference qw{ get_reference }; use MIP::Script::Setup_script qw{ setup_script }; use MIP::Processmanagement::Slurm_processes qw{ slurm_submit_job_no_dependency_dead_end }; @@ -130,17 +130,14 @@ sub download_rank_model { ## Unpack parameters my $reference_dir = $active_parameter_href->{reference_dir}; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, recipe_name => $recipe_name, } ); - ## Set recipe mode - my $recipe_mode = $active_parameter_href->{$recipe_name}; - - ## Filehandle(s) +## Filehandle(s) # Create anonymous filehandle my $filehandle = IO::Handle->new(); @@ -148,18 +145,18 @@ sub download_rank_model { my ( $recipe_file_path, $recipe_info_path ) = setup_script( { active_parameter_href => $active_parameter_href, - core_number => $recipe_resource{core_number}, + core_number => $recipe{core_number}, directory_id => q{mip_download}, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, + memory_allocation => $recipe{memory}, outdata_dir => 
$reference_dir, outscript_dir => $reference_dir, - process_time => $recipe_resource{time}, + process_time => $recipe{time}, recipe_data_directory_path => $active_parameter_href->{reference_dir}, recipe_directory => $recipe_name . $UNDERSCORE . $reference_version, recipe_name => $recipe_name, - source_environment_commands_ref => $recipe_resource{load_env_ref}, + source_environment_commands_ref => $recipe{load_env_ref}, } ); @@ -181,7 +178,7 @@ sub download_rank_model { ## Close filehandleS close $filehandle or $log->logcroak(q{Could not close filehandle}); - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { ## No upstream or downstream dependencies slurm_submit_job_no_dependency_dead_end( diff --git a/lib/MIP/Recipes/Download/Reduced_penetrance.pm b/lib/MIP/Recipes/Download/Reduced_penetrance.pm index f94f6d0a8..a25c8c94e 100644 --- a/lib/MIP/Recipes/Download/Reduced_penetrance.pm +++ b/lib/MIP/Recipes/Download/Reduced_penetrance.pm @@ -117,10 +117,10 @@ sub download_reduced_penetrance { check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; - use MIP::Get::Parameter qw{ get_recipe_resources }; + use MIP::Processmanagement::Slurm_processes qw{ slurm_submit_job_no_dependency_dead_end }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Recipes::Download::Get_reference qw{ get_reference }; use MIP::Script::Setup_script qw{ setup_script }; - use MIP::Processmanagement::Slurm_processes qw{ slurm_submit_job_no_dependency_dead_end }; ### PREPROCESSING: @@ -130,17 +130,14 @@ sub download_reduced_penetrance { ## Unpack parameters my $reference_dir = $active_parameter_href->{reference_dir}; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, recipe_name => $recipe_name, } ); - ## Set recipe mode - my $recipe_mode = $active_parameter_href->{$recipe_name}; - - ## Filehandle(s) +## Filehandle(s) # Create anonymous filehandle my $filehandle = IO::Handle->new(); 
@@ -148,18 +145,18 @@ sub download_reduced_penetrance { my ( $recipe_file_path, $recipe_info_path ) = setup_script( { active_parameter_href => $active_parameter_href, - core_number => $recipe_resource{core_number}, + core_number => $recipe{core_number}, directory_id => q{mip_download}, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, + memory_allocation => $recipe{memory}, outdata_dir => $reference_dir, outscript_dir => $reference_dir, - process_time => $recipe_resource{time}, + process_time => $recipe{time}, recipe_data_directory_path => $active_parameter_href->{reference_dir}, recipe_directory => $recipe_name . $UNDERSCORE . $reference_version, recipe_name => $recipe_name, - source_environment_commands_ref => $recipe_resource{load_env_ref}, + source_environment_commands_ref => $recipe{load_env_ref}, } ); @@ -181,7 +178,7 @@ sub download_reduced_penetrance { ## Close filehandleS close $filehandle or $log->logcroak(q{Could not close filehandle}); - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { ## No upstream or downstream dependencies slurm_submit_job_no_dependency_dead_end( diff --git a/lib/MIP/Recipes/Download/Runstatus.pm b/lib/MIP/Recipes/Download/Runstatus.pm index 959832a3b..8a2cb67ab 100644 --- a/lib/MIP/Recipes/Download/Runstatus.pm +++ b/lib/MIP/Recipes/Download/Runstatus.pm @@ -83,7 +83,7 @@ sub download_runstatus { check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; - use MIP::Get::Parameter qw{ get_recipe_resources }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Language::Shell qw{ check_mip_process_paths }; use MIP::Script::Setup_script qw{ setup_script }; use MIP::Processmanagement::Processes qw{ submit_recipe }; @@ -96,17 +96,14 @@ sub download_runstatus { ## Unpack parameters my $reference_dir = $active_parameter_href->{reference_dir}; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => 
$active_parameter_href, recipe_name => $recipe_name, } ); - ## Set recipe mode - my $recipe_mode = $active_parameter_href->{$recipe_name}; - - ## Filehandle(s) +## Filehandle(s) # Create anonymous filehandle my $filehandle = IO::Handle->new(); @@ -114,18 +111,18 @@ sub download_runstatus { my ( $recipe_file_path, $recipe_info_path ) = setup_script( { active_parameter_href => $active_parameter_href, - core_number => $recipe_resource{core_number}, + core_number => $recipe{core_number}, directory_id => q{mip_download}, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, + memory_allocation => $recipe{memory}, outdata_dir => $reference_dir, outscript_dir => $reference_dir, - process_time => $recipe_resource{time}, + process_time => $recipe{time}, recipe_data_directory_path => $active_parameter_href->{reference_dir}, recipe_directory => $recipe_name, recipe_name => $recipe_name, - source_environment_commands_ref => $recipe_resource{load_env_ref}, + source_environment_commands_ref => $recipe{load_env_ref}, } ); @@ -150,7 +147,7 @@ sub download_runstatus { ## Close filehandleS close $filehandle or $log->logcroak(q{Could not close filehandle}); - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { submit_recipe( { diff --git a/lib/MIP/Recipes/Download/Scout_exons.pm b/lib/MIP/Recipes/Download/Scout_exons.pm index 9485ad696..e1cc2f19d 100644 --- a/lib/MIP/Recipes/Download/Scout_exons.pm +++ b/lib/MIP/Recipes/Download/Scout_exons.pm @@ -117,10 +117,10 @@ sub download_scout_exons { check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; - use MIP::Get::Parameter qw{ get_recipe_resources }; + use MIP::Processmanagement::Slurm_processes qw{ slurm_submit_job_no_dependency_dead_end }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Recipes::Download::Get_reference qw{ get_reference }; use MIP::Script::Setup_script qw{ setup_script }; - use MIP::Processmanagement::Slurm_processes qw{ 
slurm_submit_job_no_dependency_dead_end }; ### PREPROCESSING: @@ -130,17 +130,14 @@ sub download_scout_exons { ## Unpack parameters my $reference_dir = $active_parameter_href->{reference_dir}; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, recipe_name => $recipe_name, } ); - ## Set recipe mode - my $recipe_mode = $active_parameter_href->{$recipe_name}; - - ## Filehandle(s) +## Filehandle(s) # Create anonymous filehandle my $filehandle = IO::Handle->new(); @@ -148,18 +145,18 @@ sub download_scout_exons { my ( $recipe_file_path, $recipe_info_path ) = setup_script( { active_parameter_href => $active_parameter_href, - core_number => $recipe_resource{core_number}, + core_number => $recipe{core_number}, directory_id => q{mip_download}, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, + memory_allocation => $recipe{memory}, outdata_dir => $reference_dir, outscript_dir => $reference_dir, - process_time => $recipe_resource{time}, + process_time => $recipe{time}, recipe_data_directory_path => $active_parameter_href->{reference_dir}, recipe_directory => $recipe_name . $UNDERSCORE . 
$reference_version, recipe_name => $recipe_name, - source_environment_commands_ref => $recipe_resource{load_env_ref}, + source_environment_commands_ref => $recipe{load_env_ref}, } ); @@ -181,7 +178,7 @@ sub download_scout_exons { ## Close filehandleS close $filehandle or $log->logcroak(q{Could not close filehandle}); - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { ## No upstream or downstream dependencies slurm_submit_job_no_dependency_dead_end( diff --git a/lib/MIP/Recipes/Download/Sv_vcfanno_config.pm b/lib/MIP/Recipes/Download/Sv_vcfanno_config.pm index 1b9598a92..2d4eb7950 100644 --- a/lib/MIP/Recipes/Download/Sv_vcfanno_config.pm +++ b/lib/MIP/Recipes/Download/Sv_vcfanno_config.pm @@ -117,11 +117,11 @@ sub download_sv_vcfanno_config { check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; - use MIP::Get::Parameter qw{ get_recipe_resources }; use MIP::Program::Gnu::Coreutils qw{ gnu_mv}; + use MIP::Processmanagement::Slurm_processes qw{ slurm_submit_job_no_dependency_dead_end }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Recipes::Download::Get_reference qw{ get_reference }; use MIP::Script::Setup_script qw{ setup_script }; - use MIP::Processmanagement::Slurm_processes qw{ slurm_submit_job_no_dependency_dead_end }; ### PREPROCESSING: @@ -131,17 +131,14 @@ sub download_sv_vcfanno_config { ## Unpack parameters my $reference_dir = $active_parameter_href->{reference_dir}; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, recipe_name => $recipe_name, } ); - ## Set recipe mode - my $recipe_mode = $active_parameter_href->{$recipe_name}; - - ## Filehandle(s) +## Filehandle(s) # Create anonymous filehandle my $filehandle = IO::Handle->new(); @@ -149,18 +146,18 @@ sub download_sv_vcfanno_config { my ( $recipe_file_path, $recipe_info_path ) = setup_script( { active_parameter_href => $active_parameter_href, - core_number => 
$recipe_resource{core_number}, + core_number => $recipe{core_number}, directory_id => q{mip_download}, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, + memory_allocation => $recipe{memory}, outdata_dir => $reference_dir, outscript_dir => $reference_dir, - process_time => $recipe_resource{time}, + process_time => $recipe{time}, recipe_data_directory_path => $active_parameter_href->{reference_dir}, recipe_directory => $recipe_name . $UNDERSCORE . $reference_version, recipe_name => $recipe_name, - source_environment_commands_ref => $recipe_resource{load_env_ref}, + source_environment_commands_ref => $recipe{load_env_ref}, } ); @@ -205,7 +202,7 @@ sub download_sv_vcfanno_config { ## Close filehandleS close $filehandle or $log->logcroak(q{Could not close filehandle}); - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { ## No upstream or downstream dependencies slurm_submit_job_no_dependency_dead_end( diff --git a/lib/MIP/Recipes/Download/Svrank_model.pm b/lib/MIP/Recipes/Download/Svrank_model.pm index 1424b2710..a76f177e9 100644 --- a/lib/MIP/Recipes/Download/Svrank_model.pm +++ b/lib/MIP/Recipes/Download/Svrank_model.pm @@ -117,7 +117,7 @@ sub download_svrank_model { check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; - use MIP::Get::Parameter qw{ get_recipe_resources }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Recipes::Download::Get_reference qw{ get_reference }; use MIP::Script::Setup_script qw{ setup_script }; use MIP::Processmanagement::Slurm_processes qw{ slurm_submit_job_no_dependency_dead_end }; @@ -130,17 +130,14 @@ sub download_svrank_model { ## Unpack parameters my $reference_dir = $active_parameter_href->{reference_dir}; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, recipe_name => $recipe_name, } ); - ## Set recipe mode - my $recipe_mode = 
$active_parameter_href->{$recipe_name}; - - ## Filehandle(s) +## Filehandle(s) # Create anonymous filehandle my $filehandle = IO::Handle->new(); @@ -148,18 +145,18 @@ sub download_svrank_model { my ( $recipe_file_path, $recipe_info_path ) = setup_script( { active_parameter_href => $active_parameter_href, - core_number => $recipe_resource{core_number}, + core_number => $recipe{core_number}, directory_id => q{mip_download}, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, + memory_allocation => $recipe{memory}, outdata_dir => $reference_dir, outscript_dir => $reference_dir, - process_time => $recipe_resource{time}, + process_time => $recipe{time}, recipe_data_directory_path => $active_parameter_href->{reference_dir}, recipe_directory => $recipe_name . $UNDERSCORE . $reference_version, recipe_name => $recipe_name, - source_environment_commands_ref => $recipe_resource{load_env_ref}, + source_environment_commands_ref => $recipe{load_env_ref}, } ); @@ -181,7 +178,7 @@ sub download_svrank_model { ## Close filehandleS close $filehandle or $log->logcroak(q{Could not close filehandle}); - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { ## No upstream or downstream dependencies slurm_submit_job_no_dependency_dead_end( diff --git a/lib/MIP/Recipes/Download/Vcf2cytosure_blacklist_regions.pm b/lib/MIP/Recipes/Download/Vcf2cytosure_blacklist_regions.pm index 98af5a326..d56eaa1e6 100644 --- a/lib/MIP/Recipes/Download/Vcf2cytosure_blacklist_regions.pm +++ b/lib/MIP/Recipes/Download/Vcf2cytosure_blacklist_regions.pm @@ -116,10 +116,10 @@ sub download_vcf2cytosure_blacklist_regions { check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; - use MIP::Get::Parameter qw{ get_recipe_resources }; + use MIP::Processmanagement::Slurm_processes qw{ slurm_submit_job_no_dependency_dead_end }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Recipes::Download::Get_reference qw{ get_reference }; use 
MIP::Script::Setup_script qw{ setup_script }; - use MIP::Processmanagement::Slurm_processes qw{ slurm_submit_job_no_dependency_dead_end }; ### PREPROCESSING: @@ -129,17 +129,14 @@ sub download_vcf2cytosure_blacklist_regions { ## Unpack parameters my $reference_dir = $active_parameter_href->{reference_dir}; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, recipe_name => $recipe_name, } ); - ## Set recipe mode - my $recipe_mode = $active_parameter_href->{$recipe_name}; - - ## Filehandle(s) +## Filehandle(s) # Create anonymous filehandle my $filehandle = IO::Handle->new(); @@ -147,18 +144,18 @@ sub download_vcf2cytosure_blacklist_regions { my ( $recipe_file_path, $recipe_info_path ) = setup_script( { active_parameter_href => $active_parameter_href, - core_number => $recipe_resource{core_number}, + core_number => $recipe{core_number}, directory_id => q{mip_download}, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, + memory_allocation => $recipe{memory}, outdata_dir => $reference_dir, outscript_dir => $reference_dir, - process_time => $recipe_resource{time}, + process_time => $recipe{time}, recipe_data_directory_path => $active_parameter_href->{reference_dir}, recipe_directory => $recipe_name . $UNDERSCORE . 
$reference_version, recipe_name => $recipe_name, - source_environment_commands_ref => $recipe_resource{load_env_ref}, + source_environment_commands_ref => $recipe{load_env_ref}, } ); @@ -180,7 +177,7 @@ sub download_vcf2cytosure_blacklist_regions { ## Close filehandleS close $filehandle or $log->logcroak(q{Could not close filehandle}); - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { ## No upstream or downstream dependencies slurm_submit_job_no_dependency_dead_end( diff --git a/lib/MIP/Recipes/Download/Vcfanno_config.pm b/lib/MIP/Recipes/Download/Vcfanno_config.pm index 25d12bfe9..d0d6ebc08 100644 --- a/lib/MIP/Recipes/Download/Vcfanno_config.pm +++ b/lib/MIP/Recipes/Download/Vcfanno_config.pm @@ -117,11 +117,11 @@ sub download_vcfanno_config { check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; - use MIP::Get::Parameter qw{ get_recipe_resources }; use MIP::Program::Gnu::Coreutils qw{ gnu_mv}; + use MIP::Processmanagement::Slurm_processes qw{ slurm_submit_job_no_dependency_dead_end }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Recipes::Download::Get_reference qw{ get_reference }; use MIP::Script::Setup_script qw{ setup_script }; - use MIP::Processmanagement::Slurm_processes qw{ slurm_submit_job_no_dependency_dead_end }; ### PREPROCESSING: @@ -131,17 +131,14 @@ sub download_vcfanno_config { ## Unpack parameters my $reference_dir = $active_parameter_href->{reference_dir}; - my %recipe_resource = get_recipe_resources( + my %recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, recipe_name => $recipe_name, } ); - ## Set recipe mode - my $recipe_mode = $active_parameter_href->{$recipe_name}; - - ## Filehandle(s) +## Filehandle(s) # Create anonymous filehandle my $filehandle = IO::Handle->new(); @@ -149,18 +146,18 @@ sub download_vcfanno_config { my ( $recipe_file_path, $recipe_info_path ) = setup_script( { active_parameter_href => $active_parameter_href, - core_number => 
$recipe_resource{core_number}, + core_number => $recipe{core_number}, directory_id => q{mip_download}, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, + memory_allocation => $recipe{memory}, outdata_dir => $reference_dir, outscript_dir => $reference_dir, - process_time => $recipe_resource{time}, + process_time => $recipe{time}, recipe_data_directory_path => $active_parameter_href->{reference_dir}, recipe_directory => $recipe_name . $UNDERSCORE . $reference_version, recipe_name => $recipe_name, - source_environment_commands_ref => $recipe_resource{load_env_ref}, + source_environment_commands_ref => $recipe{load_env_ref}, } ); @@ -205,7 +202,7 @@ sub download_vcfanno_config { ## Close filehandleS close $filehandle or $log->logcroak(q{Could not close filehandle}); - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { ## No upstream or downstream dependencies slurm_submit_job_no_dependency_dead_end( diff --git a/lib/MIP/Recipes/Download/Vcfanno_functions.pm b/lib/MIP/Recipes/Download/Vcfanno_functions.pm index 4be584e71..09976e9f5 100644 --- a/lib/MIP/Recipes/Download/Vcfanno_functions.pm +++ b/lib/MIP/Recipes/Download/Vcfanno_functions.pm @@ -117,11 +117,11 @@ sub download_vcfanno_functions { check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!}; - use MIP::Get::Parameter qw{ get_recipe_resources }; use MIP::Program::Gnu::Coreutils qw{ gnu_mv}; + use MIP::Processmanagement::Slurm_processes qw{ slurm_submit_job_no_dependency_dead_end }; + use MIP::Recipe qw{ parse_recipe_prerequisites }; use MIP::Recipes::Download::Get_reference qw{ get_reference }; use MIP::Script::Setup_script qw{ setup_script }; - use MIP::Processmanagement::Slurm_processes qw{ slurm_submit_job_no_dependency_dead_end }; ### PREPROCESSING: @@ -131,17 +131,14 @@ sub download_vcfanno_functions { ## Unpack parameters my $reference_dir = $active_parameter_href->{reference_dir}; - my %recipe_resource = get_recipe_resources( + my 
%recipe = parse_recipe_prerequisites( { active_parameter_href => $active_parameter_href, recipe_name => $recipe_name, } ); - ## Set recipe mode - my $recipe_mode = $active_parameter_href->{$recipe_name}; - - ## Filehandle(s) +## Filehandle(s) # Create anonymous filehandle my $filehandle = IO::Handle->new(); @@ -149,18 +146,18 @@ sub download_vcfanno_functions { my ( $recipe_file_path, $recipe_info_path ) = setup_script( { active_parameter_href => $active_parameter_href, - core_number => $recipe_resource{core_number}, + core_number => $recipe{core_number}, directory_id => q{mip_download}, filehandle => $filehandle, job_id_href => $job_id_href, - memory_allocation => $recipe_resource{memory}, + memory_allocation => $recipe{memory}, outdata_dir => $reference_dir, outscript_dir => $reference_dir, - process_time => $recipe_resource{time}, + process_time => $recipe{time}, recipe_data_directory_path => $active_parameter_href->{reference_dir}, recipe_directory => $recipe_name . $UNDERSCORE . $reference_version, recipe_name => $recipe_name, - source_environment_commands_ref => $recipe_resource{load_env_ref}, + source_environment_commands_ref => $recipe{load_env_ref}, } ); @@ -182,7 +179,7 @@ sub download_vcfanno_functions { ## Close filehandleS close $filehandle or $log->logcroak(q{Could not close filehandle}); - if ( $recipe_mode == 1 ) { + if ( $recipe{mode} == 1 ) { ## No upstream or downstream dependencies slurm_submit_job_no_dependency_dead_end( diff --git a/t/build_capture_file_prerequisites.t b/t/build_capture_file_prerequisites.t index 3d2450af7..7ffb9e272 100644 --- a/t/build_capture_file_prerequisites.t +++ b/t/build_capture_file_prerequisites.t @@ -38,8 +38,7 @@ BEGIN { test_import( { perl_module_href => \%perl_module, } ); } -use MIP::Recipes::Build::Capture_file_prerequisites - qw{ build_capture_file_prerequisites }; +use MIP::Recipes::Build::Capture_file_prerequisites qw{ build_capture_file_prerequisites }; diag( q{Test build_capture_file_prerequisites from 
Capture_file_prerequisites.pm} . $COMMA @@ -83,6 +82,7 @@ my %file_info = test_mip_hashes( ); my %job_id; my %parameter = test_mip_hashes( { mip_hash_name => q{recipe_parameter}, } ); +$parameter{$recipe_name} = undef; my %sample_info; diff --git a/t/build_transcript_annotation_prerequisites.t b/t/build_transcript_annotation_prerequisites.t index 59dc9586c..d6b195430 100644 --- a/t/build_transcript_annotation_prerequisites.t +++ b/t/build_transcript_annotation_prerequisites.t @@ -42,7 +42,7 @@ use MIP::Recipes::Build::Transcript_annotation_prerequisites qw{ build_transcript_annotation_prerequisites }; diag( -q{Test build_transcript_annotation_prerequisites from Transcript_annotation_prerequisites.pm} + q{Test build_transcript_annotation_prerequisites from Transcript_annotation_prerequisites.pm} . $COMMA . $SPACE . q{Perl} . $SPACE @@ -73,8 +73,7 @@ my %active_parameter = test_mip_hashes( recipe_name => $recipe_name, } ); -$active_parameter{transcript_annotation} = - q{grch37_gencode_annotation_reformated_-v31-.gtf}; +$active_parameter{transcript_annotation} = q{grch37_gencode_annotation_reformated_-v31-.gtf}; ## Submission via slurm_mock $active_parameter{$recipe_name} = 1; @@ -86,6 +85,7 @@ my %file_info = test_mip_hashes( ); my %job_id; my %parameter = test_mip_hashes( { mip_hash_name => q{recipe_parameter}, } ); +$parameter{$recipe_name} = undef; my %sample_info; diff --git a/t/data/test_data/recipe_active_parameter.yaml b/t/data/test_data/recipe_active_parameter.yaml index 3ef734f6f..71c4e52da 100644 --- a/t/data/test_data/recipe_active_parameter.yaml +++ b/t/data/test_data/recipe_active_parameter.yaml @@ -18,6 +18,7 @@ human_genome_reference: human_genome.fasta gatk_logging_level: INFO java_use_large_pages: 1 max_cores_per_node: 36 +mip: 2 node_ram_memory: 180 picardtools_path: a_test_path project_id: wamdu diff --git a/t/data/test_data/recipe_parameter.yaml b/t/data/test_data/recipe_parameter.yaml index 47f71e949..9bee84623 100644 --- 
a/t/data/test_data/recipe_parameter.yaml +++ b/t/data/test_data/recipe_parameter.yaml @@ -13,6 +13,8 @@ human_genome_reference_file_endings: associated_recipe: - bwa_mem build_file: 1 +mip: + chain: TEST rtg_vcfeval_reference_genome: build_file: 1 salmon_quant_reference_genome: diff --git a/t/parse_recipe_prerequisites.t b/t/parse_recipe_prerequisites.t index cbe138d8f..e0f567514 100644 --- a/t/parse_recipe_prerequisites.t +++ b/t/parse_recipe_prerequisites.t @@ -56,7 +56,11 @@ my %active_parameter = ## Given a parameter hash my %parameter = test_mip_hashes( { mip_hash_name => q{define_parameter}, recipe_name => $recipe_name, } ); -$parameter{$recipe_name}{chain} = q{TEST}; + +## Given a recipe with a chain, file_tag and outfile_suffix +$parameter{$recipe_name}{chain} = q{TEST}; +$parameter{$recipe_name}{file_tag} = q{_deepvar}; +$parameter{$recipe_name}{outfile_suffix} = q{.g.vcf.gz}; ## When parsing recipe prerequisites my %recipe = parse_recipe_prerequisites( @@ -69,13 +73,15 @@ my %recipe = parse_recipe_prerequisites( ## Then return recipe prerequisites hash my %expected_recipe = ( - core_number => 35, - gpu_number => 1, - job_id_chain => q{TEST}, - load_env_ref => [qw{ conda activate test }], - memory => 175, - mode => 2, - time => 10, + core_number => 35, + file_tag => q{_deepvar}, + gpu_number => 1, + job_id_chain => q{TEST}, + load_env_ref => [qw{ conda activate test }], + memory => 175, + mode => 2, + outfile_suffix => q{.g.vcf.gz}, + time => 10, ); is_deeply( \%recipe, \%expected_recipe, q{Got recipe prerequisites hash} ); From f88fe5f4c7d88f856075856a07fecf922d427f38 Mon Sep 17 00:00:00 2001 From: henrikstranneheim Date: Sun, 10 Jan 2021 12:21:00 +0100 Subject: [PATCH 6/7] feat(refactor): --- lib/MIP/Recipe.pm | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/lib/MIP/Recipe.pm b/lib/MIP/Recipe.pm index 74019ec18..dff65d7e2 100644 --- a/lib/MIP/Recipe.pm +++ b/lib/MIP/Recipe.pm @@ -78,13 +78,14 @@ sub parse_recipe_prerequisites { 
file_tag => q{file_tag}, outfile_suffix => q{outfile_suffix}, ); + ATTRIBUTE: while ( my ( $attribute, $resource ) = each %attribute_map ) { $recipe_resource{$resource} = get_recipe_attributes( { + attribute => $attribute, parameter_href => $parameter_href, recipe_name => $recipe_name, - attribute => $attribute, } ); } From 012612bac4649f6f92d06d667b44b11bb6547b83 Mon Sep 17 00:00:00 2001 From: henrikstranneheim Date: Sun, 10 Jan 2021 19:47:09 +0100 Subject: [PATCH 7/7] feat(recipe_name): Use rtg recipe name instead of mip --- lib/MIP/Recipes/Build/Rtg_prerequisites.pm | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/MIP/Recipes/Build/Rtg_prerequisites.pm b/lib/MIP/Recipes/Build/Rtg_prerequisites.pm index a119684de..64d49b4b7 100644 --- a/lib/MIP/Recipes/Build/Rtg_prerequisites.pm +++ b/lib/MIP/Recipes/Build/Rtg_prerequisites.pm @@ -157,7 +157,7 @@ sub build_rtg_prerequisites { { active_parameter_href => $active_parameter_href, parameter_href => $parameter_href, - recipe_name => q{mip}, + recipe_name => $recipe_name, } );