trying to remove conda from analysisrunstatus
jemten committed Nov 10, 2022
1 parent e82203a commit 3db1c28
Showing 3 changed files with 31 additions and 25 deletions.
2 changes: 2 additions & 0 deletions definitions/analyse_parameters.yaml
@@ -271,6 +271,8 @@ analysisrunstatus:
  data_type: SCALAR
  default: 1
  file_tag: nofile_tag
+  program_executables:
+    - perl
  type: recipe
## Sacct
sacct:
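Note: listing perl under program_executables lets the analysisrunstatus recipe resolve its perl invocation through MIP's executable handling (the get_executable_base_command call added below) instead of relying on a sourced conda environment. A minimal sketch of that resolution step, using only the calls visible in this commit; the exact prefix returned depends on how MIP's environments are configured:

    use MIP::Environment::Executable qw{ get_executable_base_command };

    ## Resolve the base command for perl; this may be plain "perl" or carry
    ## an environment-specific prefix, depending on MIP's configuration
    my @commands = ( get_executable_base_command( { base_command => q{perl}, } ), );
    my $command  = join q{ }, @commands;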
52 changes: 28 additions & 24 deletions lib/MIP/Recipes/Analysis/Analysisrunstatus.pm
@@ -140,16 +140,15 @@ sub analysis_analysisrunstatus {
## Creates recipe directories (info & data & script), recipe script filenames and writes sbatch header
my ($recipe_file_path) = setup_script(
{
-            active_parameter_href           => $active_parameter_href,
-            core_number                     => $recipe{core_number},
-            directory_id                    => $case_id,
-            filehandle                      => $filehandle,
-            job_id_href                     => $job_id_href,
-            memory_allocation               => $recipe{memory_allocation},
-            process_time                    => $recipe{time},
-            recipe_directory                => $recipe_name,
-            recipe_name                     => $recipe_name,
-            source_environment_commands_ref => $recipe{load_env_ref},
+            active_parameter_href => $active_parameter_href,
+            core_number           => $recipe{core_number},
+            directory_id          => $case_id,
+            filehandle            => $filehandle,
+            job_id_href           => $job_id_href,
+            memory_allocation     => $recipe{memory_allocation},
+            process_time          => $recipe{time},
+            recipe_directory      => $recipe_name,
+            recipe_name           => $recipe_name,
}
);
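Note: the only change to the setup_script call above is dropping source_environment_commands_ref, so no environment-activation command is written into this recipe's sbatch script anymore. A hypothetical sketch (the environment name is illustrative, not from the source) of the kind of line that argument used to place in the generated script:

    ## Hypothetical sketch: the dropped argument carried commands such as a
    ## conda activation line that was printed into the script header
    my $activation_line = join q{ }, qw{ conda activate mip_rd_dna };
    print $activation_line . qq{\n};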

@@ -410,8 +409,11 @@ sub _check_vcf_header_and_keys {

check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!};

+    use MIP::Environment::Executable qw{ get_executable_base_command };
use MIP::File::Path qw{ remove_file_path_suffix };

+    my @commands = ( get_executable_base_command( { base_command => q{perl}, } ), );
+
RECIPE:
foreach my $recipe_name ( keys %{$vcf_file_href} ) {

@@ -427,45 +429,47 @@
file_suffixes_ref => [qw{ .gz}],
}
);
+        my $command = join q{ }, @commands;

-        print {$filehandle} q?perl -MTest::Harness -e ' ?;
+        $command .= q? -MTest::Harness -e ' ?;

## Adjust arguments to harness object
-        print {$filehandle} q?my %args = (?;
+        $command .= q?my %args = (?;

## Print individual test results to STDOUT
-        print {$filehandle} q?verbosity => 1, ?;
+        $command .= q?verbosity => 1, ?;

##Argument to test script
-        print {$filehandle} q?test_args => { ?;
+        $command .= q?test_args => { ?;

## Add test for select file using alias
-        print {$filehandle} q?"test ? . $mode . $SPACE . $recipe_name . q?" => [ ?;
+        $command .= q?"test ? . $mode . $SPACE . $recipe_name . q?" => [ ?;

## Infile
-        print {$filehandle} q?"? . $vcf_file_path . q?", ?;
+        $command .= q?"? . $vcf_file_path . q?", ?;

##ConfigFile
-        print {$filehandle} q?"? . $analysis_config_file . q?", ?;
-        print {$filehandle} q?], ?;
+        $command .= q?"? . $analysis_config_file . q?", ?;
+        $command .= q?], ?;

-        print {$filehandle} q?}); ?;
+        $command .= q?}); ?;

## Create harness using arguments provided
-        print {$filehandle} q?my $harness = TAP::Harness->new( \%args ); ?;
+        $command .= q?my $harness = TAP::Harness->new( \%args ); ?;

## Execute test(s)
-        print {$filehandle} q?$harness->runtests( ?;
+        $command .= q?$harness->runtests( ?;

-        print {$filehandle} q?["?
+        $command .= q?["?
. catfile( $Bin, qw{ t mip_analysis.test } )
. q?", "test ?
. $mode
. $SPACE
. $recipe_name . q?"], ?;

-        print {$filehandle} q?)'?;
-        say {$filehandle} $NEWLINE;
+        $command .= q?)'?;
+
+        say {$filehandle} $command . $NEWLINE;
}
}
return;
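Note: the refactor above assembles the perl one-liner in a single $command string (prefixed with the output of get_executable_base_command) and writes it to the sbatch script with one say, instead of printing fragments straight to the filehandle. For reference, a self-contained sketch of the TAP::Harness call that the generated one-liner performs, with hypothetical values standing in for $vcf_file_path, $analysis_config_file, $mode and $recipe_name:

    use TAP::Harness;

    my %args = (
        ## Print individual test results to STDOUT
        verbosity => 1,
        ## Arguments passed to the test script, keyed by test alias
        test_args => {
            q{test wgs analysisrunstatus} =>
              [ q{case_id.vcf.gz}, q{analysis_config.yaml} ],
        },
    );
    my $harness = TAP::Harness->new( \%args );
    $harness->runtests( [ q{t/mip_analysis.test}, q{test wgs analysisrunstatus} ] );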
2 changes: 1 addition & 1 deletion lib/MIP/Script/Setup_script.pm
@@ -663,7 +663,7 @@ sub setup_script {
{
bash_bin_path => catfile( rootdir(), qw{ bin bash } ),
filehandle => $filehandle,
-            invoke_login_shell => 1,
+            invoke_login_shell => 0,
}
);
print {$filehandle} $NEWLINE;
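Note: with no conda environment left to activate in the generated script, bash no longer needs to be started as a login shell, hence invoke_login_shell => 0. A hypothetical illustration (not MIP's actual implementation) of what such a flag typically toggles:

    ## Hypothetical sketch: a login shell is usually requested by adding
    ## --login to the bash shebang; without conda sourcing it can be dropped
    my $invoke_login_shell = 0;
    my $shebang = $invoke_login_shell ? q{#!/bin/bash --login} : q{#!/bin/bash};
    print $shebang . qq{\n};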
