diff --git a/Changes b/Changes
index 3b7c6bb5..97f12d22 100644
--- a/Changes
+++ b/Changes
@@ -1,6 +1,12 @@
LIST OF CHANGES
---------------
+release 68.1.0
+ - Update the code and tests to accommodate the removal of some
+ functionality from npg_tracking::illumina::runfolder, see
+ https://github.com/wtsi-npg/npg_tracking/pull/807. The pipeline retains
+ all of its previous functionality.
+
release 68.0.0
- Use st::api::lims->aggregate_libraries() method for both 'merge_lanes' and
'merge_by_library' pipeline options. This is a breaking change as far as
diff --git a/lib/npg_pipeline/base.pm b/lib/npg_pipeline/base.pm
index 3456f5cc..f4a3852b 100644
--- a/lib/npg_pipeline/base.pm
+++ b/lib/npg_pipeline/base.pm
@@ -26,16 +26,12 @@ with qw{
};
Readonly::Array my @NO_SCRIPT_ARG_ATTRS => qw/
- slot
- instrument_string
subpath
tilelayout_rows
tile_count
lane_tilecount
tilelayout_columns
npg_tracking_schema
- flowcell_id
- name
tracking_run
experiment_name
logger
@@ -60,8 +56,6 @@ within npg_pipeline package
=head2 npg_tracking_schema
-=head2 flowcell_id
-
=head2 tracking_run
=head2 logger
diff --git a/t/00-distribution.t b/t/00-distribution.t
index 2a39ea8a..fa0c33f5 100644
--- a/t/00-distribution.t
+++ b/t/00-distribution.t
@@ -10,8 +10,6 @@ eval {
if($EVAL_ERROR) {
plan skip_all => 'Test::Distribution not installed';
} else {
- my @nots = qw(prereq pod);
- local $ENV{'PATH'} = join q[:], 't/bin', $ENV{'PATH'};
Test::Distribution->import(only => [qw/versions description/], distversion => 1);
}
diff --git a/t/00-podcoverage.t b/t/00-podcoverage.t
index 9721c59c..54579f13 100644
--- a/t/00-podcoverage.t
+++ b/t/00-podcoverage.t
@@ -4,7 +4,6 @@ use Test::More;
eval "use Test::Pod::Coverage 1.00";
plan skip_all => "Test::Pod::Coverage 1.00 required for testing POD coverage" if $@;
-local $ENV{'PATH'} = join q[:], 't/bin', $ENV{'PATH'};
all_pod_coverage_ok();
1;
diff --git a/t/10-base.t b/t/10-base.t
index ec648467..318d255e 100644
--- a/t/10-base.t
+++ b/t/10-base.t
@@ -1,15 +1,14 @@
use strict;
use warnings;
-use Test::More tests => 9;
+use Test::More tests => 8;
use Test::Exception;
use File::Temp qw(tempdir tempfile);
-use Cwd;
+use Cwd qw(getcwd abs_path);
use Log::Log4perl qw(:levels);
use Moose::Util qw(apply_all_roles);
use File::Copy qw(cp);
use t::util;
-use npg_tracking::util::abs_path qw(abs_path);
my $util = t::util->new();
@@ -54,25 +53,6 @@ subtest 'config' => sub {
'Croaks for non-esistent config file as expected';
};
-subtest 'flowcell id and barcode' => sub {
- plan tests => 7;
-
- my $bpath = t::util->new()->temp_directory;
- my $path = join q[/], $bpath, '150206_HS29_15467_A_C5WL2ACXX';
- my $base;
- lives_ok { $base = npg_pipeline::base->new(runfolder_path => $path); }
- 'can create object without supplying run id';
- is ($base->id_run, 15467, 'id run derived correctly from runfolder_path');
- ok (!defined $base->id_flowcell_lims, 'lims flowcell id undefined');
- is ($base->flowcell_id, 'C5WL2ACXX', 'flowcell barcode derived from runfolder path');
-
- $path = join q[/], $bpath, '150204_MS8_15441_A_MS2806735-300V2';
- $base = npg_pipeline::base->new(runfolder_path => $path, id_flowcell_lims => 45);
- is ($base->id_run, 15441, 'id run derived correctly from runfolder_path');
- is ($base->id_flowcell_lims, 45, 'lims flowcell id returned correctly');
- is ($base->flowcell_id, 'MS2806735-300V2', 'MiSeq reagent kit id derived from runfolder path');
-};
-
subtest 'repository preexec' => sub {
plan tests => 1;
diff --git a/t/10-pluggable-central.t b/t/10-pluggable-central.t
index c13a16d5..283b8412 100644
--- a/t/10-pluggable-central.t
+++ b/t/10-pluggable-central.t
@@ -5,8 +5,8 @@ use Test::Exception;
use Log::Log4perl qw(:levels);
use File::Copy qw(cp);
use File::Path qw(make_path remove_tree);
+use Cwd qw(abs_path);
-use npg_tracking::util::abs_path qw(abs_path);
use t::util;
my $util = t::util->new();
diff --git a/t/10-pluggable.t b/t/10-pluggable.t
index 67109a4b..f85967a9 100644
--- a/t/10-pluggable.t
+++ b/t/10-pluggable.t
@@ -2,28 +2,27 @@ use strict;
use warnings;
use Test::More tests => 16;
use Test::Exception;
-use Cwd;
-use List::Util qw(none any);
+use Cwd qw(getcwd abs_path);
+use List::Util qw(any);
use Log::Log4perl qw(:levels);
-use File::Copy qw(cp);
-use English;
-
-use npg_tracking::util::abs_path qw(abs_path);
-use t::util;
+use File::Temp qw(tempdir);
+use English qw(-no_match_vars);
+use File::Copy::Recursive qw(dircopy fmove fcopy);
use_ok('npg_pipeline::pluggable');
-my $util = t::util->new();
-my $test_dir = $util->temp_directory();
+my $test_dir = tempdir(CLEANUP => 1);
-my @tools = map { "$test_dir/$_" } qw/bamtofastq blat norm_fit/;
+my $test_bin = join q[/], $test_dir, q[bin];
+mkdir $test_bin;
+my @tools = map { "$test_bin/$_" } qw/bamtofastq blat norm_fit/;
foreach my $tool (@tools) {
open my $fh, '>', $tool or die 'cannot open file for writing';
print $fh $tool or die 'cannot print';
close $fh or warn 'failed to close file handle';
}
chmod 0755, @tools;
-local $ENV{'PATH'} = join q[:], $test_dir, $ENV{'PATH'};
+local $ENV{'PATH'} = join q[:], $test_bin, $ENV{'PATH'};
Log::Log4perl->easy_init({layout => '%d %-5p %c - %m%n',
level => $DEBUG,
@@ -32,15 +31,36 @@ Log::Log4perl->easy_init({layout => '%d %-5p %c - %m%n',
my $product_config = q[t/data/release/config/archive_on/product_release.yml];
my $config_dir = 'data/config_files';
-my $runfolder_path = $util->analysis_runfolder_path;
-$util->create_analysis();
+
+my $rf_name = q[210415_A00971_0162_AHNNTMDSXY];
+my $test_rf = q[t/data/novaseq/] . $rf_name;
+my $analysis_dir = join q[/], $test_dir,
+ q[esa-sv-20201215-02/IL_seq_data/analysis];
+my $runfolder_path = join q[/], $analysis_dir, $rf_name;
+dircopy($test_rf, $runfolder_path);
+my $bbcals_relative = q[Data/Intensities/BAM_basecalls_20210417-080715];
+my $nocall_relative = $bbcals_relative . q[/no_cal];
+my $nocall_path = join q[/], $runfolder_path, $nocall_relative;
+mkdir $nocall_path;
+symlink $nocall_path, "$runfolder_path/Latest_Summary";
+
+my $id_run = 37416;
+for my $file (qw(RunInfo.xml RunParameters.xml)) {
+ my $source = join q[/], $runfolder_path, "${id_run}_${file}";
+ my $target = join q[/], $runfolder_path, $file;
+ fmove($source, $target);
+}
+
+my $samplesheet_path = join q[/], $runfolder_path, $bbcals_relative,
+ q[metadata_cache_37416], q[samplesheet_37416.csv];
subtest 'object with no function order set - simple methods' => sub {
plan tests => 7;
my $pluggable = npg_pipeline::pluggable->new(
- id_run => 1234,
- runfolder_path => $test_dir
+ id_run => 1234,
+ runfolder_path => $test_dir,
+ npg_tracking_schema => undef
);
isa_ok($pluggable, q{npg_pipeline::pluggable});
is($pluggable->_pipeline_name, '10-pluggable.t', 'pipeline name');
@@ -56,17 +76,19 @@ subtest 'graph creation from jgf files' => sub {
plan tests => 2;
my $obj = npg_pipeline::pluggable->new(
- id_run => 1234,
- runfolder_path => $test_dir,
- function_list => "$config_dir/function_list_central.json"
+ id_run => 1234,
+ runfolder_path => $test_dir,
+ function_list => "$config_dir/function_list_central.json",
+ npg_tracking_schema => undef
);
lives_ok {$obj->function_graph()}
'no error creating a graph for default analysis';
$obj = npg_pipeline::pluggable->new(
- id_run => 1234,
- runfolder_path => $test_dir,
- function_list => "$config_dir/function_list_post_qc_review.json"
+ id_run => 1234,
+ runfolder_path => $test_dir,
+ function_list => "$config_dir/function_list_post_qc_review.json",
+ npg_tracking_schema => undef
);
lives_ok { $obj->function_graph() }
'no error creating a graph for default archival';
@@ -79,7 +101,8 @@ subtest 'graph creation from explicitly given function list' => sub {
id_run => 1234,
runfolder_path => $runfolder_path,
function_order => [qw/run_analysis_in_progress lane_analysis_in_progress/],
- function_list => "$config_dir/function_list_central.json"
+ function_list => "$config_dir/function_list_central.json",
+ npg_tracking_schema => undef
);
ok($obj->has_function_order(), 'function order is set');
is(join(q[ ], @{$obj->function_order}), 'run_analysis_in_progress lane_analysis_in_progress',
@@ -110,31 +133,32 @@ subtest 'graph creation from explicitly given function list' => sub {
is (scalar @p, 1, 'one predecessor');
$obj = npg_pipeline::pluggable->new(
- id_run => 1234,
- function_order => [qw/pipeline_end/],
- runfolder_path => $test_dir,
- function_list => "$config_dir/function_list_central.json"
+ id_run => 1234,
+ function_order => [qw/pipeline_end/],
+ runfolder_path => $test_dir,
+ function_list => "$config_dir/function_list_central.json",
+ npg_tracking_schema => undef
);
throws_ok { $obj->function_graph() }
qr/Graph is not DAG/,
'pipeline_end cannot be specified in function order';
$obj = npg_pipeline::pluggable->new(
- id_run => 1234,
- function_order => [qw/pipeline_start/],
- runfolder_path => $test_dir,
- no_bsub => 1,
- function_list => "$config_dir/function_list_central.json"
+ function_order => [qw/pipeline_start/],
+ runfolder_path => $test_dir,
+ no_bsub => 1,
+ function_list => "$config_dir/function_list_central.json",
+ npg_tracking_schema => undef
);
throws_ok { $obj->function_graph() }
qr/Graph is not DAG/,
'pipeline_start cannot be specified in function order';
$obj = npg_pipeline::pluggable->new(
- id_run => 1234,
- function_order => ['invalid_function'],
- runfolder_path => $test_dir,
- function_list => "$config_dir/function_list_central.json"
+ function_order => ['invalid_function'],
+ runfolder_path => $test_dir,
+ function_list => "$config_dir/function_list_central.json",
+ npg_tracking_schema => undef
);
throws_ok {$obj->function_graph()}
qr/Function invalid_function cannot be found in the graph/;
@@ -147,7 +171,8 @@ subtest 'switching off functions' => sub {
runfolder_path => $runfolder_path,
no_irods_archival => 1,
no_warehouse_update => 1,
- function_list => "$config_dir/function_list_central.json"
+ function_list => "$config_dir/function_list_central.json",
+ npg_tracking_schema => undef
);
lives_ok { $p->function_graph } 'A graph!';
@@ -164,9 +189,10 @@ subtest 'switching off functions' => sub {
'update to warehouse switched off');
$p = npg_pipeline::pluggable->new(
- runfolder_path => $runfolder_path,
- local => 1,
- function_list => "$config_dir/function_list_central.json"
+ runfolder_path => $runfolder_path,
+ local => 1,
+ function_list => "$config_dir/function_list_central.json",
+ npg_tracking_schema => undef
);
ok(($p->_run_function($fn_name_id, $fn_name_id)->[0]->excluded &&
$p->_run_function($fn_ml_name_id, $fn_ml_name_id)->[0]->excluded),
@@ -177,7 +203,8 @@ subtest 'switching off functions' => sub {
runfolder_path => $runfolder_path,
local => 1,
no_warehouse_update => 0,
- function_list => "$config_dir/function_list_central.json"
+ function_list => "$config_dir/function_list_central.json",
+ npg_tracking_schema => undef
);
ok(($p->_run_function($fn_name_id, $fn_name_id)->[0]->excluded &&
$p->_run_function($fn_ml_name_id, $fn_ml_name_id)->[0]->excluded),
@@ -196,18 +223,19 @@ subtest 'specifying functions via function_order' => sub {
);
local $ENV{'PATH'} = join q[:], 't/bin', $ENV{'PATH'}; # mock LSF clients
- local $ENV{NPG_CACHED_SAMPLESHEET_FILE} = q[t/data/samplesheet_1234.csv];
+ local $ENV{NPG_CACHED_SAMPLESHEET_FILE} = $samplesheet_path;
my $p = npg_pipeline::pluggable->new(
- function_order => \@functions_in_order,
- runfolder_path => $runfolder_path,
- spider => 0,
- no_sf_resource => 1,
- no_bsub => 0,
- is_indexed => 0,
+ function_order => \@functions_in_order,
+ runfolder_path => $runfolder_path,
+ spider => 0,
+ no_sf_resource => 1,
+ no_bsub => 0,
+ is_indexed => 0,
product_conf_file_path => $product_config,
- function_list => "$config_dir/function_list_post_qc_review.json"
+ function_list => "$config_dir/function_list_post_qc_review.json",
+ npg_tracking_schema => undef
);
- is($p->id_run, 1234, 'run id set correctly');
+ is($p->id_run, $id_run, 'run id set correctly');
is($p->is_indexed, 0, 'is not indexed');
is(join(q[ ], @{$p->function_order()}), join(q[ ], @functions_in_order),
q{function_order set on creation});
@@ -218,12 +246,13 @@ subtest 'creating executor object' => sub {
plan tests => 13;
my $ref = {
- function_order => [qw/run_archival_in_progress/],
- runfolder_path => $runfolder_path,
- bam_basecall_path => $runfolder_path,
- spider => 0,
+ function_order => [qw/run_archival_in_progress/],
+ runfolder_path => $runfolder_path,
+ bam_basecall_path => $runfolder_path,
+ spider => 0,
product_conf_file_path => $product_config,
- function_list => "$config_dir/function_list_post_qc_review.json"
+ function_list => "$config_dir/function_list_post_qc_review.json",
+ npg_tracking_schema => undef
};
my $p = npg_pipeline::pluggable->new($ref);
@@ -246,7 +275,8 @@ subtest 'creating executor object' => sub {
my $ex = $pl->executor();
isa_ok ($ex, 'npg_pipeline::executor::' . $etype);
ok (!$ex->has_analysis_path, 'analysis path is not set');
- my $path1 = join q[],$runfolder_path,'/t_10-pluggable.t_1234_',$pl->timestamp, q[-];
+ my $path1 = join q[],$runfolder_path, "/t_10-pluggable.t_${id_run}_",
+ $pl->timestamp, q[-];
my $path2 = join q[], '.commands4', uc $etype, 'jobs.', $etype eq 'lsf' ? 'json' : 'txt';
like ($ex->commands4jobs_file_path(), qr/\A$path1(\d+)$path2\Z/,
'file path to save commands for jobs');
@@ -263,7 +293,7 @@ subtest 'propagating options to the lsf executor' => sub {
run_qc_complete
);
- local $ENV{NPG_CACHED_SAMPLESHEET_FILE} = q[t/data/samplesheet_1234.csv];
+ local $ENV{NPG_CACHED_SAMPLESHEET_FILE} = $samplesheet_path;
my $ref = {
function_order => \@functions_in_order,
@@ -324,19 +354,19 @@ subtest 'running the pipeline (lsf executor)' => sub {
};
my $p = npg_pipeline::pluggable->new($ref);
- local $ENV{NPG_CACHED_SAMPLESHEET_FILE} = q[t/data/samplesheet_1234.csv];
+ local $ENV{NPG_CACHED_SAMPLESHEET_FILE} = $samplesheet_path;
lives_ok { $p->main(); } q{no error running main without execution };
$ref->{'execute'} = 1;
$ref->{'no_bsub'} = 1;
$p = npg_pipeline::pluggable->new($ref);
- local $ENV{NPG_CACHED_SAMPLESHEET_FILE} = q[t/data/samplesheet_1234.csv];
+ local $ENV{NPG_CACHED_SAMPLESHEET_FILE} = $samplesheet_path;
lives_ok { $p->main(); } q{no error running main in no_bsub mode};
$ref->{'no_bsub'} = 0;
local $ENV{'PATH'} = join q[:], 't/bin', $ENV{'PATH'}; # mock LSF clients
$p = npg_pipeline::pluggable->new($ref);
- local $ENV{NPG_CACHED_SAMPLESHEET_FILE} = q[t/data/samplesheet_1234.csv];
+ local $ENV{NPG_CACHED_SAMPLESHEET_FILE} = $samplesheet_path;
lives_ok { $p->main(); } q{no error running main with mock LSF client};
# soft-link bresume command to /bin/false so that it fails
@@ -344,19 +374,19 @@ subtest 'running the pipeline (lsf executor)' => sub {
mkdir $bin;
symlink '/bin/false', "$bin/bresume";
local $ENV{'PATH'} = join q[:], $bin, $ENV{'PATH'};
- local $ENV{NPG_CACHED_SAMPLESHEET_FILE} = q[t/data/samplesheet_1234.csv];
+ local $ENV{NPG_CACHED_SAMPLESHEET_FILE} = $samplesheet_path;
throws_ok { npg_pipeline::pluggable->new($ref)->main() }
qr/Failed to submit command to LSF/, q{error running main};
$ref->{'interactive'} = 1;
- local $ENV{NPG_CACHED_SAMPLESHEET_FILE} = q[t/data/samplesheet_1234.csv];
+ local $ENV{NPG_CACHED_SAMPLESHEET_FILE} = $samplesheet_path;
lives_ok { npg_pipeline::pluggable->new($ref)->main() }
'no failure in interactive mode';
$ref->{'interactive'} = 0;
# soft-link bkill command to /bin/false so that it fails
symlink '/bin/false', "$bin/bkill";
- local $ENV{NPG_CACHED_SAMPLESHEET_FILE} = q[t/data/samplesheet_1234.csv];
+ local $ENV{NPG_CACHED_SAMPLESHEET_FILE} = $samplesheet_path;
throws_ok { npg_pipeline::pluggable->new($ref)->main() }
qr/Failed to submit command to LSF/, q{error running main};
};
@@ -389,17 +419,17 @@ subtest 'running the pipeline (wr executor)' => sub {
local $ENV{'PATH'} = join q[:], $bin, $ENV{'PATH'};
my $p = npg_pipeline::pluggable->new($ref);
- local $ENV{NPG_CACHED_SAMPLESHEET_FILE} = q[t/data/samplesheet_1234.csv];
+ local $ENV{NPG_CACHED_SAMPLESHEET_FILE} = $samplesheet_path;
lives_ok { $p->main(); } q{no error running main without execution };
$ref->{'execute'} = 1;
- local $ENV{NPG_CACHED_SAMPLESHEET_FILE} = q[t/data/samplesheet_1234.csv];
+ local $ENV{NPG_CACHED_SAMPLESHEET_FILE} = $samplesheet_path;
throws_ok { npg_pipeline::pluggable->new($ref)->main() }
qr/Error submitting for execution: Error submitting wr jobs/,
q{error running main};
$ref->{'interactive'} = 1;
- local $ENV{NPG_CACHED_SAMPLESHEET_FILE} = q[t/data/samplesheet_1234.csv];
+ local $ENV{NPG_CACHED_SAMPLESHEET_FILE} = $samplesheet_path;
lives_ok { npg_pipeline::pluggable->new($ref)->main() }
q{interactive mode, no error running main};
@@ -407,11 +437,11 @@ subtest 'running the pipeline (wr executor)' => sub {
unlink $wr;
symlink '/bin/true', $wr;
$ref->{'interactive'} = 0;
- local $ENV{NPG_CACHED_SAMPLESHEET_FILE} = q[t/data/samplesheet_1234.csv];
+ local $ENV{NPG_CACHED_SAMPLESHEET_FILE} = $samplesheet_path;
lives_ok { npg_pipeline::pluggable->new($ref)->main() } q{no error running main};
$ref->{'job_name_prefix'} = 'test';
- local $ENV{NPG_CACHED_SAMPLESHEET_FILE} = q[t/data/samplesheet_1234.csv];
+ local $ENV{NPG_CACHED_SAMPLESHEET_FILE} = $samplesheet_path;
lives_ok { npg_pipeline::pluggable->new($ref)->main() }
q{job name prefix is set, no error running main};
};
@@ -419,80 +449,61 @@ subtest 'running the pipeline (wr executor)' => sub {
subtest 'positions and spidering' => sub {
plan tests => 9;
- cp 't/data/run_params/runParameters.hiseq.xml',
- join(q[/], $runfolder_path, 'runParameters.xml')
- or die 'Faile to copy run params file';
-
local $ENV{'PATH'} = join q[:], 't/bin', $ENV{'PATH'}; # mock LSF clients
- local $ENV{NPG_CACHED_SAMPLESHEET_FILE} = q[t/data/samplesheet_1234.csv];
+ local $ENV{NPG_CACHED_SAMPLESHEET_FILE} = $samplesheet_path;
my $p = npg_pipeline::pluggable->new(
- id_run => 1234,
- id_flowcell_lims => 2015,
- run_folder => q{123456_IL2_1234},
- runfolder_path => $runfolder_path,
- spider => 0,
- function_list => "$config_dir/function_list_central.json"
+ id_flowcell_lims => 2015,
+ runfolder_path => $runfolder_path,
+ spider => 0,
+ function_list => "$config_dir/function_list_central.json"
);
ok(!$p->spider, 'spidering is off');
- is (join( q[ ], $p->positions), '1 2 3 4 5 6 7 8', 'positions array');
+ is (join( q[ ], $p->positions), '1 2 3 4', 'positions array');
- local $ENV{NPG_CACHED_SAMPLESHEET_FILE} = q[t/data/samplesheet_1234.csv];
+ local $ENV{NPG_CACHED_SAMPLESHEET_FILE} = $samplesheet_path;
my $function = 'run_analysis_complete';
$p = npg_pipeline::pluggable->new(
- id_run => 1234,
- id_flowcell_lims => 2015,
- run_folder => q{123456_IL2_1234},
- function_order => [$function],
- runfolder_path => $runfolder_path,
- lanes => [1,2],
- spider => 0,
- no_sf_resource => 1,
- product_conf_file_path => $product_config,
- function_list => "$config_dir/function_list_central.json"
+ id_flowcell_lims => 2015,
+ function_order => [$function],
+ runfolder_path => $runfolder_path,
+ lanes => [1,2],
+ spider => 0,
+ no_sf_resource => 1,
+ product_conf_file_path => $product_config,
+ function_list => "$config_dir/function_list_central.json"
);
is (join( q[ ], $p->positions), '1 2', 'positions array');
ok(!$p->interactive, 'start job will be resumed');
lives_ok { $p->main() } "running main for $function, non-interactively";
- local $ENV{NPG_CACHED_SAMPLESHEET_FILE} = q[t/data/samplesheet_1234.csv];
+ local $ENV{NPG_CACHED_SAMPLESHEET_FILE} = $samplesheet_path;
$p = npg_pipeline::pluggable->new(
- id_run => 1234,
- id_flowcell_lims => 2015,
- run_folder => q{123456_IL2_1234},
- function_order => [$function],
- runfolder_path => $runfolder_path,
- lanes => [1,2],
- interactive => 1,
- spider => 0,
- no_sf_resource => 1,
- product_conf_file_path => $product_config,
- function_list => "$config_dir/function_list_central.json"
+ id_flowcell_lims => 2015,
+ function_order => [$function],
+ runfolder_path => $runfolder_path,
+ lanes => [1,2],
+ interactive => 1,
+ spider => 0,
+ no_sf_resource => 1,
+ product_conf_file_path => $product_config,
+ function_list => "$config_dir/function_list_central.json"
);
ok($p->interactive, 'start job will not be resumed');
lives_ok { $p->main() } "running main for $function, interactively";
- local $ENV{NPG_CACHED_SAMPLESHEET_FILE} = q[t/data/samplesheet_1234.csv];
- $util->create_analysis();
- cp 't/data/run_params/runParameters.hiseq.xml',
- join(q[/], $runfolder_path, 'runParameters.xml')
- or die 'Faile to copy run params file';
-
- $util->create_run_info();
-
+ local $ENV{NPG_CACHED_SAMPLESHEET_FILE} = $samplesheet_path;
$p = npg_pipeline::pluggable->new(
- id_run => 1234,
- run_folder => q{123456_IL2_1234},
- function_order => [qw{qc_qX_yield qc_adapter qc_insert_size}],
- lanes => [4],
- runfolder_path => $runfolder_path,
- no_bsub => 1,
- repository => q{t/data/sequence},
- id_flowcell_lims => 2015,
- spider => 0,
- no_sf_resource => 1,
- product_conf_file_path => $product_config,
- function_list => "$config_dir/function_list_central.json"
+ function_order => [qw{qc_qX_yield qc_adapter qc_insert_size}],
+ lanes => [4],
+ runfolder_path => $runfolder_path,
+ no_bsub => 1,
+ repository => q{t/data/sequence},
+ id_flowcell_lims => 2015,
+ spider => 0,
+ no_sf_resource => 1,
+ product_conf_file_path => $product_config,
+ function_list => "$config_dir/function_list_central.json"
);
mkdir $p->archive_path;
is (join( q[ ], $p->positions), '4', 'positions array');
@@ -542,7 +553,7 @@ subtest 'script name, pipeline name and function list' => sub {
qr/Bad function list name: $test_path/,
'error when function list does not exist, neither it can be interpreted as a function list name';
- cp $path, $test_dir;
+ fcopy($path, $test_dir);
$path = $test_dir . '/function_list_post_qc_review.json';
$base = npg_pipeline::pluggable->new(function_list => $path);
@@ -563,13 +574,11 @@ subtest 'script name, pipeline name and function list' => sub {
subtest 'log file name, directory and path' => sub {
plan tests => 18;
- my $log_name_re = qr/t_10-pluggable\.t_1234_02122020-\d+\.log/;
+ my $log_name_re = qr/t_10-pluggable\.t_${id_run}_02122020-\d+\.log/;
my $p = npg_pipeline::pluggable->new(
- id_run => 1234,
- run_folder => q{123456_IL2_1234},
- runfolder_path => $runfolder_path,
- timestamp => '02122020',
+ runfolder_path => $runfolder_path,
+ timestamp => '02122020',
);
like ($p->log_file_name, $log_name_re, 'log file name is built correctly');
is ($p->log_file_dir, $runfolder_path, 'default for the log directory');
@@ -577,11 +586,9 @@ subtest 'log file name, directory and path' => sub {
'default log file path');
$p = npg_pipeline::pluggable->new(
- id_run => 1234,
- run_folder => q{123456_IL2_1234},
- runfolder_path => $runfolder_path,
- timestamp => '02122020',
- log_file_name => 'custom.log',
+ runfolder_path => $runfolder_path,
+ timestamp => '02122020',
+ log_file_name => 'custom.log',
);
is ($p->log_file_name, 'custom.log', 'log file name as set');
is ($p->log_file_dir, $runfolder_path, 'default for the log directory');
@@ -589,11 +596,9 @@ subtest 'log file name, directory and path' => sub {
'custom log file path');
$p = npg_pipeline::pluggable->new(
- id_run => 1234,
- run_folder => q{123456_IL2_1234},
- runfolder_path => $runfolder_path,
- timestamp => '02122020',
- log_file_dir => "$runfolder_path/custom",
+ runfolder_path => $runfolder_path,
+ timestamp => '02122020',
+ log_file_dir => "$runfolder_path/custom",
);
like ($p->log_file_name, $log_name_re, 'default log file name');
is ($p->log_file_dir, "$runfolder_path/custom", 'log directory as set');
@@ -601,12 +606,10 @@ subtest 'log file name, directory and path' => sub {
'custom log file path');
$p = npg_pipeline::pluggable->new(
- id_run => 1234,
- run_folder => q{123456_IL2_1234},
- runfolder_path => $runfolder_path,
- timestamp => '02122020',
- log_file_dir => "$runfolder_path/custom",
- log_file_name => 'custom.log',
+ runfolder_path => $runfolder_path,
+ timestamp => '02122020',
+ log_file_dir => "$runfolder_path/custom",
+ log_file_name => 'custom.log',
);
is ($p->log_file_name, 'custom.log', 'log file name as set');
is ($p->log_file_dir, "$runfolder_path/custom" , 'log directory as set');
@@ -615,13 +618,11 @@ subtest 'log file name, directory and path' => sub {
# setting all three does not make sense, but is not prohibited either
$p = npg_pipeline::pluggable->new(
- id_run => 1234,
- run_folder => q{123456_IL2_1234},
- runfolder_path => $runfolder_path,
- timestamp => '02122020',
- log_file_dir => "$runfolder_path/my_log",
- log_file_name => 'custom.log',
- log_file_path => "$runfolder_path/custom/my.log",
+ runfolder_path => $runfolder_path,
+ timestamp => '02122020',
+ log_file_dir => "$runfolder_path/my_log",
+ log_file_name => 'custom.log',
+ log_file_path => "$runfolder_path/custom/my.log",
);
is ($p->log_file_name, 'custom.log', 'log file name as set');
is ($p->log_file_dir, "$runfolder_path/my_log", 'log directory as set');
@@ -629,11 +630,9 @@ subtest 'log file name, directory and path' => sub {
'custom log file path as directly set');
$p = npg_pipeline::pluggable->new(
- id_run => 1234,
- run_folder => q{123456_IL2_1234},
- runfolder_path => $runfolder_path,
- timestamp => '02122020',
- log_file_path => "$runfolder_path/custom/my.log"
+ runfolder_path => $runfolder_path,
+ timestamp => '02122020',
+ log_file_path => "$runfolder_path/custom/my.log"
);
is ($p->log_file_name, 'my.log', 'log file name is derived from path');
is ($p->log_file_dir, "$runfolder_path/custom",
@@ -646,13 +645,11 @@ subtest 'Copy log file and product_release config' => sub {
plan tests => 7;
my $p = npg_pipeline::pluggable->new(
- id_run => 1234,
- run_folder => q{123456_IL2_1234},
- runfolder_path => $runfolder_path,
- timestamp => '02122020',
- log_file_dir => $test_dir,
- log_file_name => 'logfile',
- product_conf_file_path => $product_config
+ runfolder_path => $runfolder_path,
+ timestamp => '02122020',
+ log_file_dir => $test_dir,
+ log_file_name => 'logfile',
+ product_conf_file_path => $product_config
);
$p->_copy_log_to_analysis_dir();
my $analysis_path = $p->analysis_path;
@@ -663,24 +660,20 @@ subtest 'Copy log file and product_release config' => sub {
# Set log file path to something false to show error behaviour is fine
$p = npg_pipeline::pluggable->new(
- id_run => 1234,
- run_folder => q{123456_IL2_1234},
- runfolder_path => '/nope',
- timestamp => '02122020',
- log_file_dir => $test_dir,
- log_file_name => 'logfile',
- product_conf_file_path => $product_config
+ runfolder_path => '/nope',
+ timestamp => '02122020',
+ log_file_dir => $test_dir,
+ log_file_name => 'logfile',
+ product_conf_file_path => $product_config
);
lives_ok {$p->_copy_log_to_analysis_dir()} 'Log copy to nonexistant runfolder does not die';
$p = npg_pipeline::pluggable->new(
- id_run => 1234,
- run_folder => q{123456_IL2_1234},
- runfolder_path => $runfolder_path,
- timestamp => '02122020',
- log_file_dir => '/nuthin',
- log_file_name => 'logfile',
- product_conf_file_path => $product_config
+ runfolder_path => $runfolder_path,
+ timestamp => '02122020',
+ log_file_dir => '/nuthin',
+ log_file_name => 'logfile',
+ product_conf_file_path => $product_config
);
lives_ok {$p->_copy_log_to_analysis_dir()} 'Log copy of nonexistant file does not die';
@@ -698,7 +691,7 @@ subtest 'Check resource population from graph' => sub {
plan tests => 2;
my $p = npg_pipeline::pluggable->new(
- id_run => 1234,
+ id_run => $id_run,
function_order => ['run_analysis_complete'],
function_list => "$config_dir/function_list_central.json"
);
@@ -712,7 +705,7 @@ subtest 'Checking resources are assigned correctly from graph' => sub {
plan tests => 4;
# Check resources for functions are correctly merged with pipeline-wide settings
my $p = npg_pipeline::pluggable->new(
- id_run => 1234,
+ id_run => $id_run,
function_list => "$config_dir/function_list_central.json"
);
my $resources = $p->_function_resource_requirements('update_ml_warehouse_1', 'update_ml_warehouse');
@@ -732,7 +725,7 @@ subtest 'Checking resources are assigned correctly from graph' => sub {
);
$p = npg_pipeline::pluggable->new(
- id_run => 1234,
+ id_run => $id_run,
function_list => "$config_dir/function_list_post_qc_review.json"
);
$resources = $p->_function_resource_requirements('run_run_archived', 'run_run_archived');
@@ -759,5 +752,4 @@ subtest 'Checking resources are assigned correctly from graph' => sub {
}
qr{Function run requires both label/name and id},
'Missing function name causes resource failure';
-
};
diff --git a/t/20-function-current_analysis_link.t b/t/20-function-current_analysis_link.t
index f8a7a61d..5b37efa6 100644
--- a/t/20-function-current_analysis_link.t
+++ b/t/20-function-current_analysis_link.t
@@ -1,85 +1,76 @@
use strict;
use warnings;
-use Test::More tests => 24;
+use Test::More tests => 23;
use Test::Exception;
-use File::Path qw(make_path);
-use t::util;
-
-my $util = t::util->new();
-my $tmp_dir = $util->temp_directory();
+use File::Copy::Recursive qw(dircopy fmove);
+use File::Temp qw(tempdir);
use_ok('npg_pipeline::function::current_analysis_link');
-my $runfolder_path = $util->analysis_runfolder_path();
-my $link_to = 'Data/Intensities/BAM_basecalls_20150608-091427/no_cal';
-my $recalibrated_path = join q[/], $runfolder_path, $link_to;
-make_path($recalibrated_path);
+my $temp_dir = tempdir(CLEANUP => 1);
-{
- my $test = sub {
- my ($obj) = @_;
- my $ds = $obj->create();
- ok($ds && scalar @{$ds} == 1 && $ds->[0]->excluded,
- 'creating summary link switched off');
- isa_ok ($ds->[0], 'npg_pipeline::function::definition');
- };
+my $rf_name = q[210415_A00971_0162_AHNNTMDSXY];
+my $test_rf = q[t/data/novaseq/] . $rf_name;
+my $analysis_dir = join q[/], $temp_dir,
+ q[esa-sv-20201215-02/IL_seq_data/analysis];
+my $runfolder_path = join q[/], $analysis_dir, $rf_name;
+dircopy($test_rf, $runfolder_path);
+my $nocall_relative = q[Data/Intensities/BAM_basecalls_20210417-080715/no_cal];
+my $nocall_path = join q[/], $runfolder_path, $nocall_relative;
+mkdir $nocall_path;
- my $rfl;
- $rfl = npg_pipeline::function::current_analysis_link->new(
- run_folder => q{123456_IL2_1234},
- runfolder_path => $runfolder_path,
- recalibrated_path => $recalibrated_path,
- no_summary_link => 1,
- resource => {
- default => {
- minimum_cpu => 1,
- memory => 1
- }
- }
+my $id_run = 37416;
+for my $file (qw(RunInfo.xml RunParameters.xml)) {
+ my $source = join q[/], $runfolder_path, "${id_run}_${file}";
+ my $target = join q[/], $runfolder_path, $file;
+ fmove($source, $target);
+}
+
+my $resource = {default => {minimum_cpu => 1, memory => 1, queue => 'small'}};
+
+sub test_job_skipped {
+ my $obj = shift;
+ my $ds = $obj->create();
+ ok($ds && scalar @{$ds} == 1 && $ds->[0]->excluded,
+ 'creating summary link switched off');
+ isa_ok ($ds->[0], 'npg_pipeline::function::definition');
+}
+
+{
+ my $rfl = npg_pipeline::function::current_analysis_link->new(
+ runfolder_path => $runfolder_path,
+ no_summary_link => 1,
+ resource => $resource,
+ npg_tracking_schema => undef
);
- $test->($rfl);
+ test_job_skipped($rfl);
$rfl = npg_pipeline::function::current_analysis_link->new(
- run_folder => q{123456_IL2_1234},
- runfolder_path => $runfolder_path,
- recalibrated_path => $recalibrated_path,
- local => 1,
- resource => {
- default => {
- minimum_cpu => 1,
- memory => 1
- }
- }
+ runfolder_path => $runfolder_path,
+ local => 1,
+ resource => $resource,
+ npg_tracking_schema => undef
);
- $test->($rfl);
+ test_job_skipped($rfl);
$rfl = npg_pipeline::function::current_analysis_link->new(
- run_folder => q{123456_IL2_1234},
- runfolder_path => $runfolder_path,
- recalibrated_path => $recalibrated_path,
- resource => {
- default => {
- minimum_cpu => 1,
- memory => 1,
- queue => 'small'
- }
- }
+ runfolder_path => $runfolder_path,
+ resource => $resource,
+ npg_tracking_schema => undef
);
my $ds = $rfl->create();
ok($ds && scalar @{$ds} == 1 && !$ds->[0]->excluded,
'creating summary link is enabled');
my $d = $ds->[0];
- isa_ok ($d, 'npg_pipeline::function::definition');
- is ($d->identifier, '1234', 'identifier set to run id');
+ is ($d->identifier, $id_run, 'identifier set to run id');
is ($d->created_by, 'npg_pipeline::function::current_analysis_link',
'created_by');
my $command = 'npg_pipeline_create_summary_link ' .
- '--run_folder 123456_IL2_1234 ' .
+ "--run_folder $rf_name " .
"--runfolder_path $runfolder_path " .
- "--recalibrated_path $recalibrated_path";
+ "--recalibrated_path $nocall_path";
is ($d->command, $command, 'command');
- is ($d->job_name, 'create_latest_summary_link_1234_123456_IL2_1234',
- 'job name');
+ is ($d->job_name, "create_latest_summary_link_${id_run}_${rf_name}", 'job name');
is ($d->queue, 'small', 'small queue');
}
@@ -88,40 +79,35 @@ make_path($recalibrated_path);
ok(!-e $link, 'link does not exist - test prerequisite');
my $rfl = npg_pipeline::function::current_analysis_link->new(
- run_folder => q{123456_IL2_1234},
- runfolder_path => $runfolder_path,
- recalibrated_path => $recalibrated_path,
- resource => {
- default => {
- minimum_cpu => 1,
- memory => 1
- }
- }
+ runfolder_path => $runfolder_path,
+ resource => $resource,
+ npg_tracking_schema => undef
);
- lives_ok { $rfl->make_link(); } q{no croak creating link};
+ lives_ok { $rfl->make_link() } q{no error creating link};
ok(-l $link, 'link exists');
- is(readlink $link, $link_to, 'correct link target');
- lives_ok { $rfl->make_link(); } q{no croak creating link when it already exists};
+ is(readlink $link, $nocall_relative, 'correct link target');
+ lives_ok { $rfl->make_link() } q{no error creating link when it already exists};
ok(-l $link, 'link exists');
- rename "$tmp_dir/nfs/sf45/IL2/analysis", "$tmp_dir/nfs/sf45/IL2/outgoing";
+ my $outgoing_dir = $analysis_dir;
+ $outgoing_dir =~ s/analysis/outgoing/;
+ rename $analysis_dir, $outgoing_dir;
$link =~ s/analysis/outgoing/;
$runfolder_path =~ s/analysis/outgoing/;
- $recalibrated_path =~ s/analysis/outgoing/;
$rfl = npg_pipeline::function::current_analysis_link->new(
- run_folder => q{123456_IL2_1234},
- runfolder_path => $runfolder_path,
- recalibrated_path => $recalibrated_path,
+ runfolder_path => $runfolder_path,
+ npg_tracking_schema => undef
);
- lives_ok { $rfl->make_link();} q{no croak creating link in outgoing when it already exists};
+ lives_ok { $rfl->make_link() }
+ q{no error creating link in outgoing when it already exists};
ok(-l $link, 'link exists');
unlink $link;
ok(!-e $link, 'link deleted - test prerequisite');
- lives_ok { $rfl->make_link(); } q{no croak creating link in outgoing};
+ lives_ok { $rfl->make_link() } q{no error creating link in outgoing};
ok(-l $link, 'link exists');
- is(readlink $link, $link_to, 'correct link target');
+ is(readlink $link, $nocall_relative, 'correct link target');
}
1;
diff --git a/t/20-function-p4_stage1_analysis.t b/t/20-function-p4_stage1_analysis.t
index 2669dc84..12ac4976 100644
--- a/t/20-function-p4_stage1_analysis.t
+++ b/t/20-function-p4_stage1_analysis.t
@@ -2,13 +2,12 @@ use strict;
use warnings;
use Test::More tests => 5;
use Test::Exception;
-use Cwd;
+use Cwd qw(getcwd abs_path);
use File::Copy qw(cp);
-use File::Copy::Recursive qw[dircopy];
+use File::Copy::Recursive qw(dircopy);
use Perl6::Slurp;
use JSON;
-use npg_tracking::util::abs_path qw(abs_path);
use t::util;
my $util = t::util->new(clean_temp_directory => 1);
diff --git a/t/20-function-run_data_to_irods_archiver.t b/t/20-function-run_data_to_irods_archiver.t
index 270950c3..96b3d91c 100644
--- a/t/20-function-run_data_to_irods_archiver.t
+++ b/t/20-function-run_data_to_irods_archiver.t
@@ -3,9 +3,8 @@ use warnings;
use Test::More tests => 4;
use Test::Exception;
use File::Copy;
-use Cwd;
+use Cwd qw(getcwd abs_path);
-use npg_tracking::util::abs_path qw(abs_path);
use t::util;
use_ok('npg_pipeline::function::run_data_to_irods_archiver');
diff --git a/t/20-function-seq_alignment.t b/t/20-function-seq_alignment.t
index a0d53a8d..8cf940f3 100644
--- a/t/20-function-seq_alignment.t
+++ b/t/20-function-seq_alignment.t
@@ -13,6 +13,7 @@ use Log::Log4perl qw/:levels/;
use JSON;
use Cwd;
use List::Util qw/first/;
+use File::Slurp qw/edit_file_lines/;
use Moose::Util qw(apply_all_roles);
@@ -192,21 +193,24 @@ subtest 'basic functionality' => sub {
copy('t/data/rna_seq/12597_RunInfo.xml', "$runfolder_path/RunInfo.xml") or die
'Copy failed';
- copy('t/data/run_params/runParameters.hiseq.xml', "$runfolder_path/runParameters.xml")
+ my $run_params_file = "$runfolder_path/runParameters.xml";
+ copy('t/data/run_params/runParameters.hiseq.xml', $run_params_file)
or die 'Copy failed';
+ edit_file_lines sub {
+ $_ =~ s{24235}{12597}
+ }, $run_params_file;
my $rna_gen;
lives_ok {
$rna_gen = npg_pipeline::function::seq_alignment->new(
- run_folder => $runfolder,
runfolder_path => $runfolder_path,
recalibrated_path => $bc_path,
timestamp => q{2014},
verbose => 0,
repository => $dir,
- force_phix_split => 0,
conf_path => 't/data/release/config/seq_alignment',
- resource => $default
+ resource => $default,
+ npg_tracking_schema => undef
)
} 'no error creating an object';
@@ -274,18 +278,16 @@ subtest 'basic functionality' => sub {
is ($d->num_hosts, 1, 'one host');
is ($d->fs_slots_num, 4, 'four sf slots');
- #### force on phix_split
lives_ok {
$rna_gen = npg_pipeline::function::seq_alignment->new(
- run_folder => $runfolder,
runfolder_path => $runfolder_path,
recalibrated_path => $bc_path,
timestamp => q{2014},
verbose => 0,
repository => $dir,
- force_phix_split => 1,
conf_path => 't/data/release/config/seq_alignment',
- resource => $default
+ resource => $default,
+ npg_tracking_schema => undef
)
} 'no error creating an object (forcing on phix split)';
@@ -339,10 +341,14 @@ subtest 'RNASeq analysis' => sub {
my $cache_dir = join q[/], $runfolder_path, 'Data/Intensities/BAM_basecalls_20140606-133530/metadata_cache_13066';
`mkdir -p $cache_dir`;
copy('t/data/rna_seq/13066_RunInfo.xml', "$runfolder_path/RunInfo.xml") or die 'Copy failed';
- copy('t/data/run_params/runParameters.hiseq.xml', "$runfolder_path/runParameters.xml")
+ my $run_params_file = "$runfolder_path/runParameters.xml";
+ copy('t/data/run_params/runParameters.hiseq.xml', $run_params_file)
or die 'Copy failed';
-
- # Edited to add 1000Genomes_hs37d5 + ensembl_75_transcriptome to lane 8
+ edit_file_lines sub {
+ $_ =~ s{24235}{13066}
+ }, $run_params_file;
+
+# Edited to add 1000Genomes_hs37d5 + ensembl_75_transcriptome to lane 8
local $ENV{NPG_CACHED_SAMPLESHEET_FILE} = q[t/data/rna_seq/samplesheet_13066.csv];
my $qc_in = qq{$bc_path/archive/lane8};
@@ -351,13 +357,13 @@ subtest 'RNASeq analysis' => sub {
my $rna_gen;
lives_ok {
$rna_gen = npg_pipeline::function::seq_alignment->new(
- run_folder => $runfolder,
runfolder_path => $runfolder_path,
recalibrated_path => $bc_path,
timestamp => q{2014},
repository => $dir,
conf_path => 't/data/release/config/seq_alignment',
- resource => $default
+ resource => $default,
+ npg_tracking_schema => undef
)
} 'no error creating an object';
is ($rna_gen->id_run, 13066, 'id_run inferred correctly');
@@ -400,8 +406,12 @@ subtest 'RNASeq analysis' => sub {
`mkdir -p $bc_path`;
`mkdir -p $cache_dir`;
copy('t/data/rna_seq/17550_RunInfo.xml', "$runfolder_path/RunInfo.xml") or die 'Copy failed';
- copy('t/data/run_params/runParameters.hiseq.xml', "$runfolder_path/runParameters.xml")
+ $run_params_file = "$runfolder_path/runParameters.xml";
+ copy('t/data/run_params/runParameters.hiseq.xml', $run_params_file)
or die 'Copy failed';
+ edit_file_lines sub {
+ $_ =~ s{24235}{17550}
+ }, $run_params_file;
for ((3,4,6,8)) {
`mkdir -p $bc_path/lane$_`;
@@ -413,13 +423,13 @@ subtest 'RNASeq analysis' => sub {
lives_ok {
$rna_gen = npg_pipeline::function::seq_alignment->new(
- run_folder => $runfolder,
runfolder_path => $runfolder_path,
recalibrated_path => $bc_path,
timestamp => q{2017},
repository => $dir,
conf_path => 't/data/release/config/seq_alignment',
- resource => $default
+ resource => $default,
+ npg_tracking_schema => undef
)
} 'no error creating an object';
is ($rna_gen->id_run, 17550, 'id_run inferred correctly');
@@ -498,13 +508,13 @@ subtest 'RNASeq analysis' => sub {
lives_ok {
$rna_gen = npg_pipeline::function::seq_alignment->new(
- run_folder => $runfolder,
runfolder_path => $runfolder_path,
recalibrated_path => $bc_path,
timestamp => q{2018},
repository => $dir,
conf_path => 't/data/release/config/seq_alignment',
- resource => $default
+ resource => $default,
+ npg_tracking_schema => undef
)
} 'no error creating an object';
@@ -525,21 +535,25 @@ subtest 'test 3' => sub {
my $bc_path = join q[/], $runfolder_path, 'Data/Intensities/BAM_basecalls_20151215-215034';
my $cache_dir = join q[/], $bc_path, 'metadata_cache_18472';
`mkdir -p $bc_path/no_cal/lane2`;
- copy('t/data/run_params/runParameters.hiseq.xml', "$runfolder_path/runParameters.xml")
+ my $run_params_file = "$runfolder_path/runParameters.xml";
+ copy('t/data/run_params/runParameters.hiseq.xml', $run_params_file)
or die 'Copy failed';
+ edit_file_lines sub {
+ $_ =~ s{24235}{18472}
+ }, $run_params_file;
local $ENV{NPG_CACHED_SAMPLESHEET_FILE} = join q[/], $cache_dir, q[samplesheet_18472.csv];
my $se_gen;
lives_ok {
$se_gen = npg_pipeline::function::seq_alignment->new(
- run_folder => $runfolder,
runfolder_path => $runfolder_path,
recalibrated_path => "$bc_path/no_cal",
timestamp => q{2015},
repository => $dir,
conf_path => 't/data/release/config/seq_alignment',
- resource => $default
+ resource => $default,
+ npg_tracking_schema => undef
)
} 'no error creating an object';
is ($se_gen->id_run, 18472, 'id_run inferred correctly');
@@ -578,8 +592,12 @@ subtest 'test 4' => sub {
my $cache_dir = join q[/], $runfolder_path, 'Data/Intensities/BAM_basecalls_20150712-121006/metadata_cache_16839';
`mkdir -p $cache_dir`;
copy('t/data/hiseqx/16839_RunInfo.xml', "$runfolder_path/RunInfo.xml") or die 'Copy failed';
- copy('t/data/run_params/runParameters.hiseqx.upgraded.xml', "$runfolder_path/runParameters.xml")
+ my $run_params_file = "$runfolder_path/runParameters.xml";
+ copy('t/data/run_params/runParameters.hiseqx.upgraded.xml', $run_params_file)
or die 'Copy failed';
+ edit_file_lines sub {
+ $_ =~ s{24420}{16839}
+ }, $run_params_file;
my $fasta_ref = "$ref_dir/Homo_sapiens/GRCh38_full_analysis_set_plus_decoy_hla/all/fasta/Homo_sapiens.GRCh38_full_analysis_set_plus_decoy_hla.fa";
my $target_file = "$ref_dir/Homo_sapiens/GRCh38_full_analysis_set_plus_decoy_hla/all/target/Homo_sapiens.GRCh38_full_analysis_set_plus_decoy_hla.fa";
@@ -590,13 +608,13 @@ subtest 'test 4' => sub {
my $hsx_gen;
lives_ok {
$hsx_gen = npg_pipeline::function::seq_alignment->new(
- run_folder => $runfolder,
runfolder_path => $runfolder_path,
recalibrated_path => $bc_path,
timestamp => q{2015},
repository => $dir,
conf_path => 't/data/release/config/seq_alignment',
- resource => $default
+ resource => $default,
+ npg_tracking_schema => undef
)
} 'no error creating an object';
is ($hsx_gen->id_run, 16839, 'id_run inferred correctly');
@@ -688,21 +706,25 @@ subtest 'Newer flowcell' => sub {
my $cache_dir = join q[/], $runfolder_path, 'Data/Intensities/BAM_basecalls_20150707-232614/metadata_cache_16807';
`mkdir -p $cache_dir`;
copy('t/data/hiseq/16807_RunInfo.xml', "$runfolder_path/RunInfo.xml") or die 'Copy failed';
- copy('t/data/run_params/runParameters.hiseqx.upgraded.xml', "$runfolder_path/runParameters.xml")
+ my $run_params_file = "$runfolder_path/runParameters.xml";
+ copy('t/data/run_params/runParameters.hiseqx.upgraded.xml', $run_params_file)
or die 'Copy failed';
+ edit_file_lines sub {
+ $_ =~ s{24420}{16807}
+ }, $run_params_file;
local $ENV{NPG_CACHED_SAMPLESHEET_FILE} = q[t/data/hiseq/samplesheet_16807.csv];
my $hs_gen;
lives_ok {
$hs_gen = npg_pipeline::function::seq_alignment->new(
- run_folder => $runfolder,
runfolder_path => $runfolder_path,
recalibrated_path => $bc_path,
timestamp => q{2015},
repository => $dir,
conf_path => 't/data/release/config/seq_alignment',
- resource => $default
+ resource => $default,
+ npg_tracking_schema => undef
)
} 'no error creating an object';
is ($hs_gen->id_run, 16807, 'id_run inferred correctly');
@@ -771,22 +793,26 @@ subtest 'MiSeq WES baits' => sub {
`mkdir -p $cache_dir`;
copy('t/data/hiseq/20268_RunInfo.xml', "$runfolder_path/RunInfo.xml") or die 'Copy failed';
- copy('t/data/run_params/runParameters.miseq.xml', "$runfolder_path/runParameters.xml")
+ my $run_params_file = "$runfolder_path/runParameters.xml";
+ copy('t/data/run_params/runParameters.miseq.xml', $run_params_file)
or die 'Copy failed';
+ edit_file_lines sub {
+ $_ =~ s{24347}{20268}
+ }, $run_params_file;
local $ENV{NPG_CACHED_SAMPLESHEET_FILE} = q[t/data/hiseq/samplesheet_20268.csv];
my $bait_gen;
lives_ok {
$bait_gen = npg_pipeline::function::seq_alignment->new(
- run_folder => $runfolder,
runfolder_path => $runfolder_path,
recalibrated_path => $bc_path,
timestamp => q{2016},
repository => $dir,
verbose => 1,
conf_path => 't/data/release/config/seq_alignment',
- resource => $default
+ resource => $default,
+ npg_tracking_schema => undef
)
} 'no error creating an object';
@@ -901,21 +927,25 @@ subtest 'cycle count over threshold' => sub {
my $cache_dir = join q[/], $runfolder_path, 'Data/Intensities/BAM_basecalls_20150712-022206/metadata_cache_16850';
`mkdir -p $cache_dir`;
copy('t/data/miseq/16850_RunInfo.xml', "$runfolder_path/RunInfo.xml") or die 'Copy failed';
- copy('t/data/run_params/runParameters.miseq.xml', "$runfolder_path/runParameters.xml")
+ my $run_params_file = "$runfolder_path/runParameters.xml";
+ copy('t/data/run_params/runParameters.miseq.xml', $run_params_file)
or die 'Copy failed';
+ edit_file_lines sub {
+ $_ =~ s{24347}{16850}
+ }, $run_params_file;
local $ENV{NPG_CACHED_SAMPLESHEET_FILE} = q[t/data/miseq/samplesheet_16850.csv];
my $ms_gen;
lives_ok {
$ms_gen = npg_pipeline::function::seq_alignment->new(
- run_folder => $runfolder,
runfolder_path => $runfolder_path,
recalibrated_path => $bc_path,
timestamp => q{2015},
repository => $dir,
conf_path => 't/data/release/config/seq_alignment',
- resource => $default
+ resource => $default,
+ npg_tracking_schema => undef
)
} 'no error creating an object';
is ($ms_gen->id_run, 16850, 'id_run inferred correctly');
@@ -977,8 +1007,12 @@ subtest 'nonconsented human split, no target alignment' => sub {
my $cache_dir = join q[/], $runfolder_path, 'Data/Intensities/BAM_basecalls_20150707-132329/metadata_cache_16756';
`mkdir -p $cache_dir`;
copy('t/data/hiseq/16756_RunInfo.xml', "$runfolder_path/RunInfo.xml") or die 'Copy failed';
- copy('t/data/run_params/runParameters.miseq.xml', "$runfolder_path/runParameters.xml")
+ my $run_params_file = "$runfolder_path/runParameters.xml";
+ copy('t/data/run_params/runParameters.miseq.xml', $run_params_file)
or die 'Copy failed';
+ edit_file_lines sub {
+ $_ =~ s{24347}{16756}
+ }, $run_params_file;
# default human reference needed for alignment for unconsented human split
local $ENV{NPG_CACHED_SAMPLESHEET_FILE} = q[t/data/hiseq/samplesheet_16756.csv];
@@ -986,13 +1020,13 @@ subtest 'nonconsented human split, no target alignment' => sub {
my $hs_gen;
lives_ok {
$hs_gen = npg_pipeline::function::seq_alignment->new(
- run_folder => $runfolder,
runfolder_path => $runfolder_path,
recalibrated_path => $bc_path,
timestamp => q{2015},
repository => $dir,
conf_path => 't/data/release/config/seq_alignment',
- resource => $default
+ resource => $default,
+ npg_tracking_schema => undef
)
} 'no error creating an object';
is ($hs_gen->id_run, 16756, 'id_run inferred correctly');
@@ -1056,21 +1090,25 @@ subtest 'nonconsented human split, target alignment' => sub {
`mkdir -p $cache_dir`;
copy('t/data/miseq/16866_RunInfo.xml', "$runfolder_path/RunInfo.xml") or die 'Copy failed';
- copy('t/data/run_params/runParameters.miseq.xml', "$runfolder_path/runParameters.xml")
+ my $run_params_file = "$runfolder_path/runParameters.xml";
+ copy('t/data/run_params/runParameters.miseq.xml', $run_params_file)
or die 'Copy failed';
+ edit_file_lines sub {
+ $_ =~ s{24347}{16866}
+ }, $run_params_file;
local $ENV{NPG_CACHED_SAMPLESHEET_FILE} = q[t/data/miseq/samplesheet_16866.csv];
my $ms_gen;
lives_ok {
$ms_gen = npg_pipeline::function::seq_alignment->new(
- run_folder => $runfolder,
runfolder_path => $runfolder_path,
recalibrated_path => $bc_path,
timestamp => q{2015},
repository => $dir,
conf_path => 't/data/release/config/seq_alignment',
- resource => $default
+ resource => $default,
+ npg_tracking_schema => undef
)
} 'no error creating an object';
is ($ms_gen->id_run, 16866, 'id_run inferred correctly');
@@ -1136,21 +1174,25 @@ subtest 'no target alignment, no human split' => sub {
`mkdir $cache_dir`;
copy('t/data/miseq/20990_RunInfo.xml', "$runfolder_path/RunInfo.xml") or die 'Copy failed';
- copy('t/data/run_params/runParameters.miseq.xml', "$runfolder_path/runParameters.xml")
+ my $run_params_file = "$runfolder_path/runParameters.xml";
+ copy('t/data/run_params/runParameters.miseq.xml', $run_params_file)
or die 'Copy failed';
+ edit_file_lines sub {
+ $_ =~ s{24347}{20990}
+ }, $run_params_file;
local $ENV{NPG_CACHED_SAMPLESHEET_FILE} = q[t/data/miseq/samplesheet_20990.csv];
my $ms_gen;
lives_ok {
$ms_gen = npg_pipeline::function::seq_alignment->new(
- run_folder => $runfolder,
runfolder_path => $runfolder_path,
recalibrated_path => $bc_path,
timestamp => q{2016},
repository => $dir,
conf_path => 't/data/release/config/seq_alignment',
- resource => $default
+ resource => $default,
+ npg_tracking_schema => undef
)
} 'no error creating an object';
is ($ms_gen->id_run, 20990, 'id_run (20990) inferred correctly');
@@ -1203,8 +1245,12 @@ subtest 'chromium' => sub {
my $cache_dir = join q[/], $runfolder_path, 'Data/Intensities/BAM_basecalls_20150712-121006/metadata_cache_16839';
`mkdir -p $cache_dir`;
copy('t/data/hiseqx/16839_RunInfo.xml', "$runfolder_path/RunInfo.xml") or die 'Copy failed';
- copy('t/data/run_params/runParameters.hiseqx.upgraded.xml', "$runfolder_path/runParameters.xml")
+ my $run_params_file = "$runfolder_path/runParameters.xml";
+ copy('t/data/run_params/runParameters.hiseqx.upgraded.xml', $run_params_file)
or die 'Copy failed';
+ edit_file_lines sub {
+ $_ =~ s{24420}{16839}
+ }, $run_params_file;
# Chromium libs are not aligned
my $old_ss = q[t/data/hiseqx/samplesheet_16839.csv];
@@ -1221,13 +1267,13 @@ subtest 'chromium' => sub {
my $chromium_gen;
lives_ok {
$chromium_gen = npg_pipeline::function::seq_alignment->new(
- run_folder => $runfolder,
runfolder_path => $runfolder_path,
recalibrated_path => $bc_path,
timestamp => q{2015},
repository => $dir,
conf_path => 't/data/release/config/seq_alignment',
- resource => $default
+ resource => $default,
+ npg_tracking_schema => undef
)
} 'no error creating an object';
is ($chromium_gen->id_run, 16839, 'id_run inferred correctly');
@@ -1288,21 +1334,25 @@ subtest 'miseq' => sub {
`mkdir $bc_path/lane1`;
copy('t/data/miseq/24135_RunInfo.xml', "$runfolder_path/RunInfo.xml") or die 'Copy failed';
- copy('t/data/run_params/runParameters.miseq.xml', "$runfolder_path/runParameters.xml")
+ my $run_params_file = "$runfolder_path/runParameters.xml";
+ copy('t/data/run_params/runParameters.miseq.xml', $run_params_file)
or die 'Copy failed';
+ edit_file_lines sub {
+ $_ =~ s{24347}{24135}
+ }, $run_params_file;
local $ENV{NPG_CACHED_SAMPLESHEET_FILE} = q[t/data/miseq/samplesheet_24135_gbs.csv];
my $ms_gen;
lives_ok {
$ms_gen = npg_pipeline::function::seq_alignment->new(
- run_folder => $runfolder,
runfolder_path => $runfolder_path,
recalibrated_path => $bc_path,
timestamp => q{2017},
repository => $dir,
conf_path => 't/data/release/config/seq_alignment',
- resource => $default
+ resource => $default,
+ npg_tracking_schema => undef
)
} 'no error creating an object';
is ($ms_gen->id_run, 24135, 'id_run inferred correctly');
@@ -1412,21 +1462,25 @@ subtest 'miseq_primer_panel_only' => sub {
`mkdir -p $bc_path/lane1`;
copy('t/data/miseq/24135_RunInfo.xml', "$runfolder_path/RunInfo.xml") or die 'Copy failed';
- copy('t/data/run_params/runParameters.miseq.xml', "$runfolder_path/runParameters.xml")
+ my $run_params_file = "$runfolder_path/runParameters.xml";
+ copy('t/data/run_params/runParameters.miseq.xml', $run_params_file)
or die 'Copy failed';
+ edit_file_lines sub {
+    $_ =~ s{24347}{24135}
+ }, $run_params_file;
local $ENV{NPG_CACHED_SAMPLESHEET_FILE} = q[t/data/miseq/samplesheet_24135_V2.csv];
my $ms_gen;
lives_ok {
$ms_gen = npg_pipeline::function::seq_alignment->new(
- run_folder => $runfolder,
runfolder_path => $runfolder_path,
recalibrated_path => $bc_path,
timestamp => q{2017},
repository => $dir,
conf_path => 't/data/release/config/seq_alignment',
- resource => $default
+ resource => $default,
+ npg_tracking_schema => undef
)
} 'no error creating an object';
@@ -1449,7 +1503,7 @@ subtest 'miseq_primer_panel_only' => sub {
};
subtest 'product_release_tests' => sub {
- plan tests => 279;
+ plan tests => 269;
my %test_runs = (
16850 => { platform => 'miseq', runfolder_name => '150710_MS2_16850_A_MS3014507-500V2', markdup_method => 'samtools', },
@@ -1487,16 +1541,16 @@ subtest 'product_release_tests' => sub {
my $sa_gen;
lives_ok {
$sa_gen = npg_pipeline::function::seq_alignment->new(
- run_folder => $run_details->{runfolder_name},
+      id_run            => $run,
runfolder_path => $runfolder_path,
recalibrated_path => $bc_path,
timestamp => q{1776},
repository => $dir,
conf_path => 't/data/release/config/seq_alignment',
- resource => $default
+ resource => $default,
+ npg_tracking_schema => undef
)
} 'no error creating an object';
- is ($sa_gen->id_run, $run, 'id_run inferred correctly');
my $dps;
lives_ok { $dps = $sa_gen->products->{data_products} } "no error finding data products for run $run";
@@ -1569,13 +1623,13 @@ subtest 'BWA MEM 2 tests' => sub {
my $ms_gen = npg_pipeline::function::seq_alignment->new(
bwa_mem2 => $bwa_mem_flag,
- run_folder => $runfolder,
runfolder_path => $runfolder_path,
recalibrated_path => $bc_path,
timestamp => q{2023},
repository => $dir,
conf_path => 't/data/release/config/seq_alignment',
- resource => $default
+ resource => $default,
+ npg_tracking_schema => undef
);
apply_all_roles($ms_gen, 'npg_pipeline::runfolder_scaffold');
$ms_gen->create_product_level();
@@ -1672,7 +1726,6 @@ subtest 'HiC_flags' => sub {
my $HiC_flags_gen;
lives_ok {
$HiC_flags_gen = npg_pipeline::function::seq_alignment->new(
- run_folder => $runfolder,
runfolder_path => $runfolder_path,
recalibrated_path => $bc_path,
timestamp => q{2021},
@@ -1680,6 +1733,7 @@ subtest 'HiC_flags' => sub {
verbose => 1,
conf_path => 't/data/release/config/seq_alignment',
resource => $default,
+ npg_tracking_schema => undef
)
} 'no error creating an object';
@@ -1781,21 +1835,25 @@ subtest 'Haplotagging test' => sub {
make_path "$bc_path/archive/tileviz";
copy('t/data/miseq/24135_RunInfo.xml', "$runfolder_path/RunInfo.xml") or die 'Copy failed';
+ my $run_params_file = "$runfolder_path/runParameters.xml";
copy('t/data/run_params/runParameters.miseq.xml', "$runfolder_path/runParameters.xml")
or die 'Copy failed';
+ edit_file_lines sub {
+    $_ =~ s{24347}{24135}
+ }, $run_params_file;
local $ENV{NPG_CACHED_SAMPLESHEET_FILE} = q[t/data/miseq/samplesheet_24135_haplotag.csv];
my $ms_gen;
lives_ok {
$ms_gen = npg_pipeline::function::seq_alignment->new(
- run_folder => $runfolder,
runfolder_path => $runfolder_path,
recalibrated_path => $bc_path,
timestamp => q{2017},
repository => $dir,
conf_path => 't/data/release/config/seq_alignment',
- resource => $default
+ resource => $default,
+ npg_tracking_schema => undef
)
} 'no error creating seq_alignment object';
@@ -1910,13 +1968,13 @@ subtest 'single-end markdup_method test' => sub {
my $hs_gen;
lives_ok {
$hs_gen = npg_pipeline::function::seq_alignment->new(
- run_folder => $runfolder,
runfolder_path => $runfolder_path,
recalibrated_path => $bc_path,
timestamp => q{2022},
repository => $dir,
conf_path => 't/data/release/config/seq_alignment',
- resource => $default
+ resource => $default,
+ npg_tracking_schema => undef
)
} 'no error creating seq_alignment object';
@@ -2026,13 +2084,13 @@ subtest 'test reference caching' => sub {
# st::api::lims.
my $generator = npg_pipeline::function::seq_alignment->new(
id_run => $id_run,
- run_folder => $runfolder_name,
runfolder_path => $runfolder_path,
archive_path => $archive_dir,
repository => $dir,
resource => $default,
conf_path => 't/data/release/config/seq_alignment',
lanes => [1, 4],
+ npg_tracking_schema => undef
);
lives_ok { $generator->generate() }
diff --git a/t/20-function-seq_to_irods_archiver.t b/t/20-function-seq_to_irods_archiver.t
index 064a8f5d..8a59f0d6 100644
--- a/t/20-function-seq_to_irods_archiver.t
+++ b/t/20-function-seq_to_irods_archiver.t
@@ -3,12 +3,11 @@ use warnings;
use Test::More tests => 4;
use Test::Exception;
use File::Copy;
-use Log::Log4perl qw[:levels];
+use Log::Log4perl qw(:levels);
use File::Slurp;
-use Cwd;
+use Cwd qw(abs_path getcwd);
use File::Copy::Recursive qw(dircopy);
-use npg_tracking::util::abs_path qw(abs_path);
use t::util;
use_ok('npg_pipeline::function::seq_to_irods_archiver');
diff --git a/t/20-function-warehouse_archiver.t b/t/20-function-warehouse_archiver.t
index fc084422..9f34f526 100644
--- a/t/20-function-warehouse_archiver.t
+++ b/t/20-function-warehouse_archiver.t
@@ -3,22 +3,18 @@ use warnings;
use Test::More tests => 4;
use Test::Exception;
use Log::Log4perl qw(:levels);
+use File::Temp qw(tempdir);
+use File::Copy::Recursive qw(dircopy fmove);
-use t::util;
-
-my $util = t::util->new();
+my $temp_dir = tempdir(CLEANUP => 1);
Log::Log4perl->easy_init({layout => '%d %-5p %c - %m%n',
level => $DEBUG,
- file => join(q[/], $util->temp_directory(), 'logfile'),
+ file => join(q[/], $temp_dir, 'logfile'),
utf8 => 1});
-my $runfolder_path = $util->analysis_runfolder_path();
my $pqq_suffix = q[_post_qc_complete];
my @wh_methods = qw/update_ml_warehouse/;
@wh_methods = map {$_, $_ . $pqq_suffix} @wh_methods;
-
-use_ok('npg_pipeline::function::warehouse_archiver');
-
my $default = {
default => {
minimum_cpu => 0,
@@ -27,14 +23,32 @@ my $default = {
}
};
+my $rf_name = q[210415_A00971_0162_AHNNTMDSXY];
+my $test_rf = q[t/data/novaseq/] . $rf_name;
+my $runfolder_path = join q[/], $temp_dir,
+ q[esa-sv-20201215-02/IL_seq_data/analysis], $rf_name;
+dircopy($test_rf, $runfolder_path);
+my $nocal_path = join q[/], $runfolder_path,
+ q[Data/Intensities/BAM_basecalls_20210417-080715/no_cal];
+mkdir $nocal_path;
+symlink $nocal_path, "$runfolder_path/Latest_Summary";
+
+my $id_run = 37416;
+for my $file (qw(RunInfo.xml RunParameters.xml)) {
+ my $source = join q[/], $runfolder_path, "${id_run}_${file}";
+ my $target = join q[/], $runfolder_path, $file;
+ fmove($source, $target);
+}
+
+use_ok('npg_pipeline::function::warehouse_archiver');
+
subtest 'warehouse updates' => sub {
plan tests => 19;
my $c = npg_pipeline::function::warehouse_archiver->new(
- run_folder => q{123456_IL2_1234},
runfolder_path => $runfolder_path,
- recalibrated_path => $runfolder_path,
- resource => $default
+ resource => $default,
+ npg_tracking_schema => undef
);
isa_ok ($c, 'npg_pipeline::function::warehouse_archiver');
@@ -47,12 +61,12 @@ subtest 'warehouse updates' => sub {
my $postqcc = $m =~ /$pqq_suffix/smx;
my $command = 'npg_runs2mlwarehouse';
- my $job_name = $command . '_1234_pname';
- $command .= ' --verbose --id_run 1234';
+ my $job_name = join q[_], $command, $id_run, 'pname';
+ $command .= " --verbose --id_run $id_run";
if ($postqcc) {
$job_name .= '_postqccomplete';
} else {
- $command .= ' && npg_run_params2mlwarehouse --id_run 1234 --path_glob ' .
+ $command .= " && npg_run_params2mlwarehouse --id_run $id_run --path_glob " .
"'$runfolder_path/{r,R}unParameters.xml'";
}
@@ -62,7 +76,7 @@ subtest 'warehouse updates' => sub {
my $d = $ds->[0];
isa_ok ($d, 'npg_pipeline::function::definition');
- is ($d->identifier, '1234', 'identifier set to run id');
+ is ($d->identifier, $id_run, 'identifier set to run id');
is ($d->created_by, 'npg_pipeline::function::warehouse_archiver', 'created_by');
is ($d->command, $command, "command for $m");
is ($d->job_name, $job_name, "job name for $m");
@@ -87,7 +101,8 @@ subtest 'warehouse updates disabled' => sub {
my $c = npg_pipeline::function::warehouse_archiver->new(
runfolder_path => $runfolder_path,
no_warehouse_update => 1,
- resource => $default
+ resource => $default,
+ npg_tracking_schema => undef
);
$test_method->($c, $m, 'off');
@@ -102,7 +117,8 @@ subtest 'warehouse updates disabled' => sub {
runfolder_path => $runfolder_path,
local => 1,
no_warehouse_update => 0,
- resource => $default
+ resource => $default,
+ npg_tracking_schema => undef
);
$test_method->($c, $m, 'on');
}
@@ -111,11 +127,13 @@ subtest 'warehouse updates disabled' => sub {
subtest 'mlwh updates for a product' => sub {
plan tests => 7;
+ my $rpt_list = join(q[:], $id_run, 4, 5);
my $wa = npg_pipeline::function::warehouse_archiver->new(
- runfolder_path => $runfolder_path,
- label => 'my_label',
- product_rpt_list => '123:4:5',
- resource => $default
+ runfolder_path => $runfolder_path,
+ label => 'my_label',
+ product_rpt_list => $rpt_list,
+ resource => $default,
+ npg_tracking_schema => undef
);
my $ds = $wa->update_ml_warehouse('pname');
@@ -125,7 +143,7 @@ subtest 'mlwh updates for a product' => sub {
isa_ok ($d, 'npg_pipeline::function::definition');
is ($d->identifier, 'my_label', 'identifier set to the label value');
is ($d->command,
- "npg_products2mlwarehouse --verbose --rpt_list '123:4:5'", 'command');
+ "npg_products2mlwarehouse --verbose --rpt_list '$rpt_list'", 'command');
is ($d->job_name, 'npg_runs2mlwarehouse_my_label_pname', 'job name');
is ($d->queue, 'lowload', 'queue');
is_deeply ($d->num_cpus, [0], 'zero CPUs required');
diff --git a/t/50-npg_pipeline-daemon-analysis.t b/t/50-npg_pipeline-daemon-analysis.t
index a208deeb..76b8809d 100644
--- a/t/50-npg_pipeline-daemon-analysis.t
+++ b/t/50-npg_pipeline-daemon-analysis.t
@@ -2,17 +2,16 @@ use strict;
use warnings;
use Test::More tests => 12;
use Test::Exception;
-use Cwd;
+use Cwd qw{ getcwd abs_path };
use File::Path qw{ make_path };
use Log::Log4perl qw{ :levels };
use English qw{ -no_match_vars };
+use File::Temp qw{ tempdir };
use t::util;
use t::dbic_util;
-use npg_tracking::util::abs_path qw(abs_path);
-my $util = t::util->new();
-my $temp_directory = $util->temp_directory();
+my $temp_directory = tempdir(CLEANUP => 1);
Log::Log4perl->easy_init({layout => '%d %-5p %c - %m%n',
level => $DEBUG,
@@ -26,11 +25,8 @@ use_ok($package);
my $script = join q[/], $temp_directory, 'npg_pipeline_central';
`touch $script`;
`chmod +x $script`;
-my $current_dir = abs_path(getcwd());
-local $ENV{PATH} = join q[:], $temp_directory, $current_dir.'/t/bin', $ENV{PATH};
-my $dbic_util = t::dbic_util->new();
-my $schema = $dbic_util->test_schema();
+my $schema = t::dbic_util->new()->test_schema();
my $test_run = $schema->resultset(q[Run])->find(1234);
$test_run->update_run_status('analysis pending', 'pipeline',);
@@ -103,8 +99,8 @@ subtest 'generate command' => sub {
$test_run->update({'batch_id' => 55});
my $runner = $package->new(
- pipeline_script_name => '/bin/true',
- npg_tracking_schema => $schema,
+ pipeline_script_name => '/bin/true',
+ npg_tracking_schema => $schema,
);
my $data = {batch_id => $runner->_get_batch_id($test_run)};
$data->{'job_priority'} = 4;
@@ -112,8 +108,10 @@ subtest 'generate command' => sub {
my $original_path = $ENV{'PATH'};
my $perl_bin = abs_path($EXECUTABLE_NAME);
$perl_bin =~ s/\/perl\Z//smx;
- my $path = join q[:], "${current_dir}/t", $perl_bin, $original_path;
- my $command = q[/bin/true --verbose --job_priority 4 --runfolder_path t --id_flowcell_lims 55];
+ my $path = join q[:], join(q[/], abs_path(getcwd()), q[t]),
+ $perl_bin, $original_path;
+ my $command =
+ q[/bin/true --verbose --job_priority 4 --runfolder_path t --id_flowcell_lims 55];
is($runner->_generate_command($data),
qq[export PATH=${path}; $command], 'command');
};