Skip to content

Commit

Permalink
Revert "PipelineMaker.__call__"
Browse files Browse the repository at this point in the history
This reverts commit 1b2c48d.
  • Loading branch information
marcelm committed Apr 11, 2023
1 parent 56af829 commit 1551fbd
Show file tree
Hide file tree
Showing 3 changed files with 18 additions and 47 deletions.
22 changes: 5 additions & 17 deletions src/cutadapt/cli.py
Original file line number Diff line number Diff line change
Expand Up @@ -61,17 +61,7 @@
import itertools
import multiprocessing
from pathlib import Path
from typing import (
Tuple,
Optional,
Sequence,
List,
Any,
Iterator,
Union,
Dict,
Iterable,
)
from typing import Tuple, Optional, Sequence, List, Any, Iterator, Union, Dict, Iterable
from argparse import ArgumentParser, SUPPRESS, HelpFormatter

import dnaio
Expand Down Expand Up @@ -809,7 +799,7 @@ def __init__(self, args, paired, adapters, adapters2):
self.adapters = adapters
self.adapters2 = adapters2

def __call__(self) -> Pipeline:
def make(self) -> Pipeline:
"""
Set up a processing pipeline from parsed command-line arguments.
Expand Down Expand Up @@ -1120,9 +1110,7 @@ def main(cmdlineargs, default_outfile=sys.stdout.buffer) -> Statistics:
adapters, adapters2 = adapters_from_args(args)
log_adapters(adapters, adapters2 if paired else None)

make_pipeline = PipelineMaker(args, paired, adapters, adapters2)
# Create the pipeline once without running it to get errors early
make_pipeline()
pipeline = PipelineMaker(args, paired, adapters, adapters2).make()
adapter_names: List[Optional[str]] = [a.name for a in adapters]
adapter_names2: List[Optional[str]] = [a.name for a in adapters2]
outfiles = open_output_files(
Expand All @@ -1131,12 +1119,12 @@ def main(cmdlineargs, default_outfile=sys.stdout.buffer) -> Statistics:
inpaths = InputPaths(*input_paths, interleaved=is_interleaved_input)
logger.info(
"Processing %s reads on %d core%s ...",
{False: "single-end", True: "paired-end"}[paired],
{False: "single-end", True: "paired-end"}[pipeline.paired],
cores,
"s" if cores > 1 else "",
)
stats = run_pipeline(
make_pipeline, inpaths, outfiles, cores, progress, args.buffer_size
pipeline, inpaths, outfiles, cores, progress, args.buffer_size
)

except KeyboardInterrupt:
Expand Down
20 changes: 4 additions & 16 deletions src/cutadapt/runners.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,17 +7,7 @@
from abc import ABC, abstractmethod
from contextlib import ExitStack
from multiprocessing.connection import Connection
from typing import (
Any,
List,
Optional,
Tuple,
Sequence,
Iterator,
TYPE_CHECKING,
Union,
Callable,
)
from typing import Any, List, Optional, Tuple, Sequence, Iterator, TYPE_CHECKING, Union

import dnaio

Expand Down Expand Up @@ -434,7 +424,7 @@ def close(self) -> None:


def run_pipeline(
pipeline_maker: Callable[[], Pipeline],
pipeline: Pipeline,
inpaths: InputPaths,
outfiles: OutputFiles,
cores: int,
Expand Down Expand Up @@ -465,17 +455,15 @@ def run_pipeline(
runner: PipelineRunner
if cores > 1:
runner = ParallelPipelineRunner(
pipeline_maker(),
pipeline,
inpaths,
outfiles,
progress,
n_workers=cores,
buffer_size=buffer_size,
)
else:
runner = SerialPipelineRunner(
pipeline_maker(), inpaths.open(), outfiles, progress
)
runner = SerialPipelineRunner(pipeline, inpaths.open(), outfiles, progress)

with runner:
statistics = runner.run()
Expand Down
23 changes: 9 additions & 14 deletions tests/test_api.py
Original file line number Diff line number Diff line change
Expand Up @@ -54,18 +54,14 @@ def test_pipeline_single(tmp_path):
QualityTrimmer(cutoff_front=0, cutoff_back=15),
AdapterCutter([adapter]),
]

def make_pipeline():
pipeline = SingleEndPipeline(modifiers)
pipeline.minimum_length = (10,)
pipeline.discard_untrimmed = True
return pipeline

pipeline = SingleEndPipeline(modifiers)
pipeline.minimum_length = (10,)
pipeline.discard_untrimmed = True
inpaths = InputPaths(datapath("small.fastq"))
info_file = file_opener.xopen_or_none(tmp_path / "info.txt", "wb")
out = file_opener.xopen(tmp_path / "out.fastq", "wb")
outfiles = OutputFiles(info=info_file, out=out)
stats = run_pipeline(make_pipeline, inpaths, outfiles, cores=1)
stats = run_pipeline(pipeline, inpaths, outfiles, cores=1)
assert stats is not None
json.dumps(stats.as_json())
outfiles.close()
Expand Down Expand Up @@ -96,11 +92,10 @@ def test_pipeline_paired(tmp_path):
(AdapterCutter([adapter]), None),
]

def make_pipeline():
pipeline = PairedEndPipeline(modifiers, "any")
pipeline.minimum_length = (10, None)
pipeline.discard_untrimmed = True
return pipeline
pipeline = PairedEndPipeline(modifiers, "any")

pipeline.minimum_length = (10, None)
pipeline.discard_untrimmed = True

file_opener = FileOpener()
inpaths = InputPaths(datapath("paired.1.fastq"), datapath("paired.2.fastq"))
Expand All @@ -113,7 +108,7 @@ def make_pipeline():
out=out,
out2=out2,
)
stats = run_pipeline(make_pipeline, inpaths, outfiles, cores=1, progress=True)
stats = run_pipeline(pipeline, inpaths, outfiles, cores=1, progress=True)
assert stats is not None
_ = stats.as_json()
outfiles.close()
Expand Down

0 comments on commit 1551fbd

Please sign in to comment.