Minor bug fixes related to last update
standage committed Nov 10, 2018
1 parent ffdeb21 commit 3f9d0e3
Showing 4 changed files with 11 additions and 7 deletions.
kevlar/cli/alac.py (2 additions, 2 deletions)
@@ -44,10 +44,10 @@ def subparser(subparsers):
                             'length of the longest contig; each bin specifies '
                             'a reference target sequence against which '
                             'assembled contigs will be aligned')
-    local_args.add_argument('--include', metavar='REGEX', type=re.escape,
+    local_args.add_argument('--include', metavar='REGEX', type=str,
                             help='discard alignments to any chromosomes whose '
                                  'sequence IDs do not match the given pattern')
-    local_args.add_argument('--exclude', metavar='REGEX', type=re.escape,
+    local_args.add_argument('--exclude', metavar='REGEX', type=str,
                             help='discard alignments to any chromosomes whose '
                                  'sequence IDs match the given pattern')
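
The same one-line fix lands in all three CLI modules (alac.py here, cutout.py and localize.py below). argparse passes the raw option value through the type callable, so type=re.escape escaped every regex metacharacter in the user's pattern before it could ever be used as a pattern, turning --include 'chr\d+' into a literal string match. A minimal standalone sketch (not kevlar code) of the difference:

# Minimal sketch (standalone, not kevlar code): argparse applies the `type`
# callable to the raw option value, so type=re.escape mangled user patterns.
import argparse
import re

parser = argparse.ArgumentParser()
parser.add_argument('--escaped', metavar='REGEX', type=re.escape)  # old behavior
parser.add_argument('--include', metavar='REGEX', type=str)        # fixed behavior
args = parser.parse_args(['--escaped', r'chr\d+', '--include', r'chr\d+'])

print(args.escaped)                            # chr\\d\+  (metacharacters escaped)
print(args.include)                            # chr\d+    (pattern intact)
print(bool(re.search(args.escaped, 'chr12')))  # False: matches only the literal text
print(bool(re.search(args.include, 'chr12')))  # True: \d+ matches '12'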

kevlar/cli/cutout.py (2 additions, 2 deletions)
@@ -36,10 +36,10 @@ def subparser(subparsers):
                            'reference targets if the distance between two '
                            'seed matches is > X; by default, X is 3 times the '
                            'length of the longest contig')
-    subparser.add_argument('--include', metavar='REGEX', type=re.escape,
+    subparser.add_argument('--include', metavar='REGEX', type=str,
                            help='discard alignments to any chromosomes whose '
                                 'sequence IDs do not match the given pattern')
-    subparser.add_argument('--exclude', metavar='REGEX', type=re.escape,
+    subparser.add_argument('--exclude', metavar='REGEX', type=str,
                            help='discard alignments to any chromosomes whose '
                                 'sequence IDs match the given pattern')
     subparser.add_argument('contigs', help='assembled reads in Fasta format')
kevlar/cli/localize.py (2 additions, 2 deletions)
@@ -34,10 +34,10 @@ def subparser(subparsers):
                            'reference targets if the distance between two '
                            'seed matches is > X; by default, X is 3 times the '
                            'length of the longest contig')
-    subparser.add_argument('--include', metavar='REGEX', type=re.escape,
+    subparser.add_argument('--include', metavar='REGEX', type=str,
                            help='discard alignments to any chromosomes whose '
                                 'sequence IDs do not match the given pattern')
-    subparser.add_argument('--exclude', metavar='REGEX', type=re.escape,
+    subparser.add_argument('--exclude', metavar='REGEX', type=str,
                            help='discard alignments to any chromosomes whose '
                                 'sequence IDs match the given pattern')
     subparser.add_argument('contigs', help='assembled reads in Fasta format')
kevlar/cutout.py (5 additions, 1 deletion)
@@ -44,6 +44,8 @@ def contigs_2_seeds(partstream, seedstream, seedsize=51, logstream=sys.stdout):
     for n, seed in enumerate(sorted(seeds)):
         print('>seed{}\n{}'.format(n, seed), file=seedstream)
     seedstream.flush()
+    message = 'contigs decomposed into {} seeds'.format(n)
+    print('[kevlar::cutout]', message, file=logstream)


 def get_seed_matches(seedfile, refrfile, seedsize=51, logstream=sys.stdout):
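
The first new message reuses n from the enumerate loop above. A quick sketch (hypothetical seed set, not kevlar's data) shows that n ends at the zero-based index of the last seed, so the reported tally is one less than the true seed count; len(seeds) or enumerate(..., 1) would be exact:

# Sketch of the new logging (hypothetical seed set, not kevlar's data).
import sys

seeds = {'ACGT', 'CCGG', 'TTAA'}
seedstream, logstream = sys.stdout, sys.stderr
for n, seed in enumerate(sorted(seeds)):
    print('>seed{}\n{}'.format(n, seed), file=seedstream)
seedstream.flush()
# n is the index of the last seed, so this prints
# "contigs decomposed into 2 seeds" for 3 seeds; len(seeds) would be exact.
message = 'contigs decomposed into {} seeds'.format(n)
print('[kevlar::cutout]', message, file=logstream)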
@@ -56,6 +58,8 @@ def get_seed_matches(seedfile, refrfile, seedsize=51, logstream=sys.stdout):
     seed_index = dict()
     for seqid, start, end, seq in bwa_align(bwa_args, seqfilename=seedfile):
         seed_index[seq] = (seqid, start)
+    message = 'found positions for {} seeds'.format(len(seed_index))
+    print('[kevlar::cutout]', message, file=logstream)
     return seed_index
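
The second message counts dict entries rather than alignments: because seed_index is keyed on the seed sequence, a seed that aligns to multiple loci keeps only its last position, and the log reports distinct placed seeds. A sketch with a stand-in generator (hypothetical alignments, not bwa_align's real output):

# Stand-in for kevlar's bwa_align generator (hypothetical alignments).
def bwa_align_stub():
    yield ('chr1', 100, 151, 'ACGT')
    yield ('chr2', 500, 551, 'ACGT')  # same seed aligned to a second locus
    yield ('chr1', 900, 951, 'TTAA')

seed_index = dict()
for seqid, start, end, seq in bwa_align_stub():
    seed_index[seq] = (seqid, start)  # later alignments overwrite earlier ones

# Prints "found positions for 2 seeds": 'ACGT' keeps only ('chr2', 500).
print('[kevlar::cutout]', 'found positions for {} seeds'.format(len(seed_index)))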


@@ -123,7 +127,7 @@ def cutout(partstream, refrfile, seedsize=51, delta=50, maxdiff=None,
         cutter = localize(
             contiglist, refrseqs, seed_matches, seedsize=seedsize, delta=delta,
             maxdiff=maxdiff, inclpattern=inclpattern, exclpattern=exclpattern,
-            debug=True, logstream=logstream
+            debug=False, logstream=logstream
         )
         for gdna in cutter:
             yield partid, gdna
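
The final hunk flips debug back to False, so localize runs quietly by default; debug=True looks like a leftover from troubleshooting the previous update.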
