Skip to content

HTTPS clone URL

Subversion checkout URL

You can clone with HTTPS or Subversion.

Download ZIP
Browse files

Clean code. Replace scipy.signal.convolve with numpy.convolve. Bring

 back .c files so that setup.py can work normally now. Set working tag
 number back to 2.0.10.
  • Loading branch information...
commit 70550e286db2dc433b7e3465e9b6b97bf20fbe60 1 parent c97c382
@taoliu authored
View
29 ChangeLog
@@ -1,22 +1,18 @@
-2012-04-27 Benjamin Schiller <benjamin.schiller@ucsf.edu>
- MACS version 2.0.11 (tag:alpha still working)
+2012-04-29 Benjamin Schiller <benjamin.schiller@ucsf.edu>, Tao Liu <taoliu@jimmy.harvard.edu>
+ MACS version 2.0.10 (tag:alpha not released)
- * Introduced BAMPEParser *
- Reads PE data directly, requires bedtools for now
-
- * Introduced --call-summits *
- Uses signal processing methods to call overlapping peaks
+ * Introduced BAMPEParser *
+ Reads PE data directly, requires bedtools for now
- * Added --no-trackline *
- By default, files have descriptive tracklines now
+ * Introduced --call-summits *
+ Uses signal processing methods to call overlapping peaks
- * Changes to output *
- cPeakDetect.pyx has full support for new print/write methods and
- --call-peaks, BAMPEParser, and use of paired-end data
+ * Added --no-trackline *
+ By default, files have descriptive tracklines now
-
-2012-03-12 Tao Liu <taoliu@jimmy.harvard.edu>
- MACS version 2.0.10 (tag:alpha still working)
+ * Changes to output *
+ cPeakDetect.pyx has full support for new print/write methods and
+ --call-peaks, BAMPEParser, and use of paired-end data
* Parser optimization
@@ -74,8 +70,7 @@
* SAMParser and BAMParser
- Bug fixed for paired-end sequencing data. Thanks to Benjamin
- Schiller!
+ Bug fixed for paired-end sequencing data.
* BedGraph.pyx
View
2  DEBIAN/control
@@ -1,5 +1,5 @@
Package: macs2
-Version: 2.0.11
+Version: 2.0.10
Priority: optional
Maintainer: Tao Liu <vladimir.liu@gmail.com>
Depends: python2.7, python2.7-numpy (>=1.3)
View
6 INSTALL.rst
@@ -1,7 +1,7 @@
======================
INSTALL Guide For MACS
======================
-Time-stamp: <2012-04-11 12:06:28 Tao Liu>
+Time-stamp: <2012-04-28 12:54:45 Tao Liu>
Please check the following instructions to complete your installation.
@@ -34,11 +34,11 @@ your operation system. You can easily manage the package, and the
uninstallation is much easier. Download the *deb* package from MACS
download page, and type this in the commend line::
- $ dpkg -i macs_2.0.11.deb
+ $ dpkg -i macs_2.0.10.deb
To uninstall, type::
- $ dpkg -r macs_2.0.11
+ $ dpkg -r macs_2.0.10
This is tested only in Ubuntu >= 10.04 LTS.
View
2  MACS2/Constants.py
@@ -1,4 +1,4 @@
-MACS_VERSION = "2.0.11 20120427 (tag:alpha)"
+MACS_VERSION = "2.0.10 20120427 (tag:alpha)"
MACSDIFF_VERSION = "1.0.4 20110212 (tag:alpha)"
FILTERDUP_VERSION = "1.0.0 20110906"
RANDSAMPLE_VERSION = "1.0.0 20111102"
View
17,559 MACS2/IO/cBedGraph.c
17,559 additions, 0 deletions not shown
View
5 MACS2/IO/cBedGraph.pyx
@@ -1,5 +1,5 @@
# cython: profile=True
-# Time-stamp: <2012-04-25 18:11:11 Tao Liu>
+# Time-stamp: <2012-04-29 21:59:33 Tao Liu>
"""Module for Feature IO classes.
@@ -23,6 +23,7 @@ import logging
from array import array
import numpy as np
+np_convolve = np.convolve
from libc.math cimport sqrt
from libc.math cimport log
@@ -444,7 +445,7 @@ class bedGraphTrackI:
peakindices[i:j] = tmpindex
# apply smoothing window of tsize / 2
w = np.ones(smoothlen, dtype='float32')
- smoothdata = fftconvolve(w/w.sum(), peakdata, mode='same')
+ smoothdata = np_convolve(w/w.sum(), peakdata, mode='same')
# find maxima and minima
local_extrema = np.where(np.diff(np.sign(np.diff(smoothdata))))[0]+1
# get only maxima by requiring it be greater than the mean
View
3,065 MACS2/IO/cBedGraphIO.c
3,065 additions, 0 deletions not shown
View
18,276 MACS2/IO/cCompositeScoreTrack.c
18,276 additions, 0 deletions not shown
View
13,527 MACS2/IO/cFixWidthTrack.c
13,527 additions, 0 deletions not shown
View
41 MACS2/IO/cFixWidthTrack.pyx
@@ -1,5 +1,5 @@
# cython: profile=True
-# Time-stamp: <2012-04-24 18:20:50 Tao Liu>
+# Time-stamp: <2012-04-29 18:04:37 Tao Liu>
"""Module for FWTrack classes.
@@ -24,8 +24,7 @@ from array import array
from random import sample as random_sample
import sys
from MACS2.Constants import *
-
-#from MACS2.cArray import IntArray
+from libc.stdint cimport uint32_t, uint64_t, int32_t, int64_t
import numpy as np
cimport numpy as np
@@ -53,7 +52,7 @@ class FWTrackIII:
Locations are stored and organized by sequence names (chr names) in a
dict. They can be sorted by calling self.sort() function.
"""
- def __init__ (self, int fw=0, char * anno=""):
+ def __init__ (self, int32_t fw=0, char * anno=""):
"""fw is the fixed-width for all locations.
"""
@@ -65,7 +64,7 @@ class FWTrackIII:
self.annotation = anno # need to be figured out
- def add_loc ( self, str chromosome, int fiveendpos, int strand ):
+ def add_loc ( self, str chromosome, int32_t fiveendpos, int32_t strand ):
"""Add a location to the list according to the sequence name.
chromosome -- mostly the chromosome name
@@ -73,7 +72,7 @@ class FWTrackIII:
strand -- 0: plus, 1: minus
"""
if not self.__locations.has_key(chromosome):
- self.__locations[chromosome] = [ np.zeros(BUFFER_SIZE, dtype='int32'), np.zeros(BUFFER_SIZE, dtype='int32') ]
+ self.__locations[chromosome] = [ np.zeros(BUFFER_SIZE, dtype='int32'), np.zeros(BUFFER_SIZE, dtype='int32') ] # [plus,minus strand]
self.__pointer[chromosome] = [ 0, 0 ]
try:
self.__locations[chromosome][strand][self.__pointer[chromosome][strand]] = fiveendpos
@@ -90,14 +89,14 @@ class FWTrackIII:
def finalize ( self ):
""" Resize np arrays for 5' positions and sort them in place """
- cdef int i
+ cdef int32_t i
cdef str c
self.total+=0
chrnames = self.get_chr_names()
- for i in xrange(len(chrnames)):
+ for i in range(len(chrnames)):
c = chrnames[i]
self.__locations[c][0].resize( self.__pointer[c][0], refcheck=False )
self.__locations[c][0].sort()
@@ -133,26 +132,28 @@ class FWTrackIII:
"""Naive sorting for locations.
"""
- cdef int i
+ cdef int32_t i
cdef str c
chrnames = self.get_chr_names()
- for i in xrange(len(chrnames)):
+ for i in range(len(chrnames)):
c = chrnames[i]
self.__locations[c][0].sort()
self.__locations[c][1].sort()
self.__sorted = True
- def filter_dup ( self, int maxnum ):
+ def filter_dup ( self, int32_t maxnum = -1 ):
"""Filter the duplicated reads.
Run it right after you add all data into this object.
"""
- cdef int p, m, n, current_loc, i_chrom
- cdef long i_old, i_new # index for old array, and index for new one
+ cdef int32_t p, m, n, current_loc, i_chrom
+ cdef int32_t i_old, i_new # index for old array, and index for new one
cdef str k
+
+ if maxnum < 0: return # do nothing
if not self.__sorted:
self.sort()
@@ -246,7 +247,7 @@ class FWTrackIII:
Warning: the current object is changed!
"""
- cdef long num, i_chrom
+ cdef int32_t num, i_chrom # num: number of reads allowed on a certain chromosome
cdef str key
self.total = 0
@@ -259,13 +260,13 @@ class FWTrackIII:
key = chrnames[ i_chrom ]
- num = long( round(self.__locations[key][0].shape[0] * percent, 2 ) )
+ num = <int32_t>round(self.__locations[key][0].shape[0] * percent, 2 )
np.random.shuffle( self.__locations[key][0] )
self.__locations[key][0].resize( num )
self.__locations[key][0].sort()
self.__pointer[key][0] = self.__locations[key][0].shape[0]
- num = long( round(self.__locations[key][1].shape[0] * percent, 2 ) )
+ num = <int32_t>round(self.__locations[key][1].shape[0] * percent, 2 )
np.random.shuffle( self.__locations[key][1] )
self.__locations[key][1].resize( num )
self.__locations[key][1].sort()
@@ -274,14 +275,12 @@ class FWTrackIII:
self.total += self.__pointer[key][0] + self.__pointer[key][1]
return
- def sample_num (self, long samplesize):
+ def sample_num (self, uint64_t samplesize):
"""Sample the tags for a given percentage.
Warning: the current object is changed!
"""
cdef float percent
- cdef long num
- cdef str key
percent = float(samplesize)/self.total
self.sample_percent ( percent )
@@ -292,9 +291,7 @@ class FWTrackIII:
write to a file, otherwise, output to standard output.
"""
- cdef long i
- cdef long i_chrom
- cdef int p
+ cdef int32_t i, i_chrom, p
cdef str k
if not fhd:
View
13,929 MACS2/IO/cParser.c
13,929 additions, 0 deletions not shown
View
4 MACS2/IO/cParser.pyx
@@ -1,5 +1,5 @@
# cython: profile=True
-# Time-stamp: <2012-04-27 03:46:07 Tao Liu>
+# Time-stamp: <2012-04-29 17:19:11 Tao Liu>
"""Module for all MACS Parser classes for input.
@@ -25,6 +25,7 @@ import gzip
import io
from MACS2.Constants import *
from MACS2.IO.cFixWidthTrack import FWTrackIII
+from libc.stdint cimport uint32_t, uint64_t
cdef extern from "stdlib.h":
ctypedef unsigned int size_t
@@ -192,6 +193,7 @@ class GenericParser:
i=0
fwtrack.add_loc( chromosome, fpos, strand )
+ # close fwtrack and sort
fwtrack.finalize()
# close file stream.
self.close()
View
14,421 MACS2/IO/cPeakIO.c
14,421 additions, 0 deletions not shown
View
4 MACS2/IO/cPeakIO.pyx
@@ -1,5 +1,5 @@
# cython: profile=True
-# Time-stamp: <2012-04-13 17:09:07 Tao Liu>
+# Time-stamp: <2012-04-29 17:27:17 Tao Liu>
"""Module for PeakIO IO classes.
@@ -35,7 +35,7 @@ __doc__ = "PeakIO class"
# ------------------------------------
# Misc functions
# ------------------------------------
-def subpeak_letters(i):
+cdef subpeak_letters( int i):
if i < 26:
return chr(97+i)
else:
View
18,260 MACS2/IO/cScoreTrack.c
18,260 additions, 0 deletions not shown
View
70 MACS2/IO/cScoreTrack.pyx
@@ -1,5 +1,5 @@
# cython: profile=True
-# Time-stamp: <2012-04-25 18:28:21 Tao Liu>
+# Time-stamp: <2012-04-29 21:59:02 Tao Liu>
"""Module for Feature IO classes.
@@ -20,12 +20,14 @@ with the distribution).
# ------------------------------------
import numpy as np
cimport numpy as np
+
+from array import array as pyarray
+
from cpython cimport bool
-#from np import int64,int32,float32
-from scipy.signal import fftconvolve
+#from scipy.signal import fftconvolve
+np_convolve = np.convolve
from libc.math cimport log10,log
-from operator import itemgetter
from MACS2.Constants import *
from MACS2.cProb cimport poisson_cdf
@@ -35,10 +37,6 @@ from MACS2.hashtable import Int64HashTable
import logging
-#from time import time as ttime
-
-#from MACS2.IO.cBedGraph import bedGraphTrackI
-
# ------------------------------------
# constants
# ------------------------------------
@@ -52,7 +50,6 @@ __doc__ = "scoreTrackI classes"
cdef inline int int_max(int a, int b): return a if a >= b else b
cdef inline int int_min(int a, int b): return a if a <= b else b
-pscore_dict = {}
LOG10_E = 0.43429448190325176
pscore_khashtable = Int64HashTable()
@@ -80,7 +77,6 @@ cdef get_pscore ( int observed, double expectation ):
# pscore_dict[(observed,expectation)] = score
#return score
-logLR_dict = {}
logLR_khashtable = Int64HashTable()
cdef logLR ( double x, double y ):
@@ -149,12 +145,6 @@ class scoreTrackI:
def add_chromosome ( self, str chrom, int chrom_max_len ):
if not self.data.has_key(chrom):
- # self.data[chrom] = np.zeros(chrom_max_len,dtype=[('pos','int32'),
- # ('sample','float32'),
- # ('control','float32'),
- # ('-100logp','int32'),
- # ('-100logq','int32'),
- # ('100logLR','int32'),])
self.data[chrom] = { 'pos': np.zeros(chrom_max_len, dtype="int32"),
'sample': np.zeros(chrom_max_len, dtype="float32"),
'control': np.zeros(chrom_max_len, dtype="float32"),
@@ -270,51 +260,57 @@ class scoreTrackI:
#logging.info("####test#### start make_pq")
n = self.total()
- #value_list = np.empty( n, dtype = [('v', '<f4'), ('l', '<i4')])
- value_dict = {}
- #i = 0 # index for value_list
+ value_dict = Int64HashTable()
+ unique_values = pyarray(BYTE4,[])
# this is a table of how many positions each p value occurs at
for chrom in self.data.keys():
# for each chromosome
pre_p = 0
- pos = iter(self.data[chrom][ 'pos' ]).next
- value = iter(self.data[chrom][ '-100logp' ]).next
+ pos = self.data[chrom][ 'pos' ]
+ value = self.data[chrom][ '-100logp' ]
length = self.pointer[chrom]
- j = 0
- while j<length:
- this_p = pos()
- this_v = value()
+ for j in range(length):
+ this_p = pos[j]
+ this_v = value[j]
assert this_v == this_v, "NaN at %d" % pos
- #value_list[i] = (this_v,this_p-pre_p)
- #i += 1
if value_dict.has_key(this_v):
- value_dict[this_v] += long(this_p - pre_p)
+ value_dict.set_item(this_v, value_dict.get_item(this_v) + this_p - pre_p)
else:
- value_dict[this_v] = long(this_p - pre_p)
- j += 1
+ value_dict.set_item(this_v, this_p - pre_p)
+ unique_values.append(this_v)
pre_p = this_p
- N = sum(value_dict.values())
+ N = 0
+ for i in range(len(unique_values)):
+ N += value_dict.get_item(unique_values[i])
k = 1 # rank
f = -log10(N)
pre_v = -2147483647
pre_l = 0
pre_q = 2147483647 # save the previous q-value
pvalue2qvalue = {pre_v:[0,k,0]} # pvalue:[qvalue,rank,bp_with_this_pvalue]
+ #pvalue2qvalue = np.zeros( (len(unique_values)+1,4), dtype='int64' )
+ #pvalue2qvalue[0] = (pre_v, 0, k, 0)
#logging.info("####test#### start matching pvalue to qvalue")
- for v in sorted(value_dict.keys(),reverse=True):
- l = value_dict[v]
+ unique_values = sorted(unique_values,reverse=True)
+ for i in range(len(unique_values)):
+ v = unique_values[i]
+ l = value_dict.get_item(v)
q = v + int((log10(k) + f) * 100) # we save integers here.
q = max(0,min(pre_q,q)) # make q-score monotonic
+ #pvalue2qvalue[i+1] = (v, q, k, 0)
+ #pvalue2qvalue[i][3] = k - pvalue2qvalue[i][2]
pvalue2qvalue[v] = [q, k, 0]
pvalue2qvalue[pre_v][2] = k-pvalue2qvalue[pre_v][1]
pre_v = v
pre_q = q
k+=l
+ #pvalue2qvalue[i+1][3] = k - pvalue2qvalue[i][2]
pvalue2qvalue[pre_v][2] = k-pvalue2qvalue[pre_v][1]
#logging.info("####test#### finish building pqtable")
# pop the first -1e100 one
pvalue2qvalue.pop(-2147483647)
+ #pvalue2qvalue = pvalue2qvalue[1:]
return pvalue2qvalue
@@ -329,9 +325,11 @@ class scoreTrackI:
# convert pvalue2qvalue to a simple dict
s_p2q = Int64HashTable()
- g = pvalue2qvalue.get
+ #g = pvalue2qvalue.get
for i in pvalue2qvalue.keys():
- s_p2q.set_item(i,g(i)[0])
+ #for i in range(pvalue2qvalue.shape[0]):
+ s_p2q.set_item(i,pvalue2qvalue[i][0])
+ #s_p2q.set_item(pvalue2qvalue[i][0],pvalue2qvalue[i][1])
g = s_p2q.get_item
@@ -421,7 +419,7 @@ class scoreTrackI:
peakindices[i:j] = tmpindex
# apply smoothing window of tsize / 2
w = np.ones(smoothlen, dtype='float32')
- smoothdata = fftconvolve(w/w.sum(), peakdata, mode='same')
+ smoothdata = np_convolve(w/w.sum(), peakdata, mode='same')
# find maxima and minima
local_extrema = np.where(np.diff(np.sign(np.diff(smoothdata))))[0]+1
# get only maxima by requiring it be greater than the mean
View
3,717 MACS2/cArray.c
3,717 additions, 0 deletions not shown
View
11,326 MACS2/cPeakDetect.c
11,326 additions, 0 deletions not shown
View
7 MACS2/cPeakDetect.pyx
@@ -1,5 +1,5 @@
# cython: profile=True
-# Time-stamp: <2012-04-25 17:04:40 Tao Liu>
+# Time-stamp: <2012-04-29 21:05:41 Tao Liu>
"""Module Description
@@ -318,6 +318,8 @@ class PeakDetect:
Finally, a poisson CDF is applied to calculate one-side pvalue
for enrichment.
"""
+ cdef int i
+
treat_total = self.treat.total
control_total = self.control.total
self.ratio_treat2control = float(treat_total)/control_total
@@ -413,8 +415,11 @@ class PeakDetect:
pqfhd = open(self.opt.pqtable,"w")
pqfhd.write( "-log10pvalue\t-log10qvalue\trank\tbasepairs\n" )
for p in sorted(pqtable.keys(),reverse=True):
+ #for i in range(pqtable.shape[0]):
+ #t = pqtable[i]
q = pqtable[p]
pqfhd.write("%.2f\t%.2f\t%d\t%d\n" % (p/100.0,q[0]/100.0,q[1],q[2]))
+ #pqfhd.write("%.2f\t%.2f\t%d\t%d\n" % (t[0]/100.0,t[1]/100.0,t[2],t[3]))
pqfhd.close()
self.info("#3 Assign qvalues ...")
View
12,116 MACS2/cPeakModel.c
12,116 additions, 0 deletions not shown
View
8,320 MACS2/cPileup.c
8,320 additions, 0 deletions not shown
View
19 MACS2/cPileup.pyx
@@ -1,5 +1,5 @@
# cython: profile=True
-# Time-stamp: <2012-04-25 17:33:03 Tao Liu>
+# Time-stamp: <2012-04-29 21:43:17 Tao Liu>
"""Module Description: For pileup functions.
@@ -23,7 +23,6 @@ from array import array
from MACS2.IO.cBedGraph import bedGraphTrackI
from MACS2.Constants import *
-from MACS2.cArray import IntArray
import numpy as np
cimport numpy as np
@@ -83,22 +82,15 @@ def pileup_bdg (trackI, int d, int baseline_value = 0, bool directional = True,
l = len(plus_tags)+len(minus_tags)
- #start_poss = build_start_poss( plus_tags, minus_tags, five_shift, three_shift, l )
- #end_poss = build_end_poss( plus_tags, minus_tags, five_shift, three_shift, l )
-
( start_poss, end_poss ) = start_and_end_poss( plus_tags, minus_tags, five_shift, three_shift )
- #print start_poss[0]
- #print end_poss[0]
ret.add_a_chromosome( chrom, pileup_a_chromosome ( start_poss, end_poss, l, scale_factor, baseline_value ) )
+ # free mem
start_poss.resize(100000, refcheck=False)
start_poss.resize(0, refcheck=False)
end_poss.resize(100000, refcheck=False)
end_poss.resize(0, refcheck=False)
- # free mem?
- #del(start_poss)
- #del(end_poss)
return ret
@@ -186,7 +178,7 @@ def pileup_w_multiple_d_bdg ( trackI, d_s, float baseline_value = 0, bool direct
return ret
-cdef start_and_end_poss ( plus_tags, minus_tags, long five_shift, long three_shift ):
+cdef start_and_end_poss ( np.ndarray plus_tags, np.ndarray minus_tags, long five_shift, long three_shift ):
cdef long i
cdef long lp = plus_tags.shape[0]
cdef long lm = minus_tags.shape[0]
@@ -214,7 +206,7 @@ cdef start_and_end_poss ( plus_tags, minus_tags, long five_shift, long three_shi
return (start_poss, end_poss)
-cdef pileup_a_chromosome ( start_poss, end_poss, long l, float scale_factor = 1, float baseline_value = 0 ):
+cdef pileup_a_chromosome ( np.ndarray start_poss, np.ndarray end_poss, long l, float scale_factor = 1, float baseline_value = 0 ):
"""Return pileup of one chromosome.
"""
@@ -234,7 +226,6 @@ cdef pileup_a_chromosome ( start_poss, end_poss, long l, float scale_factor = 1,
# the first chunk of 0
tmppadd( pre_p )
tmpvadd( float_max(0,baseline_value) )
- #print float_max(0,baseline_value)
pre_v = pileup
@@ -246,7 +237,6 @@ cdef pileup_a_chromosome ( start_poss, end_poss, long l, float scale_factor = 1,
if p != pre_p:
tmppadd( p )
tmpvadd( float_max(pileup * scale_factor, baseline_value) )
- #ret.add_loc(chrom,pre_p,p,pileup)
pre_p = p
pileup += 1
i_s += 1
@@ -255,7 +245,6 @@ cdef pileup_a_chromosome ( start_poss, end_poss, long l, float scale_factor = 1,
if p != pre_p:
tmppadd( p )
tmpvadd( float_max(pileup * scale_factor, baseline_value) )
- #ret.add_loc(chrom,pre_p,p,pileup)
pre_p = p
pileup -= 1
i_e += 1
View
7,463 MACS2/cProb.c
7,463 additions, 0 deletions not shown
View
4,147 MACS2/hashtable.c
1,800 additions, 2,347 deletions not shown
View
2  MACS2/hashtable.pyx
@@ -110,7 +110,7 @@ cdef class Int64HashTable:
def __dealloc__(self):
kh_destroy_int64(self.table)
- cdef inline bint has_key(self, int64_t val):
+ cpdef inline bint has_key(self, int64_t val):
cdef khiter_t k
k = kh_get_int64(self.table, val)
return k != self.table.n_buckets
View
4 setup.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python
-# Time-stamp: <2012-04-25 18:47:55 Tao Liu>
+# Time-stamp: <2012-04-28 12:53:07 Tao Liu>
"""Description
@@ -55,7 +55,7 @@ def main():
]
setup(name="MACS",
- version="2.0.11",
+ version="2.0.10",
description="Model Based Analysis for ChIP-Seq data",
author='Tao Liu',
author_email='vladimir.liu@gmail.com',
View
4 setup_w_cython.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python
-# Time-stamp: <2012-04-25 11:02:52 Tao Liu>
+# Time-stamp: <2012-04-28 12:53:18 Tao Liu>
"""Description:
@@ -76,7 +76,7 @@ def main():
]
setup(name="MACS",
- version="2.0.11",
+ version="2.0.10",
description="Model Based Analysis for ChIP-Seq data",
author='Tao Liu',
author_email='vladimir.liu@gmail.com',
View
7 test/test_cPeakIO_Region.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python
-# Time-stamp: <2012-01-12 22:05:17 Tao Liu>
+# Time-stamp: <2012-04-29 17:27:36 Tao Liu>
import os
import sys
@@ -25,6 +25,7 @@ def setUp(self):
("chrY",600,900),
("chrY",1000,1300),
]
+ self.subpeak_n = [1,10,100,1000]
@@ -49,7 +50,9 @@ def test_merge(self):
self.mr.merge_overlap()
self.mr.write_to_bed(sys.stdout)
-
+# def test_subpeak_letters(self):
+# for i in self.subpeak_n:
+# print subpeak_letters(i)
if __name__ == '__main__':
unittest.main()
View
4 test/test_cPileup.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python
-# Time-stamp: <2012-04-25 17:19:57 Tao Liu>
+# Time-stamp: <2012-04-29 18:25:30 Tao Liu>
"""Module Description: Test functions for pileup functions.
@@ -85,7 +85,7 @@ def test_pileup(self):
def test_pileup_w_multiple_d_bdg ( self ):
# build FWTrackII
- self.fwtrack2 = FWTrackIII()
+ self.fwtrack2 = FWTrackIII(fw=5)
for i in self.plus_pos:
self.fwtrack2.add_loc(self.chrom, i, 0)
for i in self.minus_pos:
Please sign in to comment.
Something went wrong with that request. Please try again.