unit testing
marcmaxson committed Mar 10, 2020
1 parent ed823c6 commit af7ef70
Showing 7 changed files with 110 additions and 263 deletions.
4 changes: 0 additions & 4 deletions methylprep/__init__.py
@@ -7,8 +7,6 @@
     get_raw_datasets,
     run_pipeline,
     consolidate_values_for_sheet,
-    load,
-    load_both,
     read_geo,
 )
 from .download import run_series, run_series_list, convert_miniml
@@ -26,8 +24,6 @@
     'run_series',
     'run_series_list',
     'convert_miniml',
-    'load',
-    'load_both',
     'read_geo',
     'build_composite_dataset',
 ]
3 changes: 0 additions & 3 deletions methylprep/processing/__init__.py
@@ -2,7 +2,6 @@
 from .preprocess import preprocess_noob
 from .raw_dataset import RawDataset, get_raw_datasets, get_array_type
 from .postprocess import consolidate_values_for_sheet
-from .load_processed import load, load_both
 from .read_geo_processed import read_geo

 __all__ = [
@@ -14,7 +13,5 @@
     'run_pipeline',
     'consolidate_values_for_sheet',
     'get_array_type',
-    'load',
-    'load_both',
     'read_geo',
 ]
223 changes: 0 additions & 223 deletions methylprep/processing/load_processed.py

This file was deleted.
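load_processed.py previously supplied the load and load_both helpers that are dropped from the package exports above. Callers that used them to re-open processed output can fall back to plain pandas; a minimal sketch (the beta_values.pkl filename and location are assumptions about typical run_pipeline output, not something this commit guarantees):

import pandas as pd

# re-open a consolidated beta-value pickle written by an earlier run_pipeline run
# (the filename and path below are assumed for illustration, not defined by this commit)
betas = pd.read_pickle('path/to/processed/beta_values.pkl')
print(betas.shape)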

2 changes: 1 addition & 1 deletion methylprep/processing/pipeline.py
@@ -340,7 +340,7 @@ def run_pipeline(data_dir, array_type=None, export=False, manifest_filepath=None
         LOGGER.info(f"Exported control probes to {control_filename}")
         consolidate_control_snp(data_containers, Path(data_dir,control_filename))

-    elif betas:
+    if betas:
         return consolidate_values_for_sheet(data_containers, postprocess_func_colname='beta_value')
     elif m_value:
         return consolidate_values_for_sheet(data_containers, postprocess_func_colname='m_value')
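With elif, the beta/m-value return was skipped whenever the control-probe export branch above it ran first; switching to if lets run_pipeline export controls and still return the consolidated values. A minimal usage sketch (the directory path is a placeholder, and treating betas as a keyword argument is assumed from the branch shown above):

import methylprep

# placeholder directory containing IDAT files and a sample sheet
betas = methylprep.run_pipeline('path/to/idats', betas=True)
print(betas.shape)  # consolidated beta values, one column per sample (orientation assumed)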
12 changes: 12 additions & 0 deletions tests/download/test_geo_alert.py
@@ -0,0 +1,12 @@
import methylprep
import pandas as pd
from pathlib import Path

def test_geo_alert(keys="blood spleen"):
    # search GEO for the given keywords; search() should return a DataFrame of results
    df = methylprep.download.geo_alert.search(keys)
    if not isinstance(df, pd.DataFrame):
        raise AssertionError()
    # search() may also leave behind a CSV named after the keywords; remove it if present
    report = Path(f'geo_alert {keys}.csv')
    if report.exists():
        report.unlink()
    else:
        print('file not found')
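To exercise only this new test, a standard pytest invocation from the repository root works (assuming pytest is installed; note the test queries GEO, so it needs network access):

pytest tests/download/test_geo_alert.py -q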
25 changes: 25 additions & 0 deletions tests/processing/test_compare_to_minfi.r
@@ -0,0 +1,25 @@
library(minfi)
# specify directory
minfi_baseDir = paste0("/Users/nrigby/Desktop/idats_standard/batch_1052641/")
# read samplesheet
minfi_targets = read.metharray.sheet(minfi_baseDir)
# read IDAT's into RGChannelSet
rgSet <- read.metharray.exp(targets = minfi_targets, verbose = TRUE)
# preprocessRaw
mSet.raw = preprocessRaw(rgSet)
# make files
meth.raw = getMeth(mSet.raw)
write.csv(file='~/Desktop/idats_standard/minfi_raw_meth.csv',x=meth.raw,row.names=TRUE,col.names=TRUE)
unmeth.raw = getUnmeth(mSet.raw)
write.csv(file='~/Desktop/idats_standard/minfi_raw_unmeth.csv',x=unmeth.raw,row.names=TRUE,col.names=TRUE)
betas.raw = getBeta(mSet.raw)
write.csv(file='~/Desktop/idats_standard/minfi_raw_betas.csv',x=betas.raw,row.names=TRUE,col.names=TRUE)
# preprocessNoob
mSet.noob = preprocessNoob(rgSet)
# make files
meth.noob = getMeth(mSet.noob)
write.csv(file='~/Desktop/idats_standard/minfi_noob_meth.csv',x=meth.noob,row.names=TRUE,col.names=TRUE)
unmeth.noob = getUnmeth(mSet.noob)
write.csv(file='~/Desktop/idats_standard/minfi_noob_unmeth.csv',x=unmeth.noob,row.names=TRUE,col.names=TRUE)
betas.noob = getBeta(mSet.noob)
write.csv(file='~/Desktop/idats_standard/minfi_noob_betas.csv',x=betas.noob,row.names=TRUE,col.names=TRUE)
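The R script only writes minfi's raw and NOOB matrices to CSV; comparing them against methylprep's output happens separately. A minimal sketch of that comparison on the Python side (paths, sample alignment, and the orientation of both frames are assumptions, not part of this commit):

import pandas as pd
import methylprep

# NOOB betas exported by the R script above (path assumed from the script)
minfi_betas = pd.read_csv('/Users/nrigby/Desktop/idats_standard/minfi_noob_betas.csv', index_col=0)

# methylprep betas for the same IDATs; betas=True returns the consolidated DataFrame
mprep_betas = methylprep.run_pipeline('/Users/nrigby/Desktop/idats_standard/batch_1052641/', betas=True)

# align on shared probe IDs and report the largest disagreement
# (assumes both frames are probes x samples with matching column order)
shared = minfi_betas.index.intersection(mprep_betas.index)
diff = minfi_betas.loc[shared].values - mprep_betas.loc[shared].values
print('max abs difference:', abs(diff).max())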