Permalink
Browse files

Merge branch 'master' of github.com:MIT-LCP/mimic-code

  • Loading branch information...
2 parents 46cbd21 + f796b6f commit cffc02d6898a0830bfc98952cb205e2d1d168221 @tompollard tompollard committed Dec 22, 2015
Showing with 67 additions and 6 deletions.
  1. +67 −6 tests/test_postgres_build.py
@@ -5,8 +5,13 @@
import os
from subprocess import call
+# Prep for Oracle and MySQL database connection
+# http://stackoverflow.com/questions/10065051/python-pandas-and-databases-like-mysql
+# import cx_Oracle
+# import MySQLdb
+
# Config
-sqluser = 'postgres'
+psqluser = 'postgres'
testdbname = 'mimic_test_db'
hostname = 'localhost'
datadir = 'testdata/v1_3/'
@@ -78,21 +83,41 @@ def run_postgres_build_scripts(cur):
mimic_data_dir = '/home/mimicadmin/data/mimiciii_1_3/'
else:
mimic_data_dir = curpath+datadir
- call(['psql','-f',fn,'-d',testdbname,'-U',sqluser,'-v','mimic_data_dir='+mimic_data_dir])
+ call(['psql','-f',fn,'-d',testdbname,'-U',psqluser,'-v','mimic_data_dir='+mimic_data_dir])
# Add constraints
fn = curpath + '../buildmimic/postgres/postgres_add_constraints.sql'
cur.execute(open(fn, "r").read())
# Add indexes
fn = curpath + '../buildmimic/postgres/postgres_add_indexes.sql'
cur.execute(open(fn, "r").read())
+# # Prep for adding MySQL build
+# def run_mysql_build_scripts(cur):
+# # Create tables
+# fn = curpath + '../buildmimic/mysql/mysql_create_tables.sql'
+# cur.execute(open(fn, "r").read())
+# # Loads data
+# fn = curpath + '../buildmimic/mysql/mysql_load_data.sql'
+# if os.environ.has_key('USER') and os.environ['USER'] == 'jenkins':
+# # use full dataset
+# mimic_data_dir = '/home/mimicadmin/data/mimiciii_1_3/'
+# else:
+# mimic_data_dir = curpath+datadir
+# call(['psql','-f',fn,'-d',testdbname,'-U',psqluser,'-v','mimic_data_dir='+mimic_data_dir])
+# # Add constraints
+# fn = curpath + '../buildmimic/mysql/mysql_add_constraints.sql'
+# cur.execute(open(fn, "r").read())
+# # Add indexes
+# fn = curpath + '../buildmimic/mysql/mysql_add_indexes.sql'
+# cur.execute(open(fn, "r").read())
+
# Class to run unit tests
class test_postgres(unittest.TestCase):
# setUpClass runs once for the class
@classmethod
def setUpClass(cls):
# Connect to default postgres database
- cls.con = psycopg2.connect(dbname='postgres', user=sqluser)
+ cls.con = psycopg2.connect(dbname='postgres', user=psqluser)
cls.con.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)
cls.cur = cls.con.cursor()
# Create test database
@@ -104,7 +129,7 @@ def setUpClass(cls):
cls.cur.close()
cls.con.close()
# Connect to the test database
- cls.con = psycopg2.connect(dbname=testdbname, user=sqluser)
+ cls.con = psycopg2.connect(dbname=testdbname, user=psqluser)
cls.con.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)
cls.cur = cls.con.cursor()
# Build the test database
@@ -116,7 +141,7 @@ def setUpClass(cls):
@classmethod
def tearDownClass(cls):
# Connect to default postgres database
- cls.con = psycopg2.connect(dbname='postgres', user=sqluser)
+ cls.con = psycopg2.connect(dbname='postgres', user=psqluser)
cls.con.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)
cls.cur = cls.con.cursor()
# Drop test database
@@ -127,7 +152,7 @@ def tearDownClass(cls):
# setUp runs once for each test method
def setUp(self):
# Connect to the test database
- self.con = psycopg2.connect(dbname=testdbname, user=sqluser)
+ self.con = psycopg2.connect(dbname=testdbname, user=psqluser)
self.con.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)
self.cur = self.con.cursor()
@@ -250,6 +275,42 @@ def test_row_counts_are_as_expected(self):
queryresult = pd.read_sql_query(query,self.con)
self.assertEqual(queryresult.values[0][0],expectedrows)
+ def test_age_and_los_is_expected(self):
+ query = \
+ """
+ WITH icuadmissions as (
+ SELECT a.subject_id, a.hadm_id, i.icustay_id,
+ a.admittime as hosp_admittime, a.dischtime as hosp_dischtime,
+ i.first_careunit,
+ DENSE_RANK() over(PARTITION BY a.hadm_id ORDER BY i.intime ASC) as icu_seq,
+ p.dob, p.dod, i.intime as icu_intime, i.outtime as icu_outtime,
+ i.los as icu_los,
+ round((EXTRACT(EPOCH FROM (a.dischtime-a.admittime))/60/60/24) :: NUMERIC, 4) as hosp_los,
+ p.gender,
+ round((EXTRACT(EPOCH FROM (a.admittime-p.dob))/60/60/24/365.242) :: NUMERIC, 4) as age_hosp_in,
+ round((EXTRACT(EPOCH FROM (i.intime-p.dob))/60/60/24/365.242) :: NUMERIC, 4) as age_icu_in,
+ hospital_expire_flag,
+ CASE WHEN p.dod IS NOT NULL
+ AND p.dod >= i.intime - interval '6 hour'
+ AND p.dod <= i.outtime + interval '6 hour' THEN 1
+ ELSE 0 END AS icu_expire_flag
+ FROM admissions a
+ INNER JOIN icustays i
+ ON a.hadm_id = i.hadm_id
+ INNER JOIN patients p
+ ON a.subject_id = p.subject_id
+ ORDER BY a.subject_id, i.intime)
+ SELECT round(avg(age_icu_in)) as avg_age_icu,
+ round(avg(hosp_los)) as avg_los_hosp,
+ round(avg(icu_los)) as avg_los_icu
+ FROM icuadmissions;
+ """
+ queryresult = pd.read_sql_query(query,self.con)
+ self.assertEqual(queryresult['avg_age_icu'].values[0][0],65)
+ self.assertEqual(queryresult['avg_los_hosp'].values[0][0],11)
+ self.assertEqual(queryresult['avg_los_icu'].values[0][0],5)
+
+
def main():
    # Entry point: delegate to unittest's CLI runner, which discovers
    # and executes the test_postgres test case methods in this module.
    unittest.main()

0 comments on commit cffc02d

Please sign in to comment.