
Write some missing tests. Closes #112 #113 #114 #115 #116.

1 parent 428cb65 commit cbf9899d1ce45c9efbaf72dbc02ee0636ea15bde onyxfish committed Aug 16, 2012
@@ -9,3 +9,4 @@ dist
docs/_build
.coverage
.tox
+cover
@@ -11,7 +11,7 @@ class CSVClean(CSVKitUtility):
def add_arguments(self):
self.argparser.add_argument('-n', '--dry-run', dest='dryrun', action='store_true',
- help='If this argument is present, no output will be created. Information about what would have been done will be printed to STDERR.''')
+ help='Do not create output files. Information about what would have been done will be printed to STDERR.')
def main(self):
reader = CSVKitReader(self.args.file, **self.reader_kwargs)
@@ -22,15 +22,15 @@ def main(self):
pass
if checker.errs:
for e in checker.errs:
- self.output_file.write("Line %i: %s\n" % (e.line_number,e.msg))
+ self.output_file.write('Line %i: %s\n' % (e.line_number,e.msg))
else:
- self.output_file.write("No errors.\n")
+ self.output_file.write('No errors.\n')
if checker.joins:
- self.output_file.write("%i rows would have been joined/reduced to %i rows after eliminating expected internal line breaks.\n" % (checker.rows_joined, checker.joins))
+ self.output_file.write('%i rows would have been joined/reduced to %i rows after eliminating expected internal line breaks.\n' % (checker.rows_joined, checker.joins))
else:
base,ext = splitext(self.args.file.name)
# should we preserve delimiters and other dialect args from CLI?
- cleaned_file = CSVKitWriter(open("%s_out.csv" % base,"w"), **self.writer_kwargs)
+ cleaned_file = CSVKitWriter(open('%s_out.csv' % base,'w'), **self.writer_kwargs)
checker = RowChecker(reader)
cleaned_file.writerow(checker.column_names)
@@ -39,23 +39,22 @@ def main(self):
if checker.errs:
# should we preserve delimiters and other dialect args from CLI?
- err_filename = "%s_err.csv" % base
- err_file = CSVKitWriter(open(err_filename, "w"), **self.writer_kwargs)
+ err_filename = '%s_err.csv' % base
+ err_file = CSVKitWriter(open(err_filename, 'w'), **self.writer_kwargs)
err_header = ['line_number','msg']
err_header.extend(checker.column_names)
err_file.writerow(err_header)
for e in checker.errs:
err_file.writerow(self._format_error_row(e))
err_count = len(checker.errs)
- self.output_file.write("%i error%s logged to %s\n" % (err_count,"" if err_count == 1 else "s", err_filename))
+ self.output_file.write('%i error%s logged to %s\n' % (err_count,'' if err_count == 1 else 's', err_filename))
else:
- self.output_file.write("No errors.\n")
+ self.output_file.write('No errors.\n')
if checker.joins:
- self.output_file.write("%i rows were joined/reduced to %i rows after eliminating expected internal line breaks.\n" % (checker.rows_joined, checker.joins))
+ self.output_file.write('%i rows were joined/reduced to %i rows after eliminating expected internal line breaks.\n' % (checker.rows_joined, checker.joins))
def _format_error_row(self, e):
- """Format a row for """
err_row = [e.line_number, e.msg]
err_row.extend(e.row)
return err_row
@@ -64,6 +63,6 @@ def launch_new_instance():
utility = CSVClean()
utility.main()
-if __name__ == "__main__":
+if __name__ == '__main__':
launch_new_instance()
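
Not part of this commit, but for context: a minimal sketch of how csvclean is driven programmatically, following the CSVClean(args, output_file) pattern the new tests below use (Python 2, matching the cStringIO imports in the tests). The examples/bad.csv path is the fixture the test relies on.

from cStringIO import StringIO
from csvkit.utilities.csvclean import CSVClean

output = StringIO()
# Parse args and run; base name 'examples/bad' is derived via splitext().
CSVClean(['examples/bad.csv'], output).main()

# Cleaned rows are written to examples/bad_out.csv and rejected rows
# (with line_number and msg columns prepended) to examples/bad_err.csv;
# a summary such as "2 errors logged to examples/bad_err.csv" is written
# to the output stream.
print output.getvalue()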
@@ -0,0 +1,4 @@
+a,b,c
+1,b,c
+2,b,c
+3,b,c
@@ -0,0 +1,4 @@
+a,b,c
+1,b,c
+1,b,c
+4,b,c
@@ -1,9 +1,34 @@
#!/usr/bin/env python
+from cStringIO import StringIO
+import os
import unittest
from csvkit.utilities.csvclean import CSVClean
class TestCSVClean(unittest.TestCase):
- pass
+ def test_simple(self):
+ args = ['examples/bad.csv']
+ output_file = StringIO()
+
+ utility = CSVClean(args, output_file)
+ utility.main()
+
+ self.assertTrue(os.path.exists('examples/bad_err.csv'))
+ self.assertTrue(os.path.exists('examples/bad_out.csv'))
+
+ with open('examples/bad_err.csv') as f:
+ f.next()
+ self.assertEqual(f.next()[0], '1')
+ self.assertEqual(f.next()[0], '2')
+ self.assertRaises(StopIteration, f.next)
+
+ with open('examples/bad_out.csv') as f:
+ f.next()
+ self.assertEqual(f.next()[0], '0')
+ self.assertRaises(StopIteration, f.next)
+
+ # Cleanup
+ os.remove('examples/bad_err.csv')
+ os.remove('examples/bad_out.csv')
@@ -1,9 +1,63 @@
#!/usr/bin/env python
+from cStringIO import StringIO
import unittest
-from csvkit.utilities.csvjoin import CSVJoin
+from csvkit.utilities.csvjoin import CSVJoin
class TestCSVJoin(unittest.TestCase):
- pass
+ def test_sequential(self):
+ args = ['examples/join_a.csv', 'examples/join_b.csv']
+ output_file = StringIO()
+
+ utility = CSVJoin(args, output_file)
+ utility.main()
+
+ output = StringIO(output_file.getvalue())
+
+ self.assertEqual(len(output.readlines()), 4)
+
+ def test_inner(self):
+ args = ['-c', 'a', 'examples/join_a.csv', 'examples/join_b.csv']
+ output_file = StringIO()
+
+ utility = CSVJoin(args, output_file)
+ utility.main()
+
+ output = StringIO(output_file.getvalue())
+
+ self.assertEqual(len(output.readlines()), 3)
+
+ def test_left(self):
+ args = ['-c', 'a', '--left', 'examples/join_a.csv', 'examples/join_b.csv']
+ output_file = StringIO()
+
+ utility = CSVJoin(args, output_file)
+ utility.main()
+
+ output = StringIO(output_file.getvalue())
+
+ self.assertEqual(len(output.readlines()), 5)
+
+ def test_right(self):
+ args = ['-c', 'a', '--right', 'examples/join_a.csv', 'examples/join_b.csv']
+ output_file = StringIO()
+
+ utility = CSVJoin(args, output_file)
+ utility.main()
+
+ output = StringIO(output_file.getvalue())
+
+ self.assertEqual(len(output.readlines()), 4)
+
+ def test_outer(self):
+ args = ['-c', 'a', '--outer', 'examples/join_a.csv', 'examples/join_b.csv']
+ output_file = StringIO()
+
+ utility = CSVJoin(args, output_file)
+ utility.main()
+
+ output = StringIO(output_file.getvalue())
+
+ self.assertEqual(len(output.readlines()), 6)
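
A quick sanity check of the asserted line counts, assuming the two four-line CSVs added above are examples/join_a.csv and examples/join_b.csv (key column 'a' holding 1,2,3 and 1,1,4 respectively). Each asserted count is the joined data rows plus one header line; test_sequential's 4 is simply the three pasted data rows plus the header, since no join column is given.

a_keys = [1, 2, 3]  # 'a' column of join_a.csv (assumed)
b_keys = [1, 1, 4]  # 'a' column of join_b.csv (assumed)

inner = [x for x in a_keys for y in b_keys if x == y]    # [1, 1]        -> 2 rows + header = 3
left = inner + [x for x in a_keys if x not in b_keys]    # + [2, 3]      -> 4 rows + header = 5
right = inner + [y for y in b_keys if y not in a_keys]   # + [4]         -> 3 rows + header = 4
outer = inner + [2, 3, 4]                                 # + all unmatched -> 5 rows + header = 6

assert [len(inner), len(left), len(right), len(outer)] == [2, 4, 3, 5]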
@@ -1,9 +1,15 @@
#!/usr/bin/env python
+from cStringIO import StringIO
import unittest
from csvkit.utilities.csvstat import CSVStat
class TestCSVStat(unittest.TestCase):
- pass
+ def test_runs(self):
+ args = ['examples/dummy.csv']
+ output_file = StringIO()
+
+ utility = CSVStat(args, output_file)
+ utility.main()
