Swap the order of arguments in Column from name, data, .. to data, name, .. #840

Merged: 4 commits, Mar 6, 2013
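
The pattern applied throughout this diff, shown as a standalone sketch (illustrative values; import path as in astropy 0.2/0.3). Keyword arguments are unambiguous under both the old and the new order, which is why every call site below is rewritten to that form:

    from astropy.table import Column

    # Old 0.2 positional order, being phased out by this PR:
    #     Column('a', [1, 2, 3])        # Column(name, data, ...)
    # New 0.3 positional order:
    #     Column([1, 2, 3], 'a')        # Column(data, name, ...)

    # Works the same on 0.2 and 0.3:
    c = Column(name='a', data=[1, 2, 3])
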
64 changes: 32 additions & 32 deletions astropy/coordinates/tests/accuracy/generate_ref_ast.py
@@ -51,13 +51,13 @@ def ref_fk4_no_e_fk4(fnout='fk4_no_e_fk4.csv'):

# Write out table to a CSV file
t = Table()
t.add_column(Column('obstime', obstime))
t.add_column(Column('ra_in', ra))
t.add_column(Column('dec_in', dec))
t.add_column(Column('ra_fk4ne', ra_fk4ne))
t.add_column(Column('dec_fk4ne', dec_fk4ne))
t.add_column(Column('ra_fk4', ra_fk4))
t.add_column(Column('dec_fk4', dec_fk4))
t.add_column(Column(name='obstime', data=obstime))
t.add_column(Column(name='ra_in', data=ra))
t.add_column(Column(name='dec_in', data=dec))
t.add_column(Column(name='ra_fk4ne', data=ra_fk4ne))
t.add_column(Column(name='dec_fk4ne', data=dec_fk4ne))
t.add_column(Column(name='ra_fk4', data=ra_fk4))
t.add_column(Column(name='dec_fk4', data=dec_fk4))
f = open(fnout, 'wb')
f.write("# This file was generated with the {0} script, and the reference "
"values were computed using AST\n".format(os.path.basename(__file__)))
@@ -112,15 +112,15 @@ def ref_fk4_no_e_fk5(fnout='fk4_no_e_fk5.csv'):

# Write out table to a CSV file
t = Table()
t.add_column(Column('equinox_fk4', equinox_fk4))
t.add_column(Column('equinox_fk5', equinox_fk5))
t.add_column(Column('obstime', obstime))
t.add_column(Column('ra_in', ra))
t.add_column(Column('dec_in', dec))
t.add_column(Column('ra_fk5', ra_fk5))
t.add_column(Column('dec_fk5', dec_fk5))
t.add_column(Column('ra_fk4', ra_fk4))
t.add_column(Column('dec_fk4', dec_fk4))
t.add_column(Column(name='equinox_fk4', data=equinox_fk4))
t.add_column(Column(name='equinox_fk5', data=equinox_fk5))
t.add_column(Column(name='obstime', data=obstime))
t.add_column(Column(name='ra_in', data=ra))
t.add_column(Column(name='dec_in', data=dec))
t.add_column(Column(name='ra_fk5', data=ra_fk5))
t.add_column(Column(name='dec_fk5', data=dec_fk5))
t.add_column(Column(name='ra_fk4', data=ra_fk4))
t.add_column(Column(name='dec_fk4', data=dec_fk4))
f = open(fnout, 'wb')
f.write("# This file was generated with the {0} script, and the reference "
"values were computed using AST\n".format(os.path.basename(__file__)))
@@ -174,14 +174,14 @@ def ref_galactic_fk4(fnout='galactic_fk4.csv'):

# Write out table to a CSV file
t = Table()
t.add_column(Column('equinox_fk4', equinox_fk4))
t.add_column(Column('obstime', obstime))
t.add_column(Column('lon_in', lon))
t.add_column(Column('lat_in', lat))
t.add_column(Column('ra_fk4', ra_fk4))
t.add_column(Column('dec_fk4', dec_fk4))
t.add_column(Column('lon_gal', lon_gal))
t.add_column(Column('lat_gal', lat_gal))
t.add_column(Column(name='equinox_fk4', data=equinox_fk4))
t.add_column(Column(name='obstime', data=obstime))
t.add_column(Column(name='lon_in', data=lon))
t.add_column(Column(name='lat_in', data=lat))
t.add_column(Column(name='ra_fk4', data=ra_fk4))
t.add_column(Column(name='dec_fk4', data=dec_fk4))
t.add_column(Column(name='lon_gal', data=lon_gal))
t.add_column(Column(name='lat_gal', data=lat_gal))
f = open(fnout, 'wb')
f.write("# This file was generated with the {0} script, and the reference "
"values were computed using AST\n".format(os.path.basename(__file__)))
@@ -235,14 +235,14 @@ def ref_icrs_fk5(fnout='icrs_fk5.csv'):

# Write out table to a CSV file
t = Table()
t.add_column(Column('equinox_fk5', equinox_fk5))
t.add_column(Column('obstime', obstime))
t.add_column(Column('ra_in', ra))
t.add_column(Column('dec_in', dec))
t.add_column(Column('ra_fk5', ra_fk5))
t.add_column(Column('dec_fk5', dec_fk5))
t.add_column(Column('ra_icrs', ra_icrs))
t.add_column(Column('dec_icrs', dec_icrs))
t.add_column(Column(name='equinox_fk5', data=equinox_fk5))
t.add_column(Column(name='obstime', data=obstime))
t.add_column(Column(name='ra_in', data=ra))
t.add_column(Column(name='dec_in', data=dec))
t.add_column(Column(name='ra_fk5', data=ra_fk5))
t.add_column(Column(name='dec_fk5', data=dec_fk5))
t.add_column(Column(name='ra_icrs', data=ra_icrs))
t.add_column(Column(name='dec_icrs', data=dec_icrs))
f = open(fnout, 'wb')
f.write("# This file was generated with the {0} script, and the reference "
"values were computed using AST\n".format(os.path.basename(__file__)))
40 changes: 20 additions & 20 deletions astropy/io/misc/tests/test_hdf5.py
@@ -34,7 +34,7 @@ def _default_values(dtype):
def test_write_nopath(tmpdir):
test_file = str(tmpdir.join('test.hdf5'))
t1 = Table()
t1.add_column(Column('a', [1, 2, 3]))
t1.add_column(Column(name='a', data=[1, 2, 3]))
with pytest.raises(ValueError) as exc:
t1.write(test_file)
assert exc.value.args[0] == "table path should be set via the path= argument"
@@ -44,7 +44,7 @@ def test_write_nopath(tmpdir):
def test_read_nopath(tmpdir):
test_file = str(tmpdir.join('test.hdf5'))
t1 = Table()
t1.add_column(Column('a', [1, 2, 3]))
t1.add_column(Column(name='a', data=[1, 2, 3]))
t1.write(test_file, path='the_table')
with pytest.raises(ValueError) as exc:
Table.read(test_file)
@@ -55,7 +55,7 @@ def test_read_nopath(tmpdir):
def test_write_invalid_path(tmpdir):
test_file = str(tmpdir.join('test.hdf5'))
t1 = Table()
t1.add_column(Column('a', [1, 2, 3]))
t1.add_column(Column(name='a', data=[1, 2, 3]))
with pytest.raises(ValueError) as exc:
t1.write(test_file, path='test/')
assert exc.value.args[0] == "table path should end with table name, not /"
@@ -65,7 +65,7 @@ def test_write_invalid_path(tmpdir):
def test_read_invalid_path(tmpdir):
test_file = str(tmpdir.join('test.hdf5'))
t1 = Table()
t1.add_column(Column('a', [1, 2, 3]))
t1.add_column(Column(name='a', data=[1, 2, 3]))
t1.write(test_file, path='the_table')
with pytest.raises(ValueError) as exc:
Table.read(test_file, path='test/')
@@ -104,7 +104,7 @@ def test_read_missing_group_fileobj(tmpdir):
def test_read_write_simple(tmpdir):
test_file = str(tmpdir.join('test.hdf5'))
t1 = Table()
t1.add_column(Column('a', [1, 2, 3]))
t1.add_column(Column(name='a', data=[1, 2, 3]))
t1.write(test_file, path='the_table')
t2 = Table.read(test_file, path='the_table')
assert np.all(t2['a'] == [1, 2, 3])
@@ -114,7 +114,7 @@ def test_read_write_simple(tmpdir):
def test_read_write_existing_table(tmpdir):
test_file = str(tmpdir.join('test.hdf5'))
t1 = Table()
t1.add_column(Column('a', [1, 2, 3]))
t1.add_column(Column(name='a', data=[1, 2, 3]))
t1.write(test_file, path='the_table')
with pytest.raises(IOError) as exc:
t1.write(test_file, path='the_table', append=True)
@@ -125,7 +125,7 @@ def test_read_write_existing_table(tmpdir):
def test_read_write_memory(tmpdir):
with h5py.File('test', driver='core', backing_store=False) as output_file:
t1 = Table()
t1.add_column(Column('a', [1, 2, 3]))
t1.add_column(Column(name='a', data=[1, 2, 3]))
t1.write(output_file, path='the_table')
t2 = Table.read(output_file, path='the_table')
assert np.all(t2['a'] == [1, 2, 3])
@@ -136,7 +136,7 @@ def test_read_write_existing(tmpdir):
test_file = str(tmpdir.join('test.hdf5'))
h5py.File(test_file, 'w').close() # create empty file
t1 = Table()
t1.add_column(Column('a', [1, 2, 3]))
t1.add_column(Column(name='a', data=[1, 2, 3]))
with pytest.raises(IOError) as exc:
t1.write(test_file, path='the_table')
assert exc.value.args[0].startswith("File exists:")
@@ -147,7 +147,7 @@ def test_read_write_existing_overwrite(tmpdir):
test_file = str(tmpdir.join('test.hdf5'))
h5py.File(test_file, 'w').close() # create empty file
t1 = Table()
t1.add_column(Column('a', [1, 2, 3]))
t1.add_column(Column(name='a', data=[1, 2, 3]))
t1.write(test_file, path='the_table', overwrite=True)
t2 = Table.read(test_file, path='the_table')
assert np.all(t2['a'] == [1, 2, 3])
@@ -158,7 +158,7 @@ def test_read_write_existing_append(tmpdir):
test_file = str(tmpdir.join('test.hdf5'))
h5py.File(test_file, 'w').close() # create empty file
t1 = Table()
t1.add_column(Column('a', [1, 2, 3]))
t1.add_column(Column(name='a', data=[1, 2, 3]))
t1.write(test_file, path='the_table_1', append=True)
t1.write(test_file, path='the_table_2', append=True)
t2 = Table.read(test_file, path='the_table_1')
@@ -173,7 +173,7 @@ def test_read_write_existing_append_groups(tmpdir):
with h5py.File(test_file, 'w') as f:
f.create_group('test_1')
t1 = Table()
t1.add_column(Column('a', [1, 2, 3]))
t1.add_column(Column(name='a', data=[1, 2, 3]))
t1.write(test_file, path='test_1/the_table_1', append=True)
t1.write(test_file, path='test_2/the_table_2', append=True)
t2 = Table.read(test_file, path='test_1/the_table_1')
@@ -188,7 +188,7 @@ def test_read_fileobj(tmpdir):
test_file = str(tmpdir.join('test.hdf5'))

t1 = Table()
t1.add_column(Column('a', [1, 2, 3]))
t1.add_column(Column(name='a', data=[1, 2, 3]))
t1.write(test_file, path='the_table')

import h5py
@@ -203,7 +203,7 @@ def test_read_filobj_path(tmpdir):
test_file = str(tmpdir.join('test.hdf5'))

t1 = Table()
t1.add_column(Column('a', [1, 2, 3]))
t1.add_column(Column(name='a', data=[1, 2, 3]))
t1.write(test_file, path='path/to/data/the_table')

import h5py
@@ -218,7 +218,7 @@ def test_read_filobj_group_path(tmpdir):
test_file = str(tmpdir.join('test.hdf5'))

t1 = Table()
t1.add_column(Column('a', [1, 2, 3]))
t1.add_column(Column(name='a', data=[1, 2, 3]))
t1.write(test_file, path='path/to/data/the_table')

import h5py
@@ -235,7 +235,7 @@ def test_write_fileobj(tmpdir):
import h5py
with h5py.File(test_file, 'w') as output_file:
t1 = Table()
t1.add_column(Column('a', [1, 2, 3]))
t1.add_column(Column(name='a', data=[1, 2, 3]))
t1.write(output_file, path='the_table')

t2 = Table.read(test_file, path='the_table')
@@ -250,7 +250,7 @@ def test_write_filobj_group(tmpdir):
import h5py
with h5py.File(test_file, 'w') as output_file:
t1 = Table()
t1.add_column(Column('a', [1, 2, 3]))
t1.add_column(Column(name='a', data=[1, 2, 3]))
t1.write(output_file, path='path/to/data/the_table')

t2 = Table.read(test_file, path='path/to/data/the_table')
@@ -266,7 +266,7 @@ def test_preserve_single_dtypes(tmpdir, dtype):
values = _default_values(dtype)

t1 = Table()
t1.add_column(Column('a', np.array(values, dtype=dtype)))
t1.add_column(Column(name='a', data=np.array(values, dtype=dtype)))
t1.write(test_file, path='the_table')

t2 = Table.read(test_file, path='the_table')
@@ -284,7 +284,7 @@ def test_preserve_all_dtypes(tmpdir):

for dtype in ALL_DTYPES:
values = _default_values(dtype)
t1.add_column(Column(str(dtype), np.array(values, dtype=dtype)))
t1.add_column(Column(name=str(dtype), data=np.array(values, dtype=dtype)))

t1.write(test_file, path='the_table')

@@ -302,7 +302,7 @@ def test_preserve_meta(tmpdir):
test_file = str(tmpdir.join('test.hdf5'))

t1 = Table()
t1.add_column(Column('a', [1, 2, 3]))
t1.add_column(Column(name='a', data=[1, 2, 3]))

t1.meta['a'] = 1
t1.meta['b'] = 'hello'
@@ -324,7 +324,7 @@ def test_skip_meta(tmpdir):
test_file = str(tmpdir.join('test.hdf5'))

t1 = Table()
t1.add_column(Column('a', [1, 2, 3]))
t1.add_column(Column(name='a', data=[1, 2, 3]))

t1.meta['a'] = 1
t1.meta['b'] = 'hello'
2 changes: 1 addition & 1 deletion astropy/table/__init__.py
@@ -1,5 +1,5 @@
# Licensed under a 3-clause BSD style license - see LICENSE.rst
from .table import Column, Table, TableColumns, Row, MaskedColumn, WARN_COLUMN_ARGS
from .table import Column, Table, TableColumns, Row, MaskedColumn

# Import routines that connect readers/writers to astropy.table
from ..io.ascii import connect
41 changes: 16 additions & 25 deletions astropy/table/table.py
@@ -29,33 +29,24 @@
'The template that determines the name of a column if it cannot be '
'determined. Uses new-style (format method) string formatting')

WARN_COLUMN_ARGS = ConfigurationItem("warn_column_args",
True,
"Show a warning when a Column is created "
"in a way that will break in Astropy 0.3")
WARN_COLUMN_ARGS_MESSAGE = \
"""In the next major release of astropy (0.3), the order of function
arguments for creating a {class_name} will change. Currently the order is
{class_name}(name, data, ...), but in 0.3 and later it will be
{class_name}(data, name, ...). This is consistent with Table and NumPy.

In order to use the same code for Astropy 0.2 and 0.3, column objects
should be created using named keyword arguments for data and name, e.g.:
{class_name}(name='a', data=[1, 2])."""
ERROR_COLUMN_ARGS_MESSAGE = """
The first argument to {class_name} is the string {first_arg}, which was probably intended
as the column name. Starting in Astropy 0.3 the argument order for initializing
a {class_name} object is {class_name}(data=None, name=None, ...)."""


def _check_column_new_args(func):
"""
Decorator for Column and MaskedColumn __new__(cls, ...) to check that there
is only one ``args`` value (which is the class). Everything else
should be a keyword argument. Otherwise the calling code will break
when the name and data args are swapped in 0.3.
Decorator for transition from 0.2 arg order (name, data, ..) to 0.3 order (data,
name, ...). Check if user provided a string as the first arg (note that a string
cannot be valid as ``data``). Raise an error with a useful message.
"""
@functools.wraps(func)
def wrapper(*args, **kwargs):
if len(args) > 1 and WARN_COLUMN_ARGS():
if len(args) > 1 and isinstance(args[1], basestring):
cls = args[0] # Column or MaskedColumn class from __new__(cls, ..)
warnings.warn(WARN_COLUMN_ARGS_MESSAGE.format(class_name=cls.__name__))
raise ValueError(ERROR_COLUMN_ARGS_MESSAGE.format(class_name=cls.__name__,
first_arg=repr(args[1])))
return func(*args, **kwargs)
return wrapper
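
A minimal sketch of what the check above means for a caller still using the old positional order (assuming the decorated __new__ shown in this hunk; the error text comes from ERROR_COLUMN_ARGS_MESSAGE):

    from astropy.table import Column

    try:
        Column('a', [1, 2, 3])   # string in the data slot, i.e. the old 0.2 order
    except ValueError as err:
        # The message points the caller at the new signature:
        # Column(data=None, name=None, ...)
        print(err)
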

@@ -386,10 +377,10 @@ class Column(BaseColumn, np.ndarray):

Parameters
----------
name : str
Column name and key for reference within Table
data : list, ndarray or None
Column data values
name : str
Column name and key for reference within Table
dtype : numpy.dtype compatible value
Data type for column
shape : tuple or ()
@@ -460,7 +451,7 @@ class Column(BaseColumn, np.ndarray):
"""

@_check_column_new_args
def __new__(cls, name=None, data=None,
def __new__(cls, data=None, name=None,
dtype=None, shape=(), length=0,
description=None, units=None, format=None, meta=None):
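
With the swapped signature above, data can now be passed first positionally; the remaining keywords (dtype, shape, length, description, units, format, meta) are unchanged. A small sketch with illustrative values:

    from astropy.table import Column

    ra = Column([10.68, 83.82], name='ra', units='deg',
                description='Right ascension')              # data first, name second
    dec = Column(data=[41.27, -5.39], name='dec', units='deg')
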

@@ -516,10 +507,10 @@ class MaskedColumn(BaseColumn, ma.MaskedArray):

Parameters
----------
name : str
Column name and key for reference within Table
data : list, ndarray or None
Column data values
name : str
Column name and key for reference within Table
mask : list, ndarray or None
Boolean mask for which True indicates missing or invalid data
fill_value : float, int, str or None
@@ -597,7 +588,7 @@ class MaskedColumn(BaseColumn, ma.MaskedArray):
"""

@_check_column_new_args
def __new__(cls, name=None, data=None, mask=None, fill_value=None,
def __new__(cls, data=None, name=None, mask=None, fill_value=None,
dtype=None, shape=(), length=0,
description=None, units=None, format=None, meta=None):
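
The same swap applies to MaskedColumn, with the mask and fill_value keywords from the signature above (illustrative sketch):

    from astropy.table import MaskedColumn

    m = MaskedColumn(data=[1, 2, 3], name='a',
                     mask=[False, True, False], fill_value=-999)
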
