add more checks
Davies Liu committed Feb 27, 2015
1 parent 46999dc commit 534ac90
Showing 1 changed file with 5 additions and 0 deletions.
python/pyspark/sql/types.py: 5 additions & 0 deletions
@@ -247,6 +247,7 @@ def __init__(self, elementType, containsNull=True):
>>> ArrayType(StringType(), False) == ArrayType(StringType())
False
"""
assert isinstance(elementType, DataType), "elementType should be DataType"
self.elementType = elementType
self.containsNull = containsNull

@@ -299,6 +300,8 @@ def __init__(self, keyType, valueType, valueContainsNull=True):
... == MapType(StringType(), FloatType()))
False
"""
assert isinstance(keyType, DataType), "keyType should be DataType"
assert isinstance(valueType, DataType), "valueType should be DataType"
self.keyType = keyType
self.valueType = valueType
self.valueContainsNull = valueContainsNull
@@ -355,6 +358,7 @@ def __init__(self, name, dataType, nullable=True, metadata=None):
... == StructField("f2", StringType(), True))
False
"""
assert isinstance(dataType, DataType), "dataType should be DataType"
self.name = name
self.dataType = dataType
self.nullable = nullable
@@ -403,6 +407,7 @@ def __init__(self, fields):
>>> struct1 == struct2
False
"""
assert all(isinstance(f, DataType) for f in fields), "fields should be a list of DataType"
self.fields = fields

def simpleString(self):
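The added assertions make the type constructors fail fast when they receive something that is not a DataType instance. The sketch below is a hypothetical usage example (not part of the commit) showing what the checks accept and reject, assuming a PySpark build that includes this change; the printed messages mirror the assert messages above.

from pyspark.sql.types import (ArrayType, MapType, StructField, StructType,
                               StringType, IntegerType)

# Well-formed schemas are unaffected: every nested type is a DataType instance.
ArrayType(StringType())
MapType(StringType(), IntegerType())
StructType([StructField("age", IntegerType()),
            StructField("name", StringType(), False)])

# A common mistake -- passing a type name as a plain string -- now fails
# immediately in the constructor instead of surfacing later when the schema
# is actually used.
try:
    ArrayType("string")                 # not a DataType instance
except AssertionError as e:
    print(e)                            # elementType should be DataType

try:
    MapType(StringType(), "int")        # valueType is not a DataType instance
except AssertionError as e:
    print(e)                            # valueType should be DataType

try:
    StructField("age", int)             # a Python type, not a DataType instance
except AssertionError as e:
    print(e)                            # dataType should be DataType

Note that assert statements are skipped when Python runs with the -O flag, so these checks act as a development-time guard rather than strict runtime validation.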
