docs(typos): Fix several spelling errors
I also updated the .gitignore to align with what we use on other repos.
chriswmackey authored and Chris Mackey committed Apr 29, 2020
1 parent 8348b8c commit b2d2def
Showing 24 changed files with 153 additions and 164 deletions.
11 changes: 6 additions & 5 deletions .gitignore
@@ -1,16 +1,17 @@
*.pyc
test.py
ladybug/dotnet.py
ladybug/geometry.py
ladybug/output.py
ladybug/sunpathplus.py
.coverage
*.gh
.pytest_cache
*/__pycache__
.coverage
.ipynb_checkpoints
*.ipynb
.tox
*.egg-info
.eggs/*
tox.ini
build
dist
venv
/.cache
/.vscode
4 changes: 0 additions & 4 deletions ladybug/__init__.py
@@ -9,10 +9,6 @@
import importlib
import pkgutil

# This is a variable to check if the library is a [+] library.
# TODO: Remove this line once the sunpath module no longer uses the isplus property
setattr(sys.modules[__name__], 'isplus', False)

# set up the logger
logger = get_logger(__name__)

12 changes: 6 additions & 6 deletions ladybug/_datacollectionbase.py
@@ -501,7 +501,7 @@ def compute_function_aligned(funct, data_collections, data_type, unit):
pressure_at_denver]
humid_ratio = HourlyContinuousCollection.compute_function_aligned(
humid_ratio_from_db_rh, hr_inputs, HumidityRatio(), 'fraction')
# humid_ratio will be a Data Colleciton of humidity ratios at Denver
# humid_ratio will be a Data Collection of humidity ratios at Denver
"""
# check that all inputs are either data collections or floats
data_colls = []
@@ -512,7 +512,7 @@ def compute_function_aligned(funct, data_collections, data_type, unit):
try:
data_collections[i] = float(func_input)
except ValueError:
raise TypeError('Expected a number or a Data Colleciton. '
raise TypeError('Expected a number or a Data Collection. '
'Got {}'.format(type(func_input)))

# run the function and return the result
@@ -574,7 +574,7 @@ def _replace_operators(statement):

@staticmethod
def _restore_operators(statement):
"""Restore python logical operators from previusly replaced ones."""
"""Restore python logical operators from previously replaced ones."""
return statement.replace("&&", "and").replace("||", "or") \
.replace("~", "not").replace("<<", "in").replace("$", "is")

@@ -777,8 +777,8 @@ def _check_values(self, values):
isinstance(values, (str, dict, bytes, bytearray)), \
'values should be a list or tuple. Got {}'.format(type(values))
assert len(values) == len(self.datetimes), \
'Length of values list must match length of datetimes list. {} != {}'.format(
len(values), len(self.datetimes))
'Length of values list must match length of datetimes list. ' \
'{} != {}'.format(len(values), len(self.datetimes))
assert len(values) > 0, 'Data Collection must include at least one value'

def _check_aligned_header(self, data_type, unit):
@@ -904,7 +904,7 @@ def _sub_values(self, other):
new_vals = [v_1 - other for v_1 in self._values]
else:
assert self._collection_type == other._collection_type, \
'{} cannot be subtrated from {}'.format(other.__class__, self.__class__)
'{} cannot be subtracted from {}'.format(other.__class__, self.__class__)
assert len(self) == len(other), 'Length of DataCollections must match ' \
'to subtract one from the other. {} != {}'.format(len(self), len(other))
new_vals = [v_1 - v_2 for v_1, v_2 in zip(self._values, other._values)]
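
The compute_function_aligned docstring above shows how a psychrometric function can be mapped over aligned Data Collections, with plain numbers broadcast across every datetime. A minimal sketch of that pattern, assuming the import locations below and a hypothetical './denver.epw' file path:

from ladybug.epw import EPW
from ladybug.datacollection import HourlyContinuousCollection
from ladybug.datatype.fraction import HumidityRatio
from ladybug.psychrometrics import humid_ratio_from_db_rh

epw = EPW('./denver.epw')  # hypothetical path to an EPW weather file
pressure_at_denver = 85000  # a plain number is broadcast across all datetimes
hr_inputs = [epw.dry_bulb_temperature, epw.relative_humidity, pressure_at_denver]
humid_ratio = HourlyContinuousCollection.compute_function_aligned(
    humid_ratio_from_db_rh, hr_inputs, HumidityRatio(), 'fraction')
# humid_ratio is now a Data Collection of humidity ratios at Denver

Anything that is neither a Data Collection nor coercible to a float raises the TypeError shown in the diff above.
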
18 changes: 9 additions & 9 deletions ladybug/color.py
@@ -159,13 +159,13 @@ class Colorset(object):
.. code-block:: python
# initiare colorsets
cs = Colorset()
print(cs[0])
>> [<R:75, G:107, B:169>, <R:115, G:147, B:202>, <R:170, G:200, B:247>,
<R:193, G:213, B:208>, <R:245, G:239, B:103>, <R:252, G:230, B:74>,
<R:239, G:156, B:21>, <R:234, G:123, B:0>, <R:234, G:74, B:0>,
<R:234, G:38, B:0>]
# initialize colorsets
cs = Colorset()
print(cs[0])
>> [<R:75, G:107, B:169>, <R:115, G:147, B:202>, <R:170, G:200, B:247>,
<R:193, G:213, B:208>, <R:245, G:239, B:103>, <R:252, G:230, B:74>,
<R:239, G:156, B:21>, <R:234, G:123, B:0>, <R:234, G:74, B:0>,
<R:234, G:38, B:0>]
"""
# base color sets for which there are several variations
_multicolored = [(4, 25, 145), (7, 48, 224), (7, 88, 255), (1, 232, 255),
@@ -367,7 +367,7 @@ def multicolored_3(cls):
def openstudio_palette(cls):
"""Standard color set for the OpenStudio surface types. Ordered as follows.
Exterior Wall, Interior Wall, Undreground Wall,
Exterior Wall, Interior Wall, Underground Wall,
Roof, Ceiling, Underground Roof,
Exposed Floor, Interior Floor, Ground Floor,
Window, Door, Shade, Air
@@ -399,7 +399,7 @@ class ColorRange(object):
R, G, B values. Default is Ladybug's original colorset.
domain: A list of at least two numbers to set the lower and upper
boundary of the color range. This can also be a list of more than
two values, which can be used to approximate logartihmic or other types
two values, which can be used to approximate logarithmic or other types
of color scales. However, the number of values in the domain must
always be less than or equal to the number of colors.
Default: [0, 1].
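
The Colorset and ColorRange docstrings above describe how a stock palette can be stretched over a numerical domain. A rough sketch of that usage; the color() lookup method name is an assumption not shown in this diff:

from ladybug.color import Colorset, ColorRange

cs = Colorset()
# stretch the default palette over a 0-100 domain
color_range = ColorRange(colors=cs[0], domain=[0, 100])
# color() is an assumed method name for mapping a value to an interpolated color
print(color_range.color(72))
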
2 changes: 1 addition & 1 deletion ladybug/compass.py
@@ -13,7 +13,7 @@
class Compass(object):
"""Object for computing geometry for the compass used by a variety of graphics.
Methods to project points to orthograhpic and sterographic projectsions are
Methods to project points to orthograhpic and stereographic projectsions are
also within this class so that "domed" visualizations can be synchronized with
the compass in the 2D plane.
16 changes: 8 additions & 8 deletions ladybug/datacollection.py
@@ -356,7 +356,7 @@ def validate_analysis_period(self):
5) Datetimes for February 29th are excluded if is_leap_year is False on
the analysis_period.
Note that there is no need to run this check any time that a discontinous
Note that there is no need to run this check any time that a discontinuous
data collection has been derived from a continuous one or when the
validated_a_period attribute of the collection is True. Furthermore, most
methods on this data collection will still run without a validated
@@ -384,7 +384,7 @@ def validate_analysis_period(self):
sort_datetimes = sort_datetimes[last_ind:] + sort_datetimes[:last_ind]
sort_values = sort_values[last_ind:] + sort_values[:last_ind]
# If datetimes are outside the a_period range, just make it annual.
# There's no way to know what side of the analysis_period should be etended.
# There's no way to know what side of the analysis_period should be extended.
if sort_datetimes[0].doy > a_per.end_time.doy and \
sort_datetimes[0].doy < a_per.st_time.doy:
n_ap[0], n_ap[1], n_ap[3], n_ap[4] = 1, 1, 12, 31
@@ -619,9 +619,9 @@ def interpolate_to_timestep(self, timestep, cumulative=None):
Args:
timestep: Target timestep as an integer. Target timestep must be
divisable by current timestep.
divisible by current timestep.
cumulative: A boolean that sets whether the interpolation
should treat the data colection values as cumulative, in
should treat the data collection values as cumulative, in
which case the value at each timestep is the value over
that timestep (instead of over the hour). The default will
check the DataType to see if this type of data is typically
@@ -632,7 +632,7 @@
the input timestep.
"""
assert timestep % self.header.analysis_period.timestep == 0, \
'Target timestep({}) must be divisable by current timestep({})' \
'Target timestep({}) must be divisible by current timestep({})' \
.format(timestep, self.header.analysis_period.timestep)
if cumulative is not None:
assert isinstance(cumulative, bool), \
@@ -732,7 +732,7 @@ def filter_by_analysis_period(self, analysis_period):
return _filtered_data

def filter_by_hoys(self, hoys):
"""Filter the Data Collection based onva list of hoys.
"""Filter the Data Collection using a list of hoys.
Args:
hoys: A List of hours of the year 0..8759
@@ -1119,7 +1119,7 @@ def validate_analysis_period(self):
4) February 29th is excluded if is_leap_year is False on the analysis_period.
Note that there is no need to run this check any time that a discontinous
Note that there is no need to run this check any time that a discontinuous
data collection has been derived from a continuous one or when the
validated_a_period attribute of the collection is True.
"""
@@ -1153,7 +1153,7 @@ def validate_analysis_period(self):
sort_datetimes = sort_datetimes[last_ind:] + sort_datetimes[:last_ind]
sort_values = sort_values[last_ind:] + sort_values[:last_ind]
# If datetimes are outside the a_period range, just make it annual.
# There's no way to know what side of the analysis_period should be etended.
# There's no way to know what side of the analysis_period should be extended.
if sort_datetimes[0] > a_per.end_time.doy and \
sort_datetimes[0] < a_per.st_time.doy:
n_ap[0], n_ap[1], n_ap[3], n_ap[4] = 1, 1, 12, 31
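
The docstrings above document interpolate_to_timestep and filter_by_hoys. A small sketch under the assumption that a collection can be built from a Header, an AnalysisPeriod and a list of 8760 values (those constructor details are not part of this diff):

from ladybug.header import Header
from ladybug.analysisperiod import AnalysisPeriod
from ladybug.datacollection import HourlyContinuousCollection
from ladybug.datatype.temperature import Temperature

# an annual hourly collection of a constant 20 C (constructor details assumed)
header = Header(Temperature(), 'C', AnalysisPeriod())
temperature = HourlyContinuousCollection(header, [20] * 8760)

# the target timestep (4 per hour) must be divisible by the current one (1)
temp_15min = temperature.interpolate_to_timestep(4)

# filter using a list of hours of the year (0..8759)
morning = temperature.filter_by_hoys([7, 8, 9])
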
6 changes: 3 additions & 3 deletions ladybug/datacollectionimmutable.py
@@ -7,7 +7,7 @@
The only exceptions to this rule are:
* duplicate() - which will always return an exact copy of the collection including
its mutabiliy.
its mutability.
* get_aligned_collection() - which follows the mutability of the starting collection
by default but includes an parameter to override this.
@@ -54,7 +54,7 @@ def __setitem__(self, key, value):

class HourlyDiscontinuousCollectionImmutable(
_ImmutableCollectionBase, HourlyDiscontinuousCollection):
"""Immutable Discontinous Data Collection at hourly or sub-hourly intervals."""
"""Immutable Discontinuous Data Collection at hourly or sub-hourly intervals."""

def convert_to_culled_timestep(self, timestep=1):
"""This method is not available for immutable collections."""
@@ -67,7 +67,7 @@ def to_mutable(self):

class HourlyContinuousCollectionImmutable(
_ImmutableCollectionBase, HourlyContinuousCollection):
"""Immutable Continous Data Collection at hourly or sub-hourly intervals."""
"""Immutable Continuous Data Collection at hourly or sub-hourly intervals."""

def convert_to_culled_timestep(self, timestep=1):
"""This method is not available for immutable collections."""
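
The module docstring above explains that duplicate() preserves mutability while to_mutable() returns an editable copy. A brief sketch continuing from the temperature collection above; the to_immutable() method is an assumption, since only to_mutable() appears in this diff:

# lock the collection; to_immutable() is assumed here
locked = temperature.to_immutable()
copy_of_locked = locked.duplicate()  # duplicate() keeps the immutability
unlocked = locked.to_mutable()       # back to an editable collection
unlocked[0] = 25
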
2 changes: 1 addition & 1 deletion ladybug/datatype/__init__.py
@@ -1,7 +1,7 @@
# coding=utf-8
"""Module of Data Types (eg. Temperature, Area, etc.)
Possesses capabilites for unit conversions and range checks.
Possesses capabilities for unit conversions and range checks.
It also includes descriptions of the data types and the units.
Properties:
8 changes: 4 additions & 4 deletions ladybug/datatype/base.py
@@ -209,10 +209,10 @@ def name(self):

@property
def units(self):
"""A tuple of all accetpable units of the data type as abbreviated text.
"""A tuple of all acceptable units of the data type as abbreviated text.
The first item of the list should be the standard SI unit.
The second item of the list should be the stadard IP unit (if it exists).
The second item of the list should be the standard IP unit (if it exists).
The rest of the list can be any other acceptable units.
(eg. [C, F, K])
"""
@@ -268,7 +268,7 @@ def point_in_time(self):
"""Boolean to note whether data type is for a single instant in time.
If False, the data type is meant to represent an average or accumulation
over time whenenever found in an array of time series data.
over time whenever found in an array of time series data.
(True Examples: Temperature, WindSpeed)
(False Examples: Energy, Radiation, Illuminance)
"""
@@ -290,7 +290,7 @@ def normalized_type(self):
"""A data type object representing the area-normalized version of this data type.
This will be None if the data type cannot be normalized per unit area to
yeild a meaningful data type.
yield a meaningful data type.
"""
return self._normalized_type

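
The units and point_in_time docstrings above can be checked directly on concrete data types. A minimal sketch, assuming Temperature and Energy live in the modules below:

from ladybug.datatype.temperature import Temperature
from ladybug.datatype.energy import Energy

print(Temperature().units)          # ('C', 'F', 'K') -- SI unit listed first
print(Temperature().point_in_time)  # True: an instantaneous quantity
print(Energy().point_in_time)       # False: accumulated over time
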
2 changes: 1 addition & 1 deletion ladybug/datatype/generic.py
@@ -32,7 +32,7 @@ class GenericType(DataTypeBase):
def __init__(self, name, unit, min=float('-inf'), max=float('+inf'),
abbreviation=None, unit_descr=None, point_in_time=True,
cumulative=False):
"""Initalize Generic Type.
"""Initialize Generic Type.
"""
assert isinstance(name, str), 'name must be a string. Got {}.'.format(type(name))
assert isinstance(unit, str), 'unit must be a string. Got {}.'.format(type(unit))
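
Using the GenericType signature shown above, a custom data type can be declared with its own range bounds; the name and unit here are made up for illustration:

from ladybug.datatype.generic import GenericType

# values outside min/max should fail the type's range check
air_quality = GenericType('Air Quality Index', 'AQI', min=0, max=500)
print(air_quality.units)
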
4 changes: 2 additions & 2 deletions ladybug/ddy.py
@@ -57,7 +57,7 @@ def from_dict(cls, data):

@classmethod
def from_ddy_file(cls, file_path):
"""Initalize from a ddy file object from an existing ddy file.
"""Initialize from a ddy file object from an existing ddy file.
Args:
file_path: A string representing a complete path to the .ddy file.
@@ -103,7 +103,7 @@ def from_ddy_file(cls, file_path):

@classmethod
def from_design_day(cls, design_day):
"""Initalize from a ddy file object from a ladybug design day object.
"""Initialize from a ddy file object from a ladybug design day object.
Args:
design_day: A Ladybug DesignDay object.
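
The classmethods above load design days either from a .ddy file or from a single DesignDay object. A sketch assuming the class is named DDY and exposes a design_days property (neither detail is shown in this diff); the file path is hypothetical:

from ladybug.ddy import DDY

# hypothetical path to an existing .ddy file
ddy_obj = DDY.from_ddy_file('./USA_CO_Denver.Intl.AP.725650_TMY3.ddy')
print(len(ddy_obj.design_days))  # design_days is an assumed property name
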