Merge pull request #39 from thombashi/develop
Develop
thombashi committed Aug 11, 2016
2 parents 32720b2 + 93a2a8d commit 4d552de
Showing 18 changed files with 111 additions and 91 deletions.
4 changes: 2 additions & 2 deletions docs/make_readme.py
@@ -31,7 +31,7 @@ def write_examples(maker):
maker.write_chapter("For more information")
maker.write_line_list([
"More examples are available at ",
"http://%s.readthedocs.org/en/latest/pages/examples/index.html" % (
"http://{:s}.readthedocs.org/en/latest/pages/examples/index.html".format(
PROJECT_NAME.lower()),
])

@@ -54,7 +54,7 @@ def main():
maker.set_indent_level(0)
maker.write_chapter("Documentation")
maker.write_line_list([
"http://%s.readthedocs.org/en/latest/" % (PROJECT_NAME.lower()),
"http://{:s}.readthedocs.org/en/latest/".format(PROJECT_NAME.lower()),
])

maker.write_chapter("Related project")
6 changes: 6 additions & 0 deletions docs/pages/reference/error.rst
@@ -12,3 +12,9 @@ Errors

.. autoexception:: simplesqlite.SqlSyntaxError
:show-inheritance:

.. autoexception:: simplesqlite.loader.InvalidDataError
:show-inheritance:

.. autoexception:: simplesqlite.loader.ValidationError
:show-inheritance:
1 change: 1 addition & 0 deletions requirements/docs_requirements.txt
@@ -1 +1,2 @@
Sphinx
sphinx_rtd_theme
2 changes: 1 addition & 1 deletion setup.py
@@ -23,7 +23,7 @@

setuptools.setup(
name="SimpleSQLite",
version="0.4.4",
version="0.4.5",
url="https://github.com/thombashi/SimpleSQLite",
bugtrack_url="https://github.com/thombashi/SimpleSQLite/issues",

6 changes: 3 additions & 3 deletions simplesqlite/_func.py
@@ -54,9 +54,9 @@ def append_table(con_src, con_dst, table_name):
if src_attr_list != dst_attr_list:
raise ValueError("""
source and destination attribute is different from each other
src: %s
dst: %s
""" % (str(src_attr_list), str(dst_attr_list)))
src: {:s}
dst: {:s}
""".format(str(src_attr_list), str(dst_attr_list)))

result = con_src.select(select="*", table_name=table_name)
if result is None:
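Most of this commit replaces old-style "%" string interpolation with str.format(), as in the _func.py hunk above. Below is a minimal, self-contained sketch of the two equivalent styles; the values are placeholders and not taken from the code base.

    # Old-style "%" interpolation, as on the removed lines.
    src_attr_list = ["a", "b"]
    dst_attr_list = ["a", "c"]
    old_message = "src: %s dst: %s" % (str(src_attr_list), str(dst_attr_list))

    # New-style str.format(); the "{:s}" spec only accepts str arguments
    # (e.g. "{:s}".format(src_attr_list) raises TypeError), which is why the
    # values stay wrapped in str() after the conversion.
    new_message = "src: {:s} dst: {:s}".format(
        str(src_attr_list), str(dst_attr_list))

    assert old_message == new_message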
69 changes: 34 additions & 35 deletions simplesqlite/core.py
@@ -222,17 +222,16 @@ def execute_query(self, query, caller=None):

try:
result = self.connection.execute(query)
except sqlite3.OperationalError:
_, e, _ = sys.exc_info() # for python 2.5 compatibility
except sqlite3.OperationalError as e:
if caller is None:
caller = logging.getLogger().findCaller()
file_path, line_no, func_name = caller[:3]
message_list = [
"failed to execute query at %s(%d) %s" % (
"failed to execute query at %{:s}({:d}) {:s}".format(
file_path, line_no, func_name),
" - query: %s" % (query),
" - msg: %s" % (e),
" - db: %s" % (self.database_path),
" - query: {:s}".format(query),
" - msg: {:s}".format(str(e)),
" - db: {:s}".format(self.database_path),
]
raise sqlite3.OperationalError(os.linesep.join(message_list))

@@ -323,17 +322,16 @@ def insert_many(self, table_name, insert_record_list):

try:
self.connection.executemany(query, record_list)
except sqlite3.OperationalError:
_, e, _ = sys.exc_info() # for python 2.5 compatibility
except sqlite3.OperationalError as e:
caller = logging.getLogger().findCaller()
file_path, line_no, func_name = caller[:3]
raise sqlite3.OperationalError(
"%s(%d) %s: failed to execute query:\n" % (
"{:s}({:d}) {:s}: failed to execute query:\n".format(
file_path, line_no, func_name) +
" query=%s\n" % (query) +
" msg='%s'\n" % (str(e)) +
" db=%s\n" % (self.database_path) +
" records=%s\n" % (record_list[:2])
" query={:s}\n".format(query) +
" msg='{:s}'\n".format(str(e)) +
" db={:s}\n".format(self.database_path) +
" records={:s}\n".format(record_list[:2])
)

def update(self, table_name, set_query, where=None):
@@ -476,7 +474,7 @@ def get_attribute_name_list(self, table_name):

self.verify_table_existence(table_name)

query = "SELECT * FROM '%s'" % (table_name)
query = "SELECT * FROM '{:s}'".format(table_name)
result = self.execute_query(query, logging.getLogger().findCaller())

return self.__get_list_from_fetch(result.description)
@@ -498,7 +496,7 @@ def get_attr_type(self, table_name):
self.verify_table_existence(table_name)

result = self.execute_query(
"SELECT sql FROM sqlite_master WHERE type='table' and name=%s" % (
"SELECT sql FROM sqlite_master WHERE type='table' and name={:s}".format(
SqlQuery.to_value_str(table_name)))
query = result.fetchone()[0]
match = re.search("[(].*[)]", query)
@@ -527,9 +525,9 @@ def get_attribute_type_list(self, table_name):
self.verify_table_existence(table_name)

attribute_name_list = self.get_attribute_name_list(table_name)
query = "SELECT DISTINCT %s FROM '%s'" % (
query = "SELECT DISTINCT {:s} FROM '{:s}'".format(
",".join([
"TYPEOF(%s)" % (SqlQuery.to_attr_str(attribute))
"TYPEOF({:s})".format(SqlQuery.to_attr_str(attribute))
for attribute in attribute_name_list]),
table_name)
result = self.execute_query(query, logging.getLogger().findCaller())
@@ -583,9 +581,9 @@ def get_profile(self, profile_count=50):

try:
result = con_tmp.select(
select="%s,SUM(%s),SUM(%s)" % attribute_name_list,
select="{:s},SUM({:s}),SUM({:s})".format(*attribute_name_list),
table_name=profile_table_name,
extra="GROUP BY %s ORDER BY %s DESC LIMIT %d" % (
extra="GROUP BY {:s} ORDER BY {:s} DESC LIMIT {:d}".format(
"query", "cumulative_time", profile_count))
except sqlite3.OperationalError:
return []
@@ -832,7 +830,8 @@ def verify_table_existence(self, table_name):
return

raise TableNotFoundError(
"'%s' table not found in %s" % (table_name, self.database_path))
"'{:s}' table not found in {:s}".format(
table_name, self.database_path))

def verify_attribute_existence(self, table_name, attribute_name):
"""
@@ -879,7 +878,7 @@ def verify_attribute_existence(self, table_name, attribute_name):
return

raise AttributeNotFoundError(
"'%s' attribute not found in '%s' table" % (
"'{:s}' attribute not found in '{:s}' table".format(
attribute_name, table_name))

def drop_table(self, table_name):
@@ -893,7 +892,7 @@ def drop_table(self, table_name):
self.validate_access_permission(["w", "a"])

if self.has_table(table_name):
query = "DROP TABLE IF EXISTS '%s'" % (table_name)
query = "DROP TABLE IF EXISTS '{:s}'".format(table_name)
self.execute_query(query, logging.getLogger().findCaller())
self.commit()

@@ -912,7 +911,7 @@ def create_table(self, table_name, attribute_description_list):
if self.has_table(table_name):
return True

query = "CREATE TABLE IF NOT EXISTS '%s' (%s)" % (
query = "CREATE TABLE IF NOT EXISTS '{:s}' ({:s})".format(
table_name, ", ".join(attribute_description_list))
if self.execute_query(query, logging.getLogger().findCaller()) is None:
return False
@@ -934,14 +933,14 @@ def create_index(self, table_name, attribute_name):
self.verify_table_existence(table_name)
self.validate_access_permission(["w", "a"])

index_name = "%s_%s_index" % (
index_name = "{:s}_{:s}_index".format(
SqlQuery.sanitize(table_name), SqlQuery.sanitize(attribute_name))
if attribute_name.find("'") != -1:
query_format = 'CREATE INDEX IF NOT EXISTS %s ON %s("%s")'
query_format = 'CREATE INDEX IF NOT EXISTS {:s} ON {:s}("{:s}")'
else:
query_format = "CREATE INDEX IF NOT EXISTS %s ON %s('%s')"
query_format = "CREATE INDEX IF NOT EXISTS {:s} ON {:s}('{:s}')"

query = query_format % (
query = query_format.format(
index_name,
SqlQuery.to_table_str(table_name),
attribute_name)
@@ -1000,7 +999,7 @@ def create_table_with_data(
self.validate_access_permission(["w", "a"])

if dataproperty.is_empty_sequence(data_matrix):
raise ValueError("input data is null: '%s (%s)'" % (
raise ValueError("input data is null: '{:s} ({:s})'".format(
table_name, ", ".join(attribute_name_list)))

data_matrix = RecordConvertor.to_record_list(
@@ -1209,10 +1208,10 @@ def __verify_value_matrix(field_list, value_matrix):

raise ValueError(
"miss match header length and value length:" +
" header: %d %s\n" % (len(field_list), str(field_list)) +
" # of miss match line: %d ouf of %d\n" % (
" header: {:d} {:s}\n".format(len(field_list), str(field_list)) +
" # of miss match line: {:d} ouf of {:d}\n".format(
len(miss_match_idx_list), len(value_matrix)) +
" e.g. value at line=%d, len=%d: %s\n" % (
" e.g. value at line={:d}, len={:d}: {:s}\n".format(
miss_match_idx_list[0],
len(sample_miss_match_list), str(sample_miss_match_list))
)
@@ -1241,11 +1240,11 @@ def __get_attr_desc_list(self, attr_name_list, data_matrix):
six.iteritems(self.__get_column_valuetype(data_matrix))):
attr_name = attr_name_list[col]
if attr_name.find("'") != -1:
desc_format = '"%s" %s'
desc_format = '"{:s}" {:s}'
else:
desc_format = "'%s' %s"
desc_format = "'{:s}' {:s}"
attr_description_list.append(
desc_format % (attr_name, value_type))
desc_format.format(attr_name, value_type))

return attr_description_list

@@ -1268,7 +1267,7 @@ def validate_access_permission(self, valid_permission_list):

if self.mode not in valid_permission_list:
raise IOError(
"invalid access: expected-mode='%s', current-mode='%s'" % (
"invalid access: expected-mode='{:s}', current-mode='{:s}'".format(
"' or '".join(valid_permission_list), self.mode))

@staticmethod
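The core.py hunks above touch most of the public table-handling methods. Here is a usage sketch of those methods; it assumes the connection class is simplesqlite.SimpleSQLite and that its constructor takes a database path and an access mode, neither of which appears in this diff, while the method names and arguments are the ones visible in the hunks.

    from simplesqlite import SimpleSQLite

    # Assumed constructor: SimpleSQLite(database_path, mode).
    con = SimpleSQLite("example.sqlite", "w")

    # create_table(table_name, attribute_description_list); the descriptions
    # follow the "'name' TYPE" form built by __get_attr_desc_list above.
    con.create_table("sample_table", ["'attr_a' INTEGER", "'attr_b' TEXT"])

    # insert_many(table_name, insert_record_list) and
    # create_index(table_name, attribute_name), as in their hunks.
    con.insert_many("sample_table", [[1, "foo"], [2, "bar"]])
    con.create_index("sample_table", "attr_a")
    con.commit()

    print(con.get_attribute_name_list("sample_table"))

    # select() returns a cursor-like result or None, as used elsewhere
    # in this diff.
    result = con.select(select="*", table_name="sample_table")
    if result is not None:
        print(result.fetchall())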
6 changes: 4 additions & 2 deletions simplesqlite/loader/csv/core.py
@@ -106,7 +106,8 @@ def load(self):
Table name is determined by
:py:meth:`~.CsvTableFileLoader.make_table_name`.
:rtype: iterator of |TableData|
:raises InvalidDataError: If the CSV data is invalid.
:raises simplesqlite.loader.InvalidDataError:
If the CSV data is invalid.
.. seealso:: :py:func:`csv.reader`
"""
@@ -136,7 +137,8 @@ def load(self):
:return: Loaded table data.
:rtype: iterator of |TableData|
:raises InvalidDataError: If the CSV data is invalid.
:raises simplesqlite.loader.InvalidDataError:
If the CSV data is invalid.
.. seealso:: :py:func:`csv.reader`
"""
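The docstring change above qualifies the raised exception as simplesqlite.loader.InvalidDataError. A sketch of handling it while iterating the loader's output follows; the import location of CsvTableFileLoader and its constructor argument (a CSV file path) are assumptions, and only the exception name and load() come from the docstrings in this diff.

    from simplesqlite.loader import CsvTableFileLoader, InvalidDataError

    loader = CsvTableFileLoader("sample_data.csv")  # hypothetical file path
    try:
        # load() returns an iterator of TableData per the docstring above.
        for table_data in loader.load():
            print(table_data)
    except InvalidDataError as e:
        print("cannot load CSV data: {}".format(e))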
8 changes: 6 additions & 2 deletions simplesqlite/loader/error.py
@@ -9,8 +9,12 @@


class ValidationError(Exception):
pass
"""
Raised when data is not properly formatted.
"""


class InvalidDataError(Exception):
pass
"""
Raised when data is invalid to load.
"""
4 changes: 2 additions & 2 deletions simplesqlite/loader/formatter.py
@@ -21,11 +21,11 @@ class TableFormatterInterface(object):
"""

@abc.abstractmethod
def to_table_data(self): # pragma: no cover
def to_table_data(self): # pragma: no cover
pass

@abc.abstractmethod
def _validate_source_data(self): # pragma: no cover
def _validate_source_data(self): # pragma: no cover
pass


8 changes: 4 additions & 4 deletions simplesqlite/loader/interface.py
@@ -19,19 +19,19 @@ class TableLoaderInterface(object):
"""

@abc.abstractmethod
def load(self): # pragma: no cover
def load(self): # pragma: no cover
pass

@abc.abstractmethod
def _validate(self): # pragma: no cover
def _validate(self): # pragma: no cover
pass

@abc.abstractmethod
def _validate_table_name(self): # pragma: no cover
def _validate_table_name(self): # pragma: no cover
pass

@abc.abstractmethod
def _validate_source(self): # pragma: no cover
def _validate_source(self): # pragma: no cover
pass


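The interface.py hunk above only adjusts comment spacing, but it shows the four abstract methods every loader must provide. A minimal sketch of a custom loader that satisfies the interface; the class name and its no-op behavior are hypothetical, while the method names and module path come from this file.

    from simplesqlite.loader.interface import TableLoaderInterface

    class NullTableLoader(TableLoaderInterface):
        """Hypothetical loader that validates nothing and yields nothing."""

        def load(self):
            self._validate()
            return iter([])  # a real loader would yield TableData instances

        def _validate(self):
            self._validate_table_name()
            self._validate_source()

        def _validate_table_name(self):
            pass

        def _validate_source(self):
            pass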
10 changes: 5 additions & 5 deletions simplesqlite/loader/json/core.py
@@ -6,11 +6,7 @@


from __future__ import absolute_import

try:
import json
except ImportError:
import simplejson as json
import json

import path

@@ -93,6 +89,10 @@ def load(self):
:return: Loaded table data.
:rtype: iterator of |TableData|
:raises simplesqlite.loader.InvalidDataError:
If the data is invalid JSON.
:raises simplesqlite.loader.ValidationError:
If the data is not in an acceptable JSON format.
"""

self._validate()
6 changes: 3 additions & 3 deletions simplesqlite/loader/json/formatter.py
@@ -27,7 +27,7 @@ def __init__(self, json_buffer):
self._buffer = json_buffer

@abc.abstractproperty
def _schema(self): # pragma: no cover
def _schema(self): # pragma: no cover
pass

def _validate_source_data(self):
@@ -68,7 +68,7 @@ def _schema(self):
def to_table_data(self):
"""
:raises ValueError:
:raises ValidationError:
:raises simplesqlite.loader.ValidationError:
"""

self._validate_source_data()
@@ -109,7 +109,7 @@ def _schema(self):
def to_table_data(self):
"""
:raises ValueError:
:raises ValidationError:
:raises simplesqlite.loader.ValidationError:
"""

self._validate_source_data()
10 changes: 5 additions & 5 deletions simplesqlite/loader/spreadsheet/core.py
@@ -35,23 +35,23 @@ def __init__(self, source):
self._end_col_idx = None

@abc.abstractproperty
def _sheet_name(self): # pragma: no cover
def _sheet_name(self): # pragma: no cover
pass

@abc.abstractproperty
def _row_count(self): # pragma: no cover
def _row_count(self): # pragma: no cover
pass

@abc.abstractproperty
def _col_count(self): # pragma: no cover
def _col_count(self): # pragma: no cover
pass

@abc.abstractmethod
def _is_empty_sheet(self): # pragma: no cover
def _is_empty_sheet(self): # pragma: no cover
pass

@abc.abstractmethod
def _get_start_row_idx(self): # pragma: no cover
def _get_start_row_idx(self): # pragma: no cover
pass

def make_table_name(self):