diff --git a/.bumpversion.cfg b/.bumpversion.cfg
new file mode 100644
index 0000000..ea56cf5
--- /dev/null
+++ b/.bumpversion.cfg
@@ -0,0 +1,22 @@
+[bumpversion]
+current_version = 3.1.0
+
+[bumpversion:file:pyproject.toml]
+search = version = "{current_version}"
+replace = version = "{new_version}"
+
+[bumpversion:file:dbutils/__init__.py]
+search = __version__ = '{current_version}'
+replace = __version__ = '{new_version}'
+
+[bumpversion:file:README.md]
+search = The current version {current_version}
+replace = The current version {new_version}
+
+[bumpversion:file:docs/main.rst]
+search = :Version: {current_version}
+replace = :Version: {new_version}
+
+[bumpversion:file:docs/main.de.rst]
+search = :Version: {current_version}
+replace = :Version: {new_version}
diff --git a/.github/workflows/publish_on_pypi.yml b/.github/workflows/publish_on_pypi.yml
new file mode 100644
index 0000000..99cba8a
--- /dev/null
+++ b/.github/workflows/publish_on_pypi.yml
@@ -0,0 +1,30 @@
+name: Publish DBUtils on PyPI
+
+on:
+  push:
+    tags:
+      - 'Release-*'
+
+jobs:
+  publish:
+    runs-on: ubuntu-latest
+
+    steps:
+      - uses: actions/checkout@v4
+
+      - name: Set up Python
+        uses: actions/setup-python@v5
+        with:
+          python-version: "3.11"
+
+      - name: Install build tool
+        run: python -m pip install build --user
+
+      - name: Build source tarball and wheel
+        run: python -m build
+
+      - name: Publish distribution to PyPI
+        uses: pypa/gh-action-pypi-publish@release/v1
+        with:
+          user: __token__
+          password: ${{ secrets.PYPI_TOKEN }}
diff --git a/.github/workflows/test_with_tox.yml b/.github/workflows/test_with_tox.yml
new file mode 100644
index 0000000..5a0f068
--- /dev/null
+++ b/.github/workflows/test_with_tox.yml
@@ -0,0 +1,25 @@
+name: Test DBUtils using tox
+
+on: [push, pull_request]
+
+jobs:
+  test:
+    runs-on: ubuntu-latest
+    strategy:
+      matrix:
+        python: ['3.7', '3.8', '3.9', '3.10', '3.11', '3.12']
+
+    steps:
+      - uses: actions/checkout@v4
+
+      - name: Setup Python ${{ matrix.python }}
+        uses: 
actions/setup-python@v4 + with: + python-version: ${{ matrix.python }} + + - run: pip install tox + + - run: tox -e py + + - if: matrix.python == 3.11 + run: TOXENV=ruff,manifest,docs,spell tox diff --git a/.gitignore b/.gitignore index 1663b67..b9cc250 100644 --- a/.gitignore +++ b/.gitignore @@ -12,6 +12,7 @@ build dist +local .idea .tox diff --git a/.pylintrc b/.pylintrc deleted file mode 100644 index 4c3d6ec..0000000 --- a/.pylintrc +++ /dev/null @@ -1,317 +0,0 @@ -# lint Python modules using external checkers. -# -# This is the main checker controling the other ones and the reports -# generation. It is itself both a raw checker and an astng checker in order -# to: -# * handle message activation / deactivation at the module level -# * handle some basic but necessary stats'data (number of classes, methods...) - -# -[MASTER] - -# Specify a configuration file. -#rcfile= - -# Python code to execute, usually for sys.path manipulation such as -# pygtk.require(). -#init-hook= - -# Profiled execution. -profile=no - -# Add <file or directory> to the black list. It should be a base name, not a -# path. You may set this option multiple times. -ignore=CVS - -# Pickle collected data for later comparisons. -persistent=yes - -# Set the cache size for astng objects. -cache-size=500 - -# List of plugins (as comma separated values of python modules names) to load, - -# usually to register additional checkers. -load-plugins= - - -[MESSAGES CONTROL] - -# Enable only checker(s) with the given id(s). This option conflict with the -# disable-checker option -#enable-checker= - -# Enable all checker(s) except those with the given id(s). This option conflict -# with the disable-checker option -#disable-checker= - -# Enable all messages in the listed categories. -#enable-msg-cat= - -# Disable all messages in the listed categories. -#disable-msg-cat= - -# Enable the message(s) with the given id(s). -#enable-msg= - -# Disable the message(s) with the given id(s). 
-#disable-msg= - - -[REPORTS] - -# set the output format. Available formats are text, parseable, colorized, msvs -# (visual studio) and html -output-format=text - -# Include message's id in output -include-ids=no - -# Put messages in a separate file for each module / package specified on the -# command line instead of printing them on stdout. Reports (if any) will be -# written in a file name "pylint_global.[txt|html]". -files-output=no - -# Tells wether to display a full report or only the messages -reports=yes - -# Python expression which should return a note less than 10 (10 is the highest -# note).You have access to the variables errors warning, statement which -# respectivly contain the number of errors / warnings messages and the total -# number of statements analyzed. This is used by the global evaluation report -# (R0004). -evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10) - -# Add a comment according to your evaluation note. This is used by the global -# evaluation report (R0004). -comment=no - -# Enable the report(s) with the given id(s). -#enable-report= - -# Disable the report(s) with the given id(s). 
-#disable-report= - - -# checks for : - -# * doc strings -# * modules / classes / functions / methods / arguments / variables name -# * number of arguments, local variables, branchs, returns and statements in -# functions, methods -# * required module attributes -# * dangerous default values as arguments -# * redefinition of function / method / class -# * uses of the global statement -# -[BASIC] - -# Required attributes for module, separated by a comma -required-attributes= - -# Regular expression which should only match functions or classes name which do - -# not require a docstring -no-docstring-rgx=__.*__ - -# Regular expression which should only match correct module names -module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$ - -# Regular expression which should only match correct module level names -const-rgx=(([A-Z_][A-Z1-9_]*)|(__.*__))$ - -# Regular expression which should only match correct class names -class-rgx=[A-Z_][a-zA-Z0-9]+$ - -# Regular expression which should only match correct function names -function-rgx=[a-z_][a-z0-9_]{2,30}$ - -# Regular expression which should only match correct method names -method-rgx=[a-z_][a-z0-9_]{2,30}$ - -# Regular expression which should only match correct instance attribute names -attr-rgx=[a-z_][a-z0-9_]{2,30}$ - -# Regular expression which should only match correct argument names -argument-rgx=[a-z_][a-z0-9_]{2,30}$ - -# Regular expression which should only match correct variable names -variable-rgx=[a-z_][a-z0-9_]{2,30}$ - -# Regular expression which should only match correct list comprehension / - -# generator expression variable names -inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$ - -# Good variable names which should always be accepted, separated by a comma -good-names=i,j,k,ex,Run,_ - -# Bad variable names which should always be refused, separated by a comma -bad-names=foo,bar,baz,toto,tutu,tata - -# List of builtins function names that should not be used, separated by a comma -bad-functions=map,filter,apply,input - - 
-# try to find bugs in the code using type inference -# -[TYPECHECK] - -# Tells wether missing members accessed in mixin class should be ignored. A -# mixin class is detected if its name ends with "mixin" (case insensitive). -ignore-mixin-members=yes - -# When zope mode is activated, consider the acquired-members option to ignore -# access to some undefined attributes. -zope=no - -# List of members which are usually get through zope's acquisition mecanism and -# so shouldn't trigger E0201 when accessed (need zope=yes to be considered). -acquired-members=REQUEST,acl_users,aq_parent - - -# checks for -# * unused variables / imports -# * undefined variables -# * redefinition of variable from builtins or from an outer scope -# * use of variable before assigment -# -[VARIABLES] - -# Tells wether we should check for unused import in __init__ files. -init-import=no - -# A regular expression matching names used for dummy variables (i.e. not used). -dummy-variables-rgx=_|dummy - -# List of additional names supposed to be defined in builtins. Remember that - -# you should avoid to define new builtins when possible. -additional-builtins= - - -# checks for : -# * methods without self as first argument -# * overridden methods signature -# * access only to existant members via self -# * attributes not defined in the __init__ method -# * supported interfaces implementation -# * unreachable code -# -[CLASSES] - -# List of interface methods to ignore, separated by a comma. This is used for -# instance to not check methods defines in Zope's Interface base class. -ignore-iface-methods=isImplementedBy,deferred,extends,names,namesAndDescriptions,queryDescriptionFor,getBases,getDescriptionFor,getDoc,getName,getTaggedValue,getTaggedValueTags,isEqualOrExtendedBy,setTaggedValue,isImplementedByInstancesOf,adaptWith,is_implemented_by - -# List of method names used to declare (i.e. assign) instance attributes. 
-defining-attr-methods=__init__,__new__,setUp - - -# checks for sign of poor/misdesign: -# * number of methods, attributes, local variables... -# * size, complexity of functions, methods -# -[DESIGN] - -# Maximum number of arguments for function / method -#max-args=5 -max-args=15 - -# Maximum number of locals for function / method body -max-locals=15 - -# Maximum number of return / yield for function / method body -max-returns=6 - -# Maximum number of branch for function / method body -#max-branchs=12 -max-branchs=25 - -# Maximum number of statements in function / method body -#max-statements=50 -max-statements=75 - -# Maximum number of parents for a class (see R0901). -max-parents=7 - -# Maximum number of attributes for a class (see R0902). -#max-attributes=7 -max-attributes=20 - -# Minimum number of public methods for a class (see R0903). -#min-public-methods=2 -min-public-methods=1 - -# Maximum number of public methods for a class (see R0904). -max-public-methods=20 - - -# checks for -# * external modules dependencies -# * relative / wildcard imports -# * cyclic imports -# * uses of deprecated modules -# -[IMPORTS] - -# Deprecated modules which should not be used, separated by a comma -deprecated-modules=regsub,string,TERMIOS,Bastion,rexec - -# Create a graph of every (i.e. internal and external) dependencies in the -# given file (report R0402 must not be disabled) -import-graph= - -# Create a graph of external dependencies in the given file (report R0402 must -# not be disabled) -ext-import-graph= - -# Create a graph of internal dependencies in the given file (report R0402 must -# not be disabled) -int-import-graph= - - -# checks for : -# * unauthorized constructions -# * strict indentation -# * line length -# * use of <> instead of != -# -[FORMAT] - -# Maximum number of characters on a single line. -max-line-length=80 - -# Maximum number of lines in a module -max-module-lines=1000 - -# String used as indentation unit. 
This is usually " " (4 spaces) or "\t" (1 -# tab). -indent-string=' ' - - -# checks for: -# * warning notes in the code like FIXME, XXX -# * PEP 263: source code with non ascii character but no encoding declaration -# -[MISCELLANEOUS] - -# List of note tags to take in consideration, separated by a comma. -notes=FIXME,XXX,TODO - - -# checks for similarities and duplicated code. This computation may be -# memory / CPU intensive, so you should disable it if you experiments some -# problems. - -# -[SIMILARITIES] - -# Minimum lines number of a similarity. -min-similarity-lines=4 - -# Ignore comments when computing similarities. -ignore-comments=yes - -# Ignore docstrings when computing similarities. -ignore-docstrings=yes diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index 5da2af9..0000000 --- a/.travis.yml +++ /dev/null @@ -1,17 +0,0 @@ -# Travis CI configuration -# see https://docs.travis-ci.com/user/languages/python - -language: python - -python: - - "2.7" - - "3.5" - - "3.6" - - "3.7" - - "3.8" - -install: - - pip install pytest - - pip install . 
- -script: pytest DBUtils/Tests/Test*.py diff --git a/DBUtils/Docs/Doc.css b/DBUtils/Docs/Doc.css deleted file mode 100644 index 391e17c..0000000 --- a/DBUtils/Docs/Doc.css +++ /dev/null @@ -1,177 +0,0 @@ -/* - Webware for Python (https://webwareforpython.github.io/w4py/) - - Common style sheet for Webware's documentation pages -*/ - -/* First import default style for pages created with Docutils: */ - -@import url(DocUtils.css); - -/* Customization for Webware goes here: */ - -body { - background-color: #FFFFFF; - font-family: Verdana, Arial, Helvetica, sans-serif; - font-size: 10pt; - padding: 12pt; -} -table { - empty-cells: show; - border-collapse: collapse; - margin: 0 auto; -} -table.doc { - border-spacing: 2px; - border-collapse: separate; - border-style: none; -} -td, th { - font-family: Verdana, Arial, Helvetica, sans-serif; - font-size: 10pt; -} -table.doc td, table.doc th { - padding: 4px; - border-style: none; -} -p { - margin-top: 6pt; - margin-bottom: 6pt; - text-align: justify; -} -li { - margin-bottom: 6pt; -} -h1, h2 { - font-family: Verdana, Arial, Helvetica, sans-serif; - color: #002352; -} -h3, h4 { - font-family: Verdana, Arial, Helvetica, sans-serif; - color: #002352; -} -h1 { - font-size: 18pt; -} -h2 { - font-size: 16pt; -} -h3 { - font-size: 14pt; -} -h4 { - font-size: 12pt; -} -h5 { - font-size: 11pt; -} -h6 { - font-size: 10pt; -} -h1.titlebar { - padding: 4pt; - margin-bottom: 12pt; - text-align: center; - color: white; - background-color: #025; -} -h1.title, h1.header { - padding: 4pt; - margin-bottom: 12pt; - text-align: center; - border-bottom: 1pt solid #025; - padding-bottom: 8pt; -} -div.footer { - font-family: Tahoma, Arial, Helvetica, sans-serif; - font-size: 9pt; - text-align: center; - padding: 4pt; - margin-top: 16pt; - border-top: 1pt solid #025; -} -.left { - text-align: left; -} -.right { - text-align: right; -} -.center { - text-align: center; -} -.top { - vertical-align: top; -} -.nowrap { - white-space: nowrap; -} 
-.contents { - font-family: Tahoma, Arial, Helvetica, sans-serif; -} -.contents ul { - list-style: none; - margin-bottom: 24pt; - padding-left: 0em; - margin-left: 2em; -} -.contents ul li { - font-size: 11pt; - margin-bottom: 3pt; -} -.contents ul ul { - list-style-type: none; - margin-top: 2pt; - margin-bottom: 2pt; - padding-left: 0em; - margin-left: 1.5em; -} -.contents ul ul li { - font-size: 10pt; - margin-bottom: 1pt; -} -.contents .topic-title { - font-size: 16pt; -} -span.name { - font-weight: bold; -} -span.filename { - font-family: Tahoma, Arial, Helvetica, sans-serif; - font-size: 9pt; -} -code, .literal, .literal-block, .pre, .py { - font-family: "Andale Mono", "Lucida Console", Monaco, "Courier New", Courier, monospace; - font-size: 10pt; - color: #052; -} -tt.literal, span.pre { - background-color: #FFFFFF; -} -pre.py, pre.literal-block { - margin: 0; - padding: 2pt 1pt 1pt 2pt; - background-color: #F0F0F8; -} -.typed { - font-weight: bold; -} -.error { - color: red; -} -.warning { - color: brown; -} - -/* Configuration documentation: */ - -dl.config { -} -dt.config { -} -dd.config { -} -span.setting { - font-family: Tahoma, Arial, Helvetica, sans-serif; - font-size: 9pt; - font-weight: bold; -} diff --git a/DBUtils/Docs/DocUtils.css b/DBUtils/Docs/DocUtils.css deleted file mode 100644 index 5219d8e..0000000 --- a/DBUtils/Docs/DocUtils.css +++ /dev/null @@ -1,101 +0,0 @@ -/* CSS 3.1 style sheet for the output of Docutils 0.13 HTML5 writer. 
*/ -.align-left{text-align:left} -.align-right{text-align:right} -.align-center{clear:both;text-align:center} -.align-top{vertical-align:top} -.align-middle{vertical-align:middle} -.align-bottom{vertical-align:bottom} -h1.title,p.subtitle{text-align:center} -p.admonition-title,p.topic-title,p.sidebar-title,p.rubric,p.system-message-title{font-weight:700} -h1 + p.subtitle,h1 + p.section-subtitle{font-size:1.6em} -h2 + p.section-subtitle{font-size:1.28em} -p.subtitle,p.section-subtitle,p.sidebar-subtitle{font-weight:700;margin-top:-.5em} -p.sidebar-title,p.rubric{font-size:larger} -p.rubric{color:maroon} -a.toc-backref{color:#000;text-decoration:none} -div.caution p.admonition-title,div.attention p.admonition-title,div.danger p.admonition-title,div.error p.admonition-title,div.warning p.admonition-title,div.system-messages h1,div.error,span.problematic,p.system-message-title{color:red} -span.docutils.literal{font-family:monospace;white-space:pre-wrap} -.literal > span.pre{white-space:nowrap} -.simple li,.compact li,.simple ul,.compact ul,.simple ol,.compact ol,.simple > li p,.compact > li p,dl.simple > dd,dl.compact > dd{margin-top:0;margin-bottom:0} -div.topic.contents{margin:0} -ul.auto-toc{list-style-type:none;padding-left:1.5em} -ol.arabic{list-style:decimal} -ol.loweralpha{list-style:lower-alpha} -ol.upperalpha{list-style:upper-alpha} -ol.lowerroman{list-style:lower-roman} -ol.upperroman{list-style:upper-roman} -dt span.classifier{font-style:italic} -dt span.classifier:before{font-style:normal;margin:.5em;content:":"} -dl.field-list > dt,dl.option-list > dt,dl.docinfo > dt,dl.footnote > dt,dl.citation > dt{font-weight:700;clear:left;float:left;margin:0;padding:0;padding-right:.5em} -dl.field-list > dd,dl.option-list > dd,dl.docinfo > dd{margin-left:9em} -dl.field-list > dd > :first-child,dl.option-list > dd > :first-child{display:inline-block;width:100%;margin:0} -dl.field-list > dt:after,dl.docinfo > dt:after{content:":"} -pre.address{font:inherit} -dd.authors 
> p{margin:0} -dl.option-list{margin-left:40px} -dl.option-list > dt{font-weight:400} -span.option{white-space:nowrap} -dl.footnote.superscript > dd{margin-left:1em} -dl.footnote.brackets > dd{margin-left:2em} -dl > dt.label{font-weight:400} -a.footnote-reference.brackets:before,dt.label > span.brackets:before{content:"["} -a.footnote-reference.brackets:after,dt.label > span.brackets:after{content:"]"} -a.footnote-reference.superscript,dl.footnote.superscript > dt.label{vertical-align:super;font-size:smaller} -dt.label > span.fn-backref{margin-left:.2em} -dt.label > span.fn-backref > a{font-style:italic} -div.line-block{display:block} -div.line-block div.line-block{margin-top:0;margin-bottom:0;margin-left:40px} -.figure.align-left,img.align-left,object.align-left,table.align-left{margin-right:auto} -.figure.align-center,img.align-center,object.align-center{margin-left:auto;margin-right:auto;display:block} -table.align-center{margin-left:auto;margin-right:auto} -.figure.align-right,img.align-right,object.align-right,table.align-right{margin-left:auto} -div.align-left,div.align-center,div.align-right,table.align-left,table.align-center,table.align-right{text-align:inherit} -div.admonition,div.system-message,div.sidebar{margin:40px;border:medium outset;padding-right:1em;padding-left:1em} -div.sidebar{width:30%;max-width:26em;float:right;clear:right} -div.topic,pre.literal-block,pre.doctest-block,pre.math,pre.code{margin-right:40px;margin-left:40px} -pre.code .ln{color:gray} -table{border-collapse:collapse} -td,th{border-style:solid;border-color:silver;padding:0 1ex;border-width:thin} -td > p:first-child,th > p:first-child{margin-top:0} -td > p,th > p{margin-bottom:0} -table > caption{text-align:left;margin-bottom:.25em} -table.borderless td,table.borderless th{border:0;padding:0;padding-right:.5em} -body{padding:0 5%;margin:8px 0} -div.document{line-height:1.3;counter-reset:table;max-width:50em;margin:auto} 
-hr.docutils{width:80%;margin-top:1em;margin-bottom:1em;clear:both} -p,ol,ul,dl,div.line-block,table{margin-top:.5em;margin-bottom:.5em} -h1,h2,h3,h4,h5,h6,dl > dd{margin-bottom:.5em} -dl > dd p:first-child{margin-top:0} -dd > ul:only-child,dd > ol:only-child{padding-left:1em} -dl.description > dt{font-weight:700;clear:left;float:left;margin:0;padding:0;padding-right:.5em} -dl.field-list.narrow > dd{margin-left:5em} -dl.field-list.run-in > dd p{display:block} -div.abstract p.topic-title{text-align:center} -div.dedication{margin:2em 5em;text-align:center;font-style:italic} -div.dedication p.topic-title{font-style:normal} -dl.citation dt.label{font-weight:700} -span.fn-backref{font-weight:400} -pre.literal-block,pre.doctest-block,pre.math,pre.code{margin-left:1.5em;margin-right:1.5em} -blockquote,div.topic{margin-left:1.5em;margin-right:1.5em} -blockquote > table,div.topic > table{margin-top:0;margin-bottom:0} -blockquote p.attribution,div.topic p.attribution{text-align:right;margin-left:20%} -table tr{text-align:left} -table.booktabs{border:0;border-top:2px solid;border-bottom:2px solid;border-collapse:collapse} -table.booktabs *{border:0} -table.booktabs th{border-bottom:thin solid} -table.numbered > caption:before{counter-increment:table;content:"Table " counter(table) ": ";font-weight:700} -dl.footnote{padding-left:1ex;border-left:solid;border-left-width:thin} -.figure.align-left,img.align-left,object.align-left{display:block;clear:left;float:left;margin-right:1em} -.figure.align-right,img.align-right,object.align-right{display:block;clear:right;float:right;margin-left:1em} -h1,h2,h3{clear:both} -div.sidebar{width:30%;max-width:26em;margin-left:1em;margin-right:-5.5%;background-color:#ffe} -pre.code,code{background-color:#eee} -pre.code .ln{color:gray} -pre.code .comment,code .comment{color:#5C6576} -pre.code .keyword,code .keyword{color:#3B0D06;font-weight:700} -pre.code .literal.string,code .literal.string{color:#0C5404} -pre.code .name.builtin,code 
.name.builtin{color:#352B84} -pre.code .deleted,code .deleted{background-color:#DEB0A1} -pre.code .inserted,code .inserted{background-color:#A3D289} -div.footer,div.header{clear:both;font-size:smaller} -a{text-decoration:none} \ No newline at end of file diff --git a/DBUtils/Docs/RelNotes-0.8.1.html b/DBUtils/Docs/RelNotes-0.8.1.html deleted file mode 100644 index fbb16f8..0000000 --- a/DBUtils/Docs/RelNotes-0.8.1.html +++ /dev/null @@ -1,19 +0,0 @@ -<!DOCTYPE html> -<html> -<head> -<title>DBUtils 0.8.1 Release Notes</title> -<link rel="stylesheet" href="Doc.css" type="text/css"> -</head> -<body> -<h1 class="header">DBUtils 0.8.1 Release Notes</h1> - -<p>DBUtils 0.8.1 was released on September 13, 2005.</p> - -<p>This is the first public release of DBUtils.</p> - -<div class="footer"> -DBUtils -(<a href="https://github.com/WebwareForPython/DBUtils">github.com/WebwareForPython/DBUtils</a>) -</div> -</body> -</html> diff --git a/DBUtils/Docs/RelNotes-0.9.1.html b/DBUtils/Docs/RelNotes-0.9.1.html deleted file mode 100644 index 55b96b4..0000000 --- a/DBUtils/Docs/RelNotes-0.9.1.html +++ /dev/null @@ -1,29 +0,0 @@ -<!DOCTYPE html> -<html> -<head> -<title>DBUtils 0.9.1 Release Notes</title> -<link rel="stylesheet" href="Doc.css" type="text/css"> -</head> -<body> -<h1 class="header">DBUtils 0.9.1 Release Notes</h1> - -<p>DBUtils 0.9.1 was released on May 8, 2006.</p> - -<p>This is the second public release of DBUtils.</p> - -<h2>Changes:</h2> -<ul> -<li>Added <code>_closeable</code> attribute and made persistent connections -not closeable by default. This allows <code>PersistentDB</code> to be used -in the same way as you would use <code>PooledDB</code>.</li> -<li>Allowed arguments in the DB-API 2 <code>cursor()</code> method. -MySQLdb is using this to specify cursor classes. 
(Suggested by Michael Palmer.)</li> -<li>Improved the documentation and added a User's Guide.</li> -</ul> - -<div class="footer"> -DBUtils -(<a href="https://github.com/WebwareForPython/DBUtils">github.com/WebwareForPython/DBUtils</a>) -</div> -</body> -</html> diff --git a/DBUtils/Docs/RelNotes-0.9.2.html b/DBUtils/Docs/RelNotes-0.9.2.html deleted file mode 100644 index b3e566d..0000000 --- a/DBUtils/Docs/RelNotes-0.9.2.html +++ /dev/null @@ -1,26 +0,0 @@ -<!DOCTYPE html> -<html> -<head> -<title>DBUtils 0.9.2 Release Notes</title> -<link rel="stylesheet" href="Doc.css" type="text/css"> -</head> -<body> -<h1 class="header">DBUtils 0.9.2 Release Notes</h1> - -<p>DBUtils 0.9.2 was released on September 22, 2006.</p> - -<p>This is the third public release of DBUtils.</p> - -<h2>Changes:</h2> -<ul> -<li>Renamed <code>SolidDB</code> to <code>SteadyDB</code> to avoid confusion -with the <i>solidDB</i> storage engine.</li> -<li>Accordingly, renamed <code>SolidPg</code> to <code>SteadyPg</code>.</li> -</ul> - -<div class="footer"> -DBUtils -(<a href="https://github.com/WebwareForPython/DBUtils">github.com/WebwareForPython/DBUtils</a>) -</div> -</body> -</html> diff --git a/DBUtils/Docs/RelNotes-0.9.3.html b/DBUtils/Docs/RelNotes-0.9.3.html deleted file mode 100644 index 72f4fb8..0000000 --- a/DBUtils/Docs/RelNotes-0.9.3.html +++ /dev/null @@ -1,34 +0,0 @@ -<!DOCTYPE html> -<html> -<head> -<title>DBUtils 0.9.3 Release Notes</title> -<link rel="stylesheet" href="Doc.css" type="text/css"> -</head> -<body> -<h1 class="header">DBUtils 0.9.3 Release Notes</h1> - -<p>DBUtils 0.9.3 was released on May 21, 2007.</p> - -<p>This is the fourth public release of DBUtils.</p> - -<h2>Changes:</h2> -<ul> -<li>Support custom creator functions for database connections. 
-These can now be used as the first parameter instead of an DB-API module -(suggested by Ezio Vernacotola).</li> -<li>Added destructor for steady connections.</li> -<li>Use <a href="https://pypi.python.org/pypi/setuptools">setuptools</a> -if available.</li> -<li>Some code cleanup.</li> -<li>Some fixes in the documentation. -Added <a href="UsersGuide.zh.html">Chinese translation</a> -of the <a href="UsersGuide.html">User's Guide</a>, -kindly contributed by <a href="http://blog.csdn.net/gashero">gashero</a>.</li> -</ul> - -<div class="footer"> -DBUtils -(<a href="https://github.com/WebwareForPython/DBUtils">github.com/WebwareForPython/DBUtils</a>) -</div> -</body> -</html> diff --git a/DBUtils/Docs/RelNotes-0.9.4.html b/DBUtils/Docs/RelNotes-0.9.4.html deleted file mode 100644 index 674a6c8..0000000 --- a/DBUtils/Docs/RelNotes-0.9.4.html +++ /dev/null @@ -1,26 +0,0 @@ -<!DOCTYPE html> -<html> -<head> -<title>DBUtils 0.9.4 Release Notes</title> -<link rel="stylesheet" href="Doc.css" type="text/css"> -</head> -<body> -<h1 class="header">DBUtils 0.9.4 Release Notes</h1> - -<p>DBUtils 0.9.4 was released on July 7, 2007.</p> - -<p>This is the fifth public release of DBUtils.</p> - -<p>This release fixes a problem in the destructor code and has been -supplemented with a German User's Guide.</p> - -<p>Please note that the <code>dbapi</code> parameter has been renamed to -<code>creator</code> in the last release since you can now pass custom -creator functions for database connections instead of DB-API 2 modules.</p> - -<div class="footer"> -DBUtils -(<a href="https://github.com/WebwareForPython/DBUtils">github.com/WebwareForPython/DBUtils</a>) -</div> -</body> -</html> diff --git a/DBUtils/Docs/RelNotes-1.0.html b/DBUtils/Docs/RelNotes-1.0.html deleted file mode 100644 index 2606375..0000000 --- a/DBUtils/Docs/RelNotes-1.0.html +++ /dev/null @@ -1,49 +0,0 @@ -<!DOCTYPE html> -<html> -<head> -<title>DBUtils 1.0 Release Notes</title> -<link rel="stylesheet" href="Doc.css" 
type="text/css"> -</head> -<body> -<h1 class="header">DBUtils 1.0 Release Notes</h1> - -<p>DBUtils 1.0 was released on November 29, 2008.</p> - -<p>This is the sixth public release of DBUtils.</p> - -<h2>Changes:</h2> -<ul> -<li>Added a <code>failures</code> parameter for configuring the exception classes for -which the failover mechanisms is applied (as suggested by Matthew Harriger).</li> -<li>Added a <code>closeable</code> parameter for configuring whether connections -can be closed (otherwise closing connections will be silently ignored).</li> -<li>It is now possible to override defaults via the <code>creator.dbapi</code> -and <code>creator.threadsafety</code> attributes.</li> -<li>Added alias method <code>dedicated_connection</code> for -<code>connection(shareable=False)</code>.</li> -<li>Added a <code>version</code> attribute to all exported classes.</li> -<li>Where <code>0</code> has the meaning "unlimited", parameters can now be also -set to <code>None</code> instead.</li> -<li>It turned out that <code>threading.local</code> does not work properly with -<code>mod_wsgi</code>, so we use the Python implementation for thread-local data -even when a faster <code>threading.local</code> implementation is available. -A new parameter <code>threadlocal</code> allows you to pass an arbitrary class -such as <code>threading.local</code> if you know it works in your environment.</li> -</ul> - -<h2>Bugfixes and Improvements:</h2> -<ul> -<li>In some cases, when instance initialization failed or referenced objects -were already destroyed, finalizers could throw exceptions or create infinite -recursion (problem reported by Gregory Pinero and Jehiah Czebotar).</li> -<li>DBUtils now tries harder to find the underlying DB-API 2 module if only a -connection creator function is specified. 
This had not worked before with the -MySQLdb module (problem reported by Gregory Pinero).</li> -</ul> - -<div class="footer"> -DBUtils -(<a href="https://github.com/WebwareForPython/DBUtils">github.com/WebwareForPython/DBUtils</a>) -</div> -</body> -</html> diff --git a/DBUtils/Docs/RelNotes-1.1.1.html b/DBUtils/Docs/RelNotes-1.1.1.html deleted file mode 100644 index b780067..0000000 --- a/DBUtils/Docs/RelNotes-1.1.1.html +++ /dev/null @@ -1,32 +0,0 @@ -<!DOCTYPE html> -<html> -<head> -<title>DBUtils 1.1.1 Release Notes</title> -<link rel="stylesheet" href="Doc.css" type="text/css"> -</head> -<body> -<h1 class="header">DBUtils 1.1.1 Release Notes</h1> - -<p>DBUtils 1.1.1 was released on 02/04/17.</p> - -<p>This bugfix release is the eight public release of DBUtils.</p> - -<p>It is intended to be used with Python versions 2.3 to 2.7</p> - -<h2>Improvements:</h2> -<ul> -<li>Reopen <code>SteadyDB</code> connections when commit or rollback fails -(suggested by Ben Hoyt).</li> -</ul> - -<h2>Bugfixes:</h2> -<ul> -<li>Fixed a problem when running under Jython (reported by Vitaly Kruglikov).</li> -</ul> - -<div class="footer"> -DBUtils -(<a href="https://github.com/WebwareForPython/DBUtils">github.com/WebwareForPython/DBUtils</a>) -</div> -</body> -</html> diff --git a/DBUtils/Docs/RelNotes-1.1.html b/DBUtils/Docs/RelNotes-1.1.html deleted file mode 100644 index 6cdb156..0000000 --- a/DBUtils/Docs/RelNotes-1.1.html +++ /dev/null @@ -1,46 +0,0 @@ -<!DOCTYPE html> -<html> -<head> -<title>DBUtils 1.1 Release Notes</title> -<link rel="stylesheet" href="Doc.css" type="text/css"> -</head> -<body> -<h1 class="header">DBUtils 1.1 Release Notes</h1> - -<p>DBUtils 1.1 was released on August 14, 2011.</p> - -<p>This is the seventh public release of DBUtils.</p> - -<h2>Improvements:</h2> -<ul> -<li>The transparent reopening of connections is actually an undesired behavior -if it happens during database transactions. 
In these cases, the transaction -should fail and the error be reported back to the application instead of the -rest of the transaction being executed in a new connection and therefore in -a new transaction. Therefore DBUtils now allows suspending the transparent -reopening during transactions. All you need to do is indicate the beginning -of a transaction by calling the <code>begin()</code> method of the connection. -DBUtils makes sure that this method always exists, even if the database driver -does not support it.</li> -<li>If the database driver supports a <code>ping()</code> method, then DBUtils -can use it to check whether connections are alive instead of just trying -to use the connection and reestablishing it in case it was dead. Since these -checks are done at the expense of some performance, you have exact control -when these are executed via the new <code>ping</code> parameter.</li> -<li><code>PooledDB</code> has got another new parameter <code>reset</code> for -controlling how connections are reset before being put back into the pool.</li> -</ul> - -<h2>Bugfixes:</h2> -<ul> -<li>Fixed propagation of error messages when the connection was lost.</li> -<li>Fixed an issue with the <code>setoutputsize()</code> cursor method.</li> -<li>Fixed some minor issues with the <code>DBUtilsExample</code> for Webware.</li> -</ul> - -<div class="footer"> -DBUtils -(<a href="https://github.com/WebwareForPython/DBUtils">github.com/WebwareForPython/DBUtils</a>) -</div> -</body> -</html> diff --git a/DBUtils/Docs/RelNotes-1.2.html b/DBUtils/Docs/RelNotes-1.2.html deleted file mode 100644 index a90eae2..0000000 --- a/DBUtils/Docs/RelNotes-1.2.html +++ /dev/null @@ -1,26 +0,0 @@ -<!DOCTYPE html> -<html> -<head> -<title>DBUtils 1.2 Release Notes</title> -<link rel="stylesheet" href="Doc.css" type="text/css"> -</head> -<body> -<h1 class="header">DBUtils 1.2 Release Notes</h1> - -<p>DBUtils 1.2 was released on 02/05/17.</p> - -<p>This is the ninth public release of DBUtils.</p> - 
-<p>It is intended to be used with Python versions 2.6 and newer.</p> - -<h2>Improvements:</h2> -<ul> -<li>Python 3 is now supported.</li> -</ul> - -<div class="footer"> -DBUtils -(<a href="https://github.com/WebwareForPython/DBUtils">github.com/WebwareForPython/DBUtils</a>) -</div> -</body> -</html> diff --git a/DBUtils/Docs/RelNotes-1.3.html b/DBUtils/Docs/RelNotes-1.3.html deleted file mode 100644 index 05f8614..0000000 --- a/DBUtils/Docs/RelNotes-1.3.html +++ /dev/null @@ -1,26 +0,0 @@ -<!DOCTYPE html> -<html> -<head> -<title>DBUtils 1.3 Release Notes</title> -<link rel="stylesheet" href="Doc.css" type="text/css"> -</head> -<body> -<h1 class="header">DBUtils 1.3 Release Notes</h1> - -<p>DBUtils 1.3 was released on 03/08/18.</p> - -<p>This is the tenth public release of DBUtils.</p> - -<p>It is intended to be used with Python versions 2.6, 2.7 or 3.4 - 3.7.</p> - -<h2>Improvements:</h2> -<ul> -<li>Supports context handlers for connections and cursors.</li> -</ul> - -<div class="footer"> -DBUtils -(<a href="https://github.com/WebwareForPython/DBUtils">github.com/WebwareForPython/DBUtils</a>) -</div> -</body> -</html> diff --git a/DBUtils/Docs/RelNotes-1.4.html b/DBUtils/Docs/RelNotes-1.4.html deleted file mode 100644 index 0565fd0..0000000 --- a/DBUtils/Docs/RelNotes-1.4.html +++ /dev/null @@ -1,26 +0,0 @@ -<!DOCTYPE html> -<html> -<head> -<title>DBUtils 1.4 Release Notes</title> -<link rel="stylesheet" href="Doc.css" type="text/css"> -</head> -<body> -<h1 class="header">DBUtils 1.4 Release Notes</h1> - -<p>DBUtils 1.4 was released on 09/26/20.</p> - -<p>This version is intended to be used with Python versions 2.7 and 3.5 to 3.9.</p> - -<h2>Improvements:</h2> -<ul> -<li>The <code>SteadyDB</code> and <code>SteadyPg</code> classes only reconnect -after the <code>maxusage</code> limit has been reached when the connection is -not currently inside a transaction.</li> -</ul> - -<div class="footer"> -DBUtils -(<a 
href="https://github.com/WebwareForPython/DBUtils">github.com/WebwareForPython/DBUtils</a>) -</div> -</body> -</html> diff --git a/DBUtils/Docs/dbdep.gif b/DBUtils/Docs/dbdep.gif deleted file mode 100644 index 8554b62..0000000 Binary files a/DBUtils/Docs/dbdep.gif and /dev/null differ diff --git a/DBUtils/Docs/persist.gif b/DBUtils/Docs/persist.gif deleted file mode 100644 index 43c3d01..0000000 Binary files a/DBUtils/Docs/persist.gif and /dev/null differ diff --git a/DBUtils/Docs/pgdep.gif b/DBUtils/Docs/pgdep.gif deleted file mode 100644 index 9bdface..0000000 Binary files a/DBUtils/Docs/pgdep.gif and /dev/null differ diff --git a/DBUtils/Docs/pool.gif b/DBUtils/Docs/pool.gif deleted file mode 100644 index 105b3c8..0000000 Binary files a/DBUtils/Docs/pool.gif and /dev/null differ diff --git a/DBUtils/Examples/DBUtilsExample.py b/DBUtils/Examples/DBUtilsExample.py deleted file mode 100644 index 8e40172..0000000 --- a/DBUtils/Examples/DBUtilsExample.py +++ /dev/null @@ -1,455 +0,0 @@ - -from MiscUtils.Configurable import Configurable -from WebKit.Examples.ExamplePage import ExamplePage - - -class DBConfig(Configurable): - """Database configuration.""" - - def defaultConfig(self): - return { - 'dbapi': 'pg', - 'database': 'demo', - 'user': 'demo', - 'password': 'demo', - 'mincached': 5, - 'maxcached': 25 - } - - def configFilename(self): - return 'Configs/Database.config' - - -# the database tables used in this example: -tables = ( - '''seminars ( - id varchar(4) primary key, - title varchar(64) unique not null, - cost money, - places_left smallint)''', - '''attendees ( - name varchar(64) not null, - seminar varchar(4), - paid boolean, - primary key(name, seminar), - foreign key (seminar) references seminars(id) on delete cascade)''') - - -class DBUtilsExample(ExamplePage): - """Example page for the DBUtils package.""" - - # Initialize the database class once when this class is loaded: - config = DBConfig().config() - if config.get('maxcached', None) is None: - 
dbmod_name = 'Persistent' - else: - dbmod_name = 'Pooled' - dbapi_name = config.pop('dbapi', 'pg') - if dbapi_name == 'pg': # use the PyGreSQL classic DB API - dbmod_name += 'Pg' - if 'database' in config: - config['dbname'] = config['database'] - del config['database'] - if 'password' in config: - config['passwd'] = config['password'] - del config['password'] - else: # use a DB-API 2 compliant module - dbmod_name += 'DB' - dbapi = dbmod = dbclass = dbstatus = None - try: - dbapi = __import__(dbapi_name) - try: - dbmod = getattr(__import__('DBUtils.' + dbmod_name), dbmod_name) - try: - if dbapi_name != 'pg': - config['creator'] = dbapi - dbclass = getattr(dbmod, dbmod_name)(**config) - except dbapi.Error as error: - dbstatus = str(error) - except Exception: - dbstatus = 'Could not connect to the database.' - except Exception: - dbstatus = 'Could not import DBUtils.%s.' % dbmod_name - except Exception: - dbstatus = 'Could not import %s.' % dbapi_name - - # Initialize the buttons - _actions = [] - _buttons = [] - for action in ( - 'create tables', 'list seminars', 'list attendees', - 'add seminar', 'add attendee'): - value = action.capitalize() - action = action.split() - action[1] = action[1].capitalize() - action = ''.join(action) - _actions.append(action) - _buttons.append( - '<input name="_action_%s" type="submit" value="%s">' - % (action, value)) - _buttons = tuple(_buttons) - - def title(self): - return "DBUtils Example" - - def actions(self): - return ExamplePage.actions(self) + self._actions - - def awake(self, transaction): - ExamplePage.awake(self, transaction) - self._output = [] - - def postAction(self, actionName): - self.writeBody() - del self._output - ExamplePage.postAction(self, actionName) - - def output(self, s): - self._output.append(s) - - def outputMsg(self, msg, error=False): - self._output.append( - '<p style="color:%s">%s</p>' % ('red' if error else 'green', msg)) - - def connection(self, shareable=True): - if self.dbstatus: - error = 
self.dbstatus - else: - try: - if self.dbmod_name == 'PooledDB': - return self.dbclass.connection(shareable) - else: - return self.dbclass.connection() - except self.dbapi.Error as error: - error = str(error) - except Exception: - error = 'Cannot connect to the database.' - self.outputMsg(error, True) - - def dedicated_connection(self): - return self.connection(False) - - def sqlEncode(self, s): - if s is None: - return 'null' - s = s.replace('\\', '\\\\').replace('\'', '\\\'') - return "'%s'" % s - - def createTables(self): - db = self.dedicated_connection() - if not db: - return - for table in tables: - self._output.append( - '<p>Creating the following table:</p><pre>%s</pre>' % table) - ddl = 'create table ' + table - try: - if self.dbapi_name == 'pg': - db.query(ddl) - else: - db.cursor().execute(ddl) - db.commit() - except self.dbapi.Error as error: - if self.dbapi_name != 'pg': - db.rollback() - self.outputMsg(error, True) - else: - self.outputMsg('The table was successfully created.') - db.close() - - def listSeminars(self): - id = self.request().field('id', None) - if id: - if not isinstance(id, list): - id = [id] - cmd = ','.join(map(self.sqlEncode, id)) - cmd = 'delete from seminars where id in (%s)' % cmd - db = self.dedicated_connection() - if not db: - return - try: - if self.dbapi_name == 'pg': - db.query('begin') - db.query(cmd) - db.query('end') - else: - db.cursor().execute(cmd) - db.commit() - except self.dbapi.Error as error: - try: - if self.dbapi_name == 'pg': - db.query('end') - else: - db.rollback() - except Exception: - pass - self.outputMsg(error, True) - return - else: - self.outputMsg('Entries deleted: %d' % len(id)) - db = self.connection() - if not db: - return - query = ('select id, title, cost, places_left' - ' from seminars order by title') - try: - if self.dbapi_name == 'pg': - result = db.query(query).getresult() - else: - cursor = db.cursor() - cursor.execute(query) - result = cursor.fetchall() - cursor.close() - except 
self.dbapi.Error as error: - self.outputMsg(error, True) - return - if not result: - self.outputMsg('There are no seminars in the database.', True) - return - wr = self.output - button = self._buttons[1].replace('List seminars', 'Delete') - wr('<h4>List of seminars in the database:</h4>') - wr('<form action=""><table border="1" cellspacing="0" cellpadding="2">' - '<tr><th>ID</th><th>Seminar title</th><th>Cost</th>' - '<th>Places left</th><th>%s</th></tr>' % button) - for id, title, cost, places in result: - if places is None: - places = 'unlimited' - if not cost: - cost = 'free' - wr('<tr><td>%s</td><td>%s</td><td>%s</td><td>%s</td><td>' - '<input type="checkbox" name="id" value="%s">' - '</td></tr>' % (id, title, cost, places, id)) - wr('</table></form>') - - def listAttendees(self): - id = self.request().field('id', None) - if id: - if not isinstance(id, list): - id = [id] - cmds = [ - 'delete from attendees where rpad(seminar,4)||name in (%s)' - % ','.join(map(self.sqlEncode, id))] - places = {} - for i in id: - i = i[:4].rstrip() - if i in places: - places[i] += 1 - else: - places[i] = 1 - for i, n in places.items(): - cmds.append( - 'update seminars set places_left=places_left+%d' - ' where id=%s' % (n, self.sqlEncode(i))) - db = self.dedicated_connection() - if not db: - return - try: - if self.dbapi_name == 'pg': - db.query('begin') - for cmd in cmds: - db.query(cmd) - db.query('end') - else: - for cmd in cmds: - db.cursor().execute(cmd) - db.commit() - except self.dbapi.Error as error: - if self.dbapi_name == 'pg': - db.query('end') - else: - db.rollback() - self.outputMsg(error, True) - return - else: - self.outputMsg('Entries deleted: %d' % len(id)) - db = self.connection() - if not db: - return - query = ('select a.name, s.id, s.title, a.paid' - ' from attendees a,seminars s' - ' where s.id=a.seminar order by a.name, s.title') - try: - if self.dbapi_name == 'pg': - result = db.query(query).getresult() - else: - cursor = db.cursor() - 
cursor.execute(query) - result = cursor.fetchall() - cursor.close() - except self.dbapi.Error as error: - self.outputMsg(error, True) - return - if not result: - self.outputMsg('There are no attendees in the database.', True) - return - wr = self.output - button = self._buttons[2].replace('List attendees', 'Delete') - wr('<h4>List of attendees in the database:</h4>') - wr('<form action=""><table border="1" cellspacing="0" cellpadding="2">' - '<tr><th>Name</th><th>Seminar</th><th>Paid</th>' - '<th>%s</th></tr>' % button) - for name, id, title, paid in result: - paid = 'Yes' if paid else 'No' - id = id.ljust(4) + name - wr('<tr><td>%s</td><td>%s</td><td>%s</td>' - '<td><input type="checkbox" name="id" value="%s"></td>' - '</tr>' % (name, title, paid, id)) - wr('</table></form>') - - def addSeminar(self): - wr = self.output - wr('<h4>Add a seminar entry to the database:</h4>') - wr('<form action=""><table>' - '<tr><th>ID</th><td><input name="id" type="text" ' - 'size="4" maxlength="4"></td></tr>' - '<tr><th>Title</th><td><input name="title" type="text" ' - 'size="40" maxlength="64"></td></tr>' - '<tr><th>Cost</th><td><input name="cost" type="text" ' - 'size="20" maxlength="20"></td></tr>' - '<tr><th>Places</th><td><input name="places" type="text" ' - 'size="20" maxlength="20"></td></tr>' - '<tr><td colspan="2" align="right">%s</td></tr>' - '</table></form>' % self._buttons[3]) - request = self.request() - if not request.hasField('id'): - return - values = [] - for name in ('id', 'title', 'cost', 'places'): - values.append(request.field(name, '').strip()) - if not values[0] or not values[1]: - self.outputMsg('You must enter a seminar ID and a title!') - return - if not values[2]: - values[2] = None - if not values[3]: - values[3] = None - db = self.dedicated_connection() - if not db: - return - cmd = ('insert into seminars values (%s,%s,%s,%s)' - % tuple(map(self.sqlEncode, values))) - try: - if self.dbapi_name == 'pg': - db.query('begin') - db.query(cmd) - 
db.query('end') - else: - db.cursor().execute(cmd) - db.commit() - except self.dbapi.Error as error: - if self.dbapi_name == 'pg': - db.query('end') - else: - db.rollback() - self.outputMsg(error, True) - else: - self.outputMsg('"%s" added to seminars.' % values[1]) - db.close() - - def addAttendee(self): - db = self.connection() - if not db: - return - query = ('select id, title from seminars' - ' where places_left is null or places_left>0 order by title') - try: - if self.dbapi_name == 'pg': - result = db.query(query).getresult() - else: - cursor = db.cursor() - cursor.execute(query) - result = cursor.fetchall() - cursor.close() - except self.dbapi.Error as error: - self.outputMsg(error, True) - return - if not result: - self.outputMsg('You have to define seminars first.') - return - sem = ['<select name="seminar" size="1">'] - for id, title in result: - sem.append('<option value="%s">%s</option>' % (id, title)) - sem.append('</select>') - sem = ''.join(sem) - wr = self.output - wr('<h4>Add an attendee entry to the database:</h4>') - wr('<form action=""><table>' - '<tr><th>Name</th><td><input name="name" type="text" ' - 'size="40" maxlength="64"></td></tr>' - '<tr><th>Seminar</th><td>%s</td></tr>' - '<tr><th>Paid</th><td>' - '<input type="radio" name="paid" value="t">Yes ' - '<input type="radio" name="paid" value="f" checked="checked">No' - '</td></tr><tr><td colspan="2" align="right">%s</td></tr>' - '</table></form>' % (sem, self._buttons[4])) - request = self.request() - if not request.hasField('name'): - return - values = [] - for name in ('name', 'seminar', 'paid'): - values.append(request.field(name, '').strip()) - if not values[0] or not values[1]: - self.outputMsg('You must enter a name and a seminar!') - return - db = self.dedicated_connection() - if not db: - return - try: - if self.dbapi_name == 'pg': - db.query('begin') - cmd = ('update seminars set places_left=places_left-1' - ' where id=%s' % self.sqlEncode(values[1])) - db.query(cmd) - cmd = 
('select places_left from seminars' - ' where id=%s' % self.sqlEncode(values[1])) - if (db.query(cmd).getresult()[0][0] or 0) < 0: - raise self.dbapi.Error("No more places left.") - cmd = ('insert into attendees values (%s,%s,%s)' - % tuple(map(self.sqlEncode, values))) - db.query(cmd) - db.query('end') - else: - cursor = db.cursor() - cmd = ('update seminars set places_left=places_left-1' - ' where id=%s' % self.sqlEncode(values[1])) - cursor.execute(cmd) - cmd = ('select places_left from seminars' - ' where id=%s' % self.sqlEncode(values[1])) - cursor.execute(cmd) - if (cursor.fetchone()[0] or 0) < 0: - raise self.dbapi.Error("No more places left.") - cmd = ('insert into attendees values (%s,%s,%s)' - % tuple(map(self.sqlEncode, values))) - db.cursor().execute(cmd) - cursor.close() - db.commit() - except self.dbapi.Error as error: - if self.dbapi_name == 'pg': - db.query('end') - else: - db.rollback() - self.outputMsg(error, True) - else: - self.outputMsg('%s added to attendees.' % values[0]) - db.close() - - def writeContent(self): - wr = self.writeln - if self._output: - wr('\n'.join(self._output)) - wr('<p><a href="DBUtilsExample">Back</a></p>') - else: - wr('<h2>Welcome to the %s!</h2>' % self.title()) - wr('<h4>We are using DBUtils.%s and the %s database module.</h4>' - % (self.dbmod_name, self.dbapi_name)) - wr('<p>Configuration: %r</p>' % DBConfig().config()) - wr('<p>This example uses a small demo database ' - 'designed to track the attendees for a series of seminars ' - '(see <a href="http://www.linuxjournal.com/article/2605">"' - 'The Python DB-API"</a> by Andrew Kuchling).</p>') - wr('<form action="">' - '<p>%s (create the needed database tables first)</p>' - '<p>%s %s (list all database entries)</p>' - '<p>%s %s (add entries)</p>' - '</form>' % self._buttons) diff --git a/DBUtils/Examples/Main.py b/DBUtils/Examples/Main.py deleted file mode 100644 index 39bf4b6..0000000 --- a/DBUtils/Examples/Main.py +++ /dev/null @@ -1,21 +0,0 @@ - -from 
WebKit.Examples.ExamplePage import ExamplePage - - -class Main(ExamplePage): - - def writeContent(self): - self.writeln('''<h2>DBUtils example</h2> -<p>You can set the DBUtils parameters in the following file</p> -<ul> -<li><tt>Configs/Database.config</tt></li> -</ul> -<p>With the default settings,</p> -<ul> -<li>you must have the PostgreSQL database</li> -<li>and the PyGreSQL adapter installed, and</li> -<li>you must have created a database with the name "demo" and</li> -<li>a database user with the name "demo" and password "demo".</li> -</ul> -<p><a href="DBUtilsExample">Start the demo!</a></p> -''') diff --git a/DBUtils/Examples/__init__.py b/DBUtils/Examples/__init__.py deleted file mode 100644 index 71f8157..0000000 --- a/DBUtils/Examples/__init__.py +++ /dev/null @@ -1 +0,0 @@ -# DBUtils Examples diff --git a/DBUtils/Properties.py b/DBUtils/Properties.py deleted file mode 100644 index da7426e..0000000 --- a/DBUtils/Properties.py +++ /dev/null @@ -1,19 +0,0 @@ -name = 'DBUtils' - -version = (1, 4, 0) - -docs = [ - {'name': "User's Guide", 'file': 'UsersGuide.html'}, -] - -status = 'beta' - -requiredPyVersion = (2, 7, 0) - -synopsis = ( - "DBUtils provides database related support classes and functions" - " to Webware. There is plenty of useful reusable code here.") - -WebKitConfig = { - 'examplePages': ['DBUtilsExample'] -} diff --git a/DBUtils/Tests/TestPersistentDB.py b/DBUtils/Tests/TestPersistentDB.py deleted file mode 100644 index 6f7ec53..0000000 --- a/DBUtils/Tests/TestPersistentDB.py +++ /dev/null @@ -1,296 +0,0 @@ -"""Test the PersistentDB module. - -Note: -We don't test performance here, so the test does not predicate -whether PersistentDB actually will help in improving performance or not. -We also assume that the underlying SteadyDB connections are tested. 
- -Copyright and credit info: - -* This test was contributed by Christoph Zwerschke - -""" - -import unittest - -import DBUtils.Tests.mock_db as dbapi - -from DBUtils.PersistentDB import PersistentDB, local - -__version__ = '1.4' - - -class TestPersistentDB(unittest.TestCase): - - def setUp(self): - dbapi.threadsafety = 1 - - def test0_CheckVersion(self): - from DBUtils import __version__ as DBUtilsVersion - self.assertEqual(DBUtilsVersion, __version__) - from DBUtils.PersistentDB import __version__ as PersistentDBVersion - self.assertEqual(PersistentDBVersion, __version__) - self.assertEqual(PersistentDB.version, __version__) - - def test1_NoThreadsafety(self): - from DBUtils.PersistentDB import NotSupportedError - for dbapi.threadsafety in (None, 0): - self.assertRaises(NotSupportedError, PersistentDB, dbapi) - - def test2_Close(self): - for closeable in (False, True): - persist = PersistentDB(dbapi, closeable=closeable) - db = persist.connection() - self.assertTrue(db._con.valid) - db.close() - self.assertTrue(closeable ^ db._con.valid) - db.close() - self.assertTrue(closeable ^ db._con.valid) - db._close() - self.assertFalse(db._con.valid) - db._close() - self.assertFalse(db._con.valid) - - def test3_Connection(self): - persist = PersistentDB(dbapi) - db = persist.connection() - db_con = db._con - self.assertIsNone(db_con.database) - self.assertIsNone(db_con.user) - db2 = persist.connection() - self.assertEqual(db, db2) - db3 = persist.dedicated_connection() - self.assertEqual(db, db3) - db3.close() - db2.close() - db.close() - - def test4_Threads(self): - numThreads = 3 - persist = PersistentDB(dbapi, closeable=True) - try: - from queue import Queue, Empty - except ImportError: # Python 2 - from Queue import Queue, Empty - queryQueue, resultQueue = [], [] - for i in range(numThreads): - queryQueue.append(Queue(1)) - resultQueue.append(Queue(1)) - - def runQueries(i): - this_db = persist.connection() - while 1: - try: - try: - q = queryQueue[i].get(1, 1) - 
except TypeError: - q = queryQueue[i].get(1) - except Empty: - q = None - if not q: - break - db = persist.connection() - if db != this_db: - r = 'error - not persistent' - else: - if q == 'ping': - r = 'ok - thread alive' - elif q == 'close': - db.close() - r = 'ok - connection closed' - else: - cursor = db.cursor() - cursor.execute(q) - r = cursor.fetchone() - cursor.close() - r = '%d(%d): %s' % (i, db._usage, r) - try: - resultQueue[i].put(r, 1, 1) - except TypeError: - resultQueue[i].put(r, 1) - db.close() - - from threading import Thread - threads = [] - for i in range(numThreads): - thread = Thread(target=runQueries, args=(i,)) - threads.append(thread) - thread.start() - for i in range(numThreads): - try: - queryQueue[i].put('ping', 1, 1) - except TypeError: - queryQueue[i].put('ping', 1) - for i in range(numThreads): - try: - r = resultQueue[i].get(1, 1) - except TypeError: - r = resultQueue[i].get(1) - self.assertEqual(r, '%d(0): ok - thread alive' % i) - self.assertTrue(threads[i].is_alive()) - for i in range(numThreads): - for j in range(i + 1): - try: - queryQueue[i].put('select test%d' % j, 1, 1) - r = resultQueue[i].get(1, 1) - except TypeError: - queryQueue[i].put('select test%d' % j, 1) - r = resultQueue[i].get(1) - self.assertEqual(r, '%d(%d): test%d' % (i, j + 1, j)) - try: - queryQueue[1].put('select test4', 1, 1) - except TypeError: - queryQueue[1].put('select test4', 1) - try: - r = resultQueue[1].get(1, 1) - except TypeError: - r = resultQueue[1].get(1) - self.assertEqual(r, '1(3): test4') - try: - queryQueue[1].put('close', 1, 1) - r = resultQueue[1].get(1, 1) - except TypeError: - queryQueue[1].put('close', 1) - r = resultQueue[1].get(1) - self.assertEqual(r, '1(3): ok - connection closed') - for j in range(2): - try: - queryQueue[1].put('select test%d' % j, 1, 1) - r = resultQueue[1].get(1, 1) - except TypeError: - queryQueue[1].put('select test%d' % j, 1) - r = resultQueue[1].get(1) - self.assertEqual(r, '1(%d): test%d' % (j + 1, j)) - for 
i in range(numThreads): - self.assertTrue(threads[i].is_alive()) - try: - queryQueue[i].put('ping', 1, 1) - except TypeError: - queryQueue[i].put('ping', 1) - for i in range(numThreads): - try: - r = resultQueue[i].get(1, 1) - except TypeError: - r = resultQueue[i].get(1) - self.assertEqual(r, '%d(%d): ok - thread alive' % (i, i + 1)) - self.assertTrue(threads[i].is_alive()) - for i in range(numThreads): - try: - queryQueue[i].put(None, 1, 1) - except TypeError: - queryQueue[i].put(None, 1) - - def test5_MaxUsage(self): - persist = PersistentDB(dbapi, 20) - db = persist.connection() - self.assertEqual(db._maxusage, 20) - for i in range(100): - cursor = db.cursor() - cursor.execute('select test%d' % i) - r = cursor.fetchone() - cursor.close() - self.assertEqual(r, 'test%d' % i) - self.assertTrue(db._con.valid) - j = i % 20 + 1 - self.assertEqual(db._usage, j) - self.assertEqual(db._con.num_uses, j) - self.assertEqual(db._con.num_queries, j) - - def test6_SetSession(self): - persist = PersistentDB(dbapi, 3, ('set datestyle',)) - db = persist.connection() - self.assertEqual(db._maxusage, 3) - self.assertEqual(db._setsession_sql, ('set datestyle',)) - self.assertEqual(db._con.session, ['datestyle']) - cursor = db.cursor() - cursor.execute('set test') - cursor.fetchone() - cursor.close() - for i in range(3): - self.assertEqual(db._con.session, ['datestyle', 'test']) - cursor = db.cursor() - cursor.execute('select test') - cursor.fetchone() - cursor.close() - self.assertEqual(db._con.session, ['datestyle']) - - def test7_ThreadLocal(self): - persist = PersistentDB(dbapi) - self.assertTrue(isinstance(persist.thread, local)) - - class threadlocal: - pass - - persist = PersistentDB(dbapi, threadlocal=threadlocal) - self.assertTrue(isinstance(persist.thread, threadlocal)) - - def test8_PingCheck(self): - Connection = dbapi.Connection - Connection.has_ping = True - Connection.num_pings = 0 - persist = PersistentDB(dbapi, 0, None, None, 0, True) - db = persist.connection() - 
self.assertTrue(db._con.valid) - self.assertEqual(Connection.num_pings, 0) - db.close() - db = persist.connection() - self.assertFalse(db._con.valid) - self.assertEqual(Connection.num_pings, 0) - persist = PersistentDB(dbapi, 0, None, None, 1, True) - db = persist.connection() - self.assertTrue(db._con.valid) - self.assertEqual(Connection.num_pings, 1) - db.close() - db = persist.connection() - self.assertTrue(db._con.valid) - self.assertEqual(Connection.num_pings, 2) - persist = PersistentDB(dbapi, 0, None, None, 2, True) - db = persist.connection() - self.assertTrue(db._con.valid) - self.assertEqual(Connection.num_pings, 2) - db.close() - db = persist.connection() - self.assertFalse(db._con.valid) - self.assertEqual(Connection.num_pings, 2) - cursor = db.cursor() - self.assertTrue(db._con.valid) - self.assertEqual(Connection.num_pings, 3) - cursor.execute('select test') - self.assertTrue(db._con.valid) - self.assertEqual(Connection.num_pings, 3) - persist = PersistentDB(dbapi, 0, None, None, 4, True) - db = persist.connection() - self.assertTrue(db._con.valid) - self.assertEqual(Connection.num_pings, 3) - db.close() - db = persist.connection() - self.assertFalse(db._con.valid) - self.assertEqual(Connection.num_pings, 3) - cursor = db.cursor() - db._con.close() - self.assertFalse(db._con.valid) - self.assertEqual(Connection.num_pings, 3) - cursor.execute('select test') - self.assertTrue(db._con.valid) - self.assertEqual(Connection.num_pings, 4) - Connection.has_ping = False - Connection.num_pings = 0 - - def test9_FailedTransaction(self): - persist = PersistentDB(dbapi) - db = persist.connection() - cursor = db.cursor() - db._con.close() - cursor.execute('select test') - db.begin() - db._con.close() - self.assertRaises(dbapi.InternalError, cursor.execute, 'select test') - cursor.execute('select test') - db.begin() - db.cancel() - db._con.close() - cursor.execute('select test') - - -if __name__ == '__main__': - unittest.main() diff --git 
a/DBUtils/Tests/TestPersistentPg.py b/DBUtils/Tests/TestPersistentPg.py deleted file mode 100644 index f5ba411..0000000 --- a/DBUtils/Tests/TestPersistentPg.py +++ /dev/null @@ -1,199 +0,0 @@ -"""Test the PersistentPg module. - -Note: -We don't test performance here, so the test does not predicate -whether PersistentPg actually will help in improving performance or not. -We also assume that the underlying SteadyPg connections are tested. - -Copyright and credit info: - -* This test was contributed by Christoph Zwerschke - -""" - -import unittest - -import DBUtils.Tests.mock_pg as pg - -from DBUtils.PersistentPg import PersistentPg - -__version__ = '1.4' - - -class TestPersistentPg(unittest.TestCase): - - def test0_CheckVersion(self): - from DBUtils import __version__ as DBUtilsVersion - self.assertEqual(DBUtilsVersion, __version__) - from DBUtils.PersistentPg import __version__ as PersistentPgVersion - self.assertEqual(PersistentPgVersion, __version__) - self.assertEqual(PersistentPg.version, __version__) - - def test1_Close(self): - for closeable in (False, True): - persist = PersistentPg(closeable=closeable) - db = persist.connection() - self.assertTrue(db._con.db and db._con.valid) - db.close() - self.assertTrue( - closeable ^ (db._con.db is not None and db._con.valid)) - db.close() - self.assertTrue( - closeable ^ (db._con.db is not None and db._con.valid)) - db._close() - self.assertFalse(db._con.db and db._con.valid) - db._close() - self.assertFalse(db._con.db and db._con.valid) - - def test2_Threads(self): - numThreads = 3 - persist = PersistentPg() - try: - from queue import Queue, Empty - except ImportError: # Python 2 - from Queue import Queue, Empty - queryQueue, resultQueue = [], [] - for i in range(numThreads): - queryQueue.append(Queue(1)) - resultQueue.append(Queue(1)) - - def runQueries(i): - this_db = persist.connection().db - while 1: - try: - try: - q = queryQueue[i].get(1, 1) - except TypeError: - q = queryQueue[i].get(1) - except Empty: - q = 
None - if not q: - break - db = persist.connection() - if db.db != this_db: - r = 'error - not persistent' - else: - if q == 'ping': - r = 'ok - thread alive' - elif q == 'close': - db.db.close() - r = 'ok - connection closed' - else: - r = db.query(q) - r = '%d(%d): %s' % (i, db._usage, r) - try: - resultQueue[i].put(r, 1, 1) - except TypeError: - resultQueue[i].put(r, 1) - db.close() - - from threading import Thread - threads = [] - for i in range(numThreads): - thread = Thread(target=runQueries, args=(i,)) - threads.append(thread) - thread.start() - for i in range(numThreads): - try: - queryQueue[i].put('ping', 1, 1) - except TypeError: - queryQueue[i].put('ping', 1) - for i in range(numThreads): - try: - r = resultQueue[i].get(1, 1) - except TypeError: - r = resultQueue[i].get(1) - self.assertEqual(r, '%d(0): ok - thread alive' % i) - self.assertTrue(threads[i].is_alive()) - for i in range(numThreads): - for j in range(i + 1): - try: - queryQueue[i].put('select test%d' % j, 1, 1) - r = resultQueue[i].get(1, 1) - except TypeError: - queryQueue[i].put('select test%d' % j, 1) - r = resultQueue[i].get(1) - self.assertEqual(r, '%d(%d): test%d' % (i, j + 1, j)) - try: - queryQueue[1].put('select test4', 1, 1) - r = resultQueue[1].get(1, 1) - except TypeError: - queryQueue[1].put('select test4', 1) - r = resultQueue[1].get(1) - self.assertEqual(r, '1(3): test4') - try: - queryQueue[1].put('close', 1, 1) - r = resultQueue[1].get(1, 1) - except TypeError: - queryQueue[1].put('close', 1) - r = resultQueue[1].get(1) - self.assertEqual(r, '1(3): ok - connection closed') - for j in range(2): - try: - queryQueue[1].put('select test%d' % j, 1, 1) - r = resultQueue[1].get(1, 1) - except TypeError: - queryQueue[1].put('select test%d' % j, 1) - r = resultQueue[1].get(1) - self.assertEqual(r, '1(%d): test%d' % (j + 1, j)) - for i in range(numThreads): - self.assertTrue(threads[i].is_alive()) - try: - queryQueue[i].put('ping', 1, 1) - except TypeError: - queryQueue[i].put('ping', 
1) - for i in range(numThreads): - try: - r = resultQueue[i].get(1, 1) - except TypeError: - r = resultQueue[i].get(1) - self.assertEqual(r, '%d(%d): ok - thread alive' % (i, i + 1)) - self.assertTrue(threads[i].is_alive()) - for i in range(numThreads): - try: - queryQueue[i].put(None, 1, 1) - except TypeError: - queryQueue[i].put(None, 1) - - def test3_MaxUsage(self): - persist = PersistentPg(20) - db = persist.connection() - self.assertEqual(db._maxusage, 20) - for i in range(100): - r = db.query('select test%d' % i) - self.assertEqual(r, 'test%d' % i) - self.assertTrue(db.db.status) - j = i % 20 + 1 - self.assertEqual(db._usage, j) - self.assertEqual(db.num_queries, j) - - def test4_SetSession(self): - persist = PersistentPg(3, ('set datestyle',)) - db = persist.connection() - self.assertEqual(db._maxusage, 3) - self.assertEqual(db._setsession_sql, ('set datestyle',)) - self.assertEqual(db.db.session, ['datestyle']) - db.query('set test') - for i in range(3): - self.assertEqual(db.db.session, ['datestyle', 'test']) - db.query('select test') - self.assertEqual(db.db.session, ['datestyle']) - - def test5_FailedTransaction(self): - persist = PersistentPg() - db = persist.connection() - db._con.close() - self.assertEqual(db.query('select test'), 'test') - db.begin() - db._con.close() - self.assertRaises(pg.InternalError, db.query, 'select test') - self.assertEqual(db.query('select test'), 'test') - db.begin() - self.assertEqual(db.query('select test'), 'test') - db.rollback() - db._con.close() - self.assertEqual(db.query('select test'), 'test') - - -if __name__ == '__main__': - unittest.main() diff --git a/DBUtils/Tests/TestPooledDB.py b/DBUtils/Tests/TestPooledDB.py deleted file mode 100644 index 04ee5b8..0000000 --- a/DBUtils/Tests/TestPooledDB.py +++ /dev/null @@ -1,1238 +0,0 @@ -"""Test the PooledDB module. - -Note: -We don't test performance here, so the test does not predicate -whether PooledDB actually will help in improving performance or not. 
-We also assume that the underlying SteadyDB connections are tested. - -Copyright and credit info: - -* This test was contributed by Christoph Zwerschke - -""" - -import unittest - -import DBUtils.Tests.mock_db as dbapi - -from DBUtils.PooledDB import ( - PooledDB, SharedDBConnection, InvalidConnection, TooManyConnections) - -__version__ = '1.4' - - -class TestPooledDB(unittest.TestCase): - - def test00_CheckVersion(self): - from DBUtils import __version__ as DBUtilsVersion - self.assertEqual(DBUtilsVersion, __version__) - from DBUtils.PooledDB import __version__ as PooledDBVersion - self.assertEqual(PooledDBVersion, __version__) - self.assertEqual(PooledDB.version, __version__) - - def test01_NoThreadsafety(self): - from DBUtils.PooledDB import NotSupportedError - for threadsafety in (None, 0): - dbapi.threadsafety = threadsafety - self.assertRaises(NotSupportedError, PooledDB, dbapi) - - def test02_Threadsafety(self): - for threadsafety in (1, 2, 3): - dbapi.threadsafety = threadsafety - pool = PooledDB(dbapi, 0, 0, 1) - self.assertTrue(hasattr(pool, '_maxshared')) - if threadsafety > 1: - self.assertEqual(pool._maxshared, 1) - self.assertTrue(hasattr(pool, '_shared_cache')) - else: - self.assertEqual(pool._maxshared, 0) - self.assertFalse(hasattr(pool, '_shared_cache')) - - def test03_CreateConnection(self): - for threadsafety in (1, 2): - dbapi.threadsafety = threadsafety - shareable = threadsafety > 1 - pool = PooledDB( - dbapi, 1, 1, 1, 0, False, None, None, True, None, None, - 'PooledDBTestDB', user='PooledDBTestUser') - self.assertTrue(hasattr(pool, '_idle_cache')) - self.assertEqual(len(pool._idle_cache), 1) - if shareable: - self.assertTrue(hasattr(pool, '_shared_cache')) - self.assertEqual(len(pool._shared_cache), 0) - else: - self.assertFalse(hasattr(pool, '_shared_cache')) - self.assertTrue(hasattr(pool, '_maxusage')) - self.assertIsNone(pool._maxusage) - self.assertTrue(hasattr(pool, '_setsession')) - self.assertIsNone(pool._setsession) - con = 
pool._idle_cache[0] - from DBUtils.SteadyDB import SteadyDBConnection - self.assertTrue(isinstance(con, SteadyDBConnection)) - self.assertTrue(hasattr(con, '_maxusage')) - self.assertEqual(con._maxusage, 0) - self.assertTrue(hasattr(con, '_setsession_sql')) - self.assertIsNone(con._setsession_sql) - db = pool.connection() - self.assertEqual(len(pool._idle_cache), 0) - if shareable: - self.assertEqual(len(pool._shared_cache), 1) - self.assertTrue(hasattr(db, '_con')) - self.assertEqual(db._con, con) - self.assertTrue(hasattr(db, 'cursor')) - self.assertTrue(hasattr(db, '_usage')) - self.assertEqual(db._usage, 0) - self.assertTrue(hasattr(con, '_con')) - db_con = con._con - self.assertTrue(hasattr(db_con, 'database')) - self.assertEqual(db_con.database, 'PooledDBTestDB') - self.assertTrue(hasattr(db_con, 'user')) - self.assertEqual(db_con.user, 'PooledDBTestUser') - self.assertTrue(hasattr(db_con, 'open_cursors')) - self.assertEqual(db_con.open_cursors, 0) - self.assertTrue(hasattr(db_con, 'num_uses')) - self.assertEqual(db_con.num_uses, 0) - self.assertTrue(hasattr(db_con, 'num_queries')) - self.assertEqual(db_con.num_queries, 0) - cursor = db.cursor() - self.assertEqual(db_con.open_cursors, 1) - cursor.execute('select test') - r = cursor.fetchone() - cursor.close() - self.assertEqual(db_con.open_cursors, 0) - self.assertEqual(r, 'test') - self.assertEqual(db_con.num_queries, 1) - self.assertEqual(db._usage, 1) - cursor = db.cursor() - self.assertEqual(db_con.open_cursors, 1) - cursor.execute('set sessiontest') - cursor2 = db.cursor() - self.assertEqual(db_con.open_cursors, 2) - cursor2.close() - self.assertEqual(db_con.open_cursors, 1) - cursor.close() - self.assertEqual(db_con.open_cursors, 0) - self.assertEqual(db_con.num_queries, 1) - self.assertEqual(db._usage, 2) - self.assertEqual( - db_con.session, ['rollback', 'sessiontest']) - pool = PooledDB(dbapi, 1, 1, 1) - self.assertEqual(len(pool._idle_cache), 1) - if shareable: - 
self.assertEqual(len(pool._shared_cache), 0) - db = pool.connection() - self.assertEqual(len(pool._idle_cache), 0) - if shareable: - self.assertEqual(len(pool._shared_cache), 1) - db.close() - self.assertEqual(len(pool._idle_cache), 1) - if shareable: - self.assertEqual(len(pool._shared_cache), 0) - db = pool.connection(True) - self.assertEqual(len(pool._idle_cache), 0) - if shareable: - self.assertEqual(len(pool._shared_cache), 1) - db.close() - self.assertEqual(len(pool._idle_cache), 1) - if shareable: - self.assertEqual(len(pool._shared_cache), 0) - db = pool.connection(False) - self.assertEqual(len(pool._idle_cache), 0) - if shareable: - self.assertEqual(len(pool._shared_cache), 0) - self.assertEqual(db._usage, 0) - db_con = db._con._con - self.assertIsNone(db_con.database) - self.assertIsNone(db_con.user) - db.close() - self.assertEqual(len(pool._idle_cache), 1) - if shareable: - self.assertEqual(len(pool._shared_cache), 0) - db = pool.dedicated_connection() - self.assertEqual(len(pool._idle_cache), 0) - if shareable: - self.assertEqual(len(pool._shared_cache), 0) - self.assertEqual(db._usage, 0) - db_con = db._con._con - self.assertIsNone(db_con.database) - self.assertIsNone(db_con.user) - db.close() - self.assertEqual(len(pool._idle_cache), 1) - if shareable: - self.assertEqual(len(pool._shared_cache), 0) - pool = PooledDB(dbapi, 0, 0, 0, 0, False, 3, ('set datestyle',)) - self.assertEqual(pool._maxusage, 3) - self.assertEqual(pool._setsession, ('set datestyle',)) - con = pool.connection()._con - self.assertEqual(con._maxusage, 3) - self.assertEqual(con._setsession_sql, ('set datestyle',)) - - def test04_CloseConnection(self): - for threadsafety in (1, 2): - dbapi.threadsafety = threadsafety - shareable = threadsafety > 1 - pool = PooledDB( - dbapi, 0, 1, 1, 0, False, None, None, True, None, None, - 'PooledDBTestDB', user='PooledDBTestUser') - self.assertTrue(hasattr(pool, '_idle_cache')) - self.assertEqual(len(pool._idle_cache), 0) - db = pool.connection() 
- self.assertTrue(hasattr(db, '_con')) - con = db._con - self.assertEqual(len(pool._idle_cache), 0) - if shareable: - self.assertEqual(len(pool._shared_cache), 1) - self.assertTrue(hasattr(db, '_shared_con')) - shared_con = db._shared_con - self.assertEqual(pool._shared_cache[0], shared_con) - self.assertTrue(hasattr(shared_con, 'shared')) - self.assertEqual(shared_con.shared, 1) - self.assertTrue(hasattr(shared_con, 'con')) - self.assertEqual(shared_con.con, con) - from DBUtils.SteadyDB import SteadyDBConnection - self.assertTrue(isinstance(con, SteadyDBConnection)) - self.assertTrue(hasattr(con, '_con')) - db_con = con._con - self.assertTrue(hasattr(db_con, 'num_queries')) - self.assertEqual(db._usage, 0) - self.assertEqual(db_con.num_queries, 0) - db.cursor().execute('select test') - self.assertEqual(db._usage, 1) - self.assertEqual(db_con.num_queries, 1) - db.close() - self.assertIsNone(db._con) - if shareable: - self.assertIsNone(db._shared_con) - self.assertEqual(shared_con.shared, 0) - self.assertRaises(InvalidConnection, getattr, db, '_usage') - self.assertFalse(hasattr(db_con, '_num_queries')) - self.assertEqual(len(pool._idle_cache), 1) - self.assertEqual(pool._idle_cache[0]._con, db_con) - if shareable: - self.assertEqual(len(pool._shared_cache), 0) - db.close() - if shareable: - self.assertEqual(shared_con.shared, 0) - db = pool.connection() - self.assertEqual(db._con, con) - self.assertEqual(len(pool._idle_cache), 0) - if shareable: - self.assertEqual(len(pool._shared_cache), 1) - shared_con = db._shared_con - self.assertEqual(pool._shared_cache[0], shared_con) - self.assertEqual(shared_con.con, con) - self.assertEqual(shared_con.shared, 1) - self.assertEqual(db._usage, 1) - self.assertEqual(db_con.num_queries, 1) - self.assertTrue(hasattr(db_con, 'database')) - self.assertEqual(db_con.database, 'PooledDBTestDB') - self.assertTrue(hasattr(db_con, 'user')) - self.assertEqual(db_con.user, 'PooledDBTestUser') - db.cursor().execute('select test') - 
self.assertEqual(db_con.num_queries, 2) - db.cursor().execute('select test') - self.assertEqual(db_con.num_queries, 3) - db.close() - self.assertEqual(len(pool._idle_cache), 1) - self.assertEqual(pool._idle_cache[0]._con, db_con) - if shareable: - self.assertEqual(len(pool._shared_cache), 0) - db = pool.connection(False) - self.assertEqual(db._con, con) - self.assertEqual(len(pool._idle_cache), 0) - if shareable: - self.assertEqual(len(pool._shared_cache), 0) - db.close() - self.assertEqual(len(pool._idle_cache), 1) - if shareable: - self.assertEqual(len(pool._shared_cache), 0) - - def test05_CloseAll(self): - for threadsafety in (1, 2): - dbapi.threadsafety = threadsafety - shareable = threadsafety > 1 - pool = PooledDB(dbapi, 10) - self.assertEqual(len(pool._idle_cache), 10) - pool.close() - self.assertEqual(len(pool._idle_cache), 0) - pool = PooledDB(dbapi, 10) - closed = ['no'] - - def close(what=closed): - what[0] = 'yes' - - pool._idle_cache[7]._con.close = close - self.assertEqual(closed, ['no']) - del pool - self.assertEqual(closed, ['yes']) - pool = PooledDB(dbapi, 10, 10, 5) - self.assertEqual(len(pool._idle_cache), 10) - if shareable: - self.assertEqual(len(pool._shared_cache), 0) - cache = [] - for i in range(5): - cache.append(pool.connection()) - self.assertEqual(len(pool._idle_cache), 5) - if shareable: - self.assertEqual(len(pool._shared_cache), 5) - else: - self.assertEqual(len(pool._idle_cache), 5) - pool.close() - self.assertEqual(len(pool._idle_cache), 0) - if shareable: - self.assertEqual(len(pool._shared_cache), 0) - pool = PooledDB(dbapi, 10, 10, 5) - closed = [] - - def close_idle(what=closed): - what.append('idle') - - def close_shared(what=closed): - what.append('shared') - - if shareable: - cache = [] - for i in range(5): - cache.append(pool.connection()) - pool._shared_cache[3].con.close = close_shared - else: - pool._idle_cache[7]._con.close = close_shared - pool._idle_cache[3]._con.close = close_idle - self.assertEqual(closed, []) - 
del pool - if shareable: - del cache - self.assertEqual(closed, ['idle', 'shared']) - - def test06_ShareableConnection(self): - for threadsafety in (1, 2): - dbapi.threadsafety = threadsafety - shareable = threadsafety > 1 - pool = PooledDB(dbapi, 0, 1, 2) - self.assertEqual(len(pool._idle_cache), 0) - if shareable: - self.assertEqual(len(pool._shared_cache), 0) - db1 = pool.connection() - self.assertEqual(len(pool._idle_cache), 0) - if shareable: - self.assertEqual(len(pool._shared_cache), 1) - db2 = pool.connection() - self.assertNotEqual(db1._con, db2._con) - self.assertEqual(len(pool._idle_cache), 0) - if shareable: - self.assertEqual(len(pool._shared_cache), 2) - db3 = pool.connection() - self.assertEqual(len(pool._idle_cache), 0) - if shareable: - self.assertEqual(len(pool._shared_cache), 2) - self.assertEqual(db3._con, db1._con) - self.assertEqual(db1._shared_con.shared, 2) - self.assertEqual(db2._shared_con.shared, 1) - else: - self.assertNotEqual(db3._con, db1._con) - self.assertNotEqual(db3._con, db2._con) - db4 = pool.connection() - self.assertEqual(len(pool._idle_cache), 0) - if shareable: - self.assertEqual(len(pool._shared_cache), 2) - self.assertEqual(db4._con, db2._con) - self.assertEqual(db1._shared_con.shared, 2) - self.assertEqual(db2._shared_con.shared, 2) - else: - self.assertNotEqual(db4._con, db1._con) - self.assertNotEqual(db4._con, db2._con) - self.assertNotEqual(db4._con, db3._con) - db5 = pool.connection(False) - self.assertNotEqual(db5._con, db1._con) - self.assertNotEqual(db5._con, db2._con) - self.assertNotEqual(db5._con, db3._con) - self.assertNotEqual(db5._con, db4._con) - self.assertEqual(len(pool._idle_cache), 0) - if shareable: - self.assertEqual(len(pool._shared_cache), 2) - self.assertEqual(db1._shared_con.shared, 2) - self.assertEqual(db2._shared_con.shared, 2) - db5.close() - self.assertEqual(len(pool._idle_cache), 1) - db5 = pool.connection() - if shareable: - self.assertEqual(len(pool._idle_cache), 1) - 
self.assertEqual(len(pool._shared_cache), 2) - self.assertEqual(db5._shared_con.shared, 3) - else: - self.assertEqual(len(pool._idle_cache), 0) - pool = PooledDB(dbapi, 0, 0, 1) - self.assertEqual(len(pool._idle_cache), 0) - db1 = pool.connection(False) - if shareable: - self.assertEqual(len(pool._shared_cache), 0) - db2 = pool.connection() - if shareable: - self.assertEqual(len(pool._shared_cache), 1) - db3 = pool.connection() - if shareable: - self.assertEqual(len(pool._shared_cache), 1) - self.assertEqual(db2._con, db3._con) - else: - self.assertNotEqual(db2._con, db3._con) - del db3 - if shareable: - self.assertEqual(len(pool._idle_cache), 0) - self.assertEqual(len(pool._shared_cache), 1) - else: - self.assertEqual(len(pool._idle_cache), 1) - del db2 - if shareable: - self.assertEqual(len(pool._idle_cache), 1) - self.assertEqual(len(pool._shared_cache), 0) - else: - self.assertEqual(len(pool._idle_cache), 2) - del db1 - if shareable: - self.assertEqual(len(pool._idle_cache), 2) - self.assertEqual(len(pool._shared_cache), 0) - else: - self.assertEqual(len(pool._idle_cache), 3) - - def test08_MinMaxCached(self): - for threadsafety in (1, 2): - dbapi.threadsafety = threadsafety - shareable = threadsafety > 1 - pool = PooledDB(dbapi, 3) - self.assertEqual(len(pool._idle_cache), 3) - cache = [pool.connection() for i in range(3)] - self.assertEqual(len(pool._idle_cache), 0) - del cache - self.assertEqual(len(pool._idle_cache), 3) - cache = [pool.connection() for i in range(6)] - self.assertEqual(len(pool._idle_cache), 0) - del cache - self.assertEqual(len(pool._idle_cache), 6) - pool = PooledDB(dbapi, 0, 3) - self.assertEqual(len(pool._idle_cache), 0) - cache = [pool.connection() for i in range(3)] - self.assertEqual(len(pool._idle_cache), 0) - del cache - self.assertEqual(len(pool._idle_cache), 3) - cache = [pool.connection() for i in range(6)] - self.assertEqual(len(pool._idle_cache), 0) - del cache - self.assertEqual(len(pool._idle_cache), 3) - pool = 
PooledDB(dbapi, 3, 3) - self.assertEqual(len(pool._idle_cache), 3) - cache = [pool.connection() for i in range(3)] - self.assertEqual(len(pool._idle_cache), 0) - del cache - self.assertEqual(len(pool._idle_cache), 3) - cache = [pool.connection() for i in range(6)] - self.assertEqual(len(pool._idle_cache), 0) - del cache - self.assertEqual(len(pool._idle_cache), 3) - pool = PooledDB(dbapi, 3, 2) - self.assertEqual(len(pool._idle_cache), 3) - cache = [pool.connection() for i in range(4)] - self.assertEqual(len(pool._idle_cache), 0) - del cache - self.assertEqual(len(pool._idle_cache), 3) - pool = PooledDB(dbapi, 2, 5) - self.assertEqual(len(pool._idle_cache), 2) - cache = [pool.connection() for i in range(10)] - self.assertEqual(len(pool._idle_cache), 0) - del cache - self.assertEqual(len(pool._idle_cache), 5) - pool = PooledDB(dbapi, 1, 2, 3) - self.assertEqual(len(pool._idle_cache), 1) - cache = [pool.connection(False) for i in range(4)] - self.assertEqual(len(pool._idle_cache), 0) - if shareable: - self.assertEqual(len(pool._shared_cache), 0) - del cache - self.assertEqual(len(pool._idle_cache), 2) - cache = [pool.connection() for i in range(10)] - self.assertEqual(len(pool._idle_cache), 0) - if shareable: - self.assertEqual(len(pool._shared_cache), 3) - del cache - self.assertEqual(len(pool._idle_cache), 2) - if shareable: - self.assertEqual(len(pool._shared_cache), 0) - pool = PooledDB(dbapi, 1, 3, 2) - self.assertEqual(len(pool._idle_cache), 1) - cache = [pool.connection(False) for i in range(4)] - self.assertEqual(len(pool._idle_cache), 0) - if shareable: - self.assertEqual(len(pool._shared_cache), 0) - del cache - self.assertEqual(len(pool._idle_cache), 3) - cache = [pool.connection() for i in range(10)] - if shareable: - self.assertEqual(len(pool._idle_cache), 1) - self.assertEqual(len(pool._shared_cache), 2) - else: - self.assertEqual(len(pool._idle_cache), 0) - del cache - self.assertEqual(len(pool._idle_cache), 3) - if shareable: - 
self.assertEqual(len(pool._shared_cache), 0) - - def test08_MaxShared(self): - for threadsafety in (1, 2): - dbapi.threadsafety = threadsafety - shareable = threadsafety > 1 - pool = PooledDB(dbapi) - self.assertEqual(len(pool._idle_cache), 0) - cache = [pool.connection() for i in range(10)] - self.assertEqual(len(cache), 10) - self.assertEqual(len(pool._idle_cache), 0) - pool = PooledDB(dbapi, 1, 1, 0) - self.assertEqual(len(pool._idle_cache), 1) - cache = [pool.connection() for i in range(10)] - self.assertEqual(len(cache), 10) - self.assertEqual(len(pool._idle_cache), 0) - pool = PooledDB(dbapi, 0, 0, 1) - cache = [pool.connection() for i in range(10)] - self.assertEqual(len(cache), 10) - self.assertEqual(len(pool._idle_cache), 0) - if shareable: - self.assertEqual(len(pool._shared_cache), 1) - pool = PooledDB(dbapi, 1, 1, 1) - self.assertEqual(len(pool._idle_cache), 1) - cache = [pool.connection() for i in range(10)] - self.assertEqual(len(cache), 10) - self.assertEqual(len(pool._idle_cache), 0) - if shareable: - self.assertEqual(len(pool._shared_cache), 1) - pool = PooledDB(dbapi, 0, 0, 7) - cache = [pool.connection(False) for i in range(3)] - self.assertEqual(len(cache), 3) - self.assertEqual(len(pool._idle_cache), 0) - if shareable: - self.assertEqual(len(pool._shared_cache), 0) - cache = [pool.connection() for i in range(10)] - self.assertEqual(len(cache), 10) - self.assertEqual(len(pool._idle_cache), 3) - if shareable: - self.assertEqual(len(pool._shared_cache), 7) - - def test09_SortShared(self): - dbapi.threadsafety = 2 - pool = PooledDB(dbapi, 0, 4, 4) - cache = [] - for i in range(6): - db = pool.connection() - db.cursor().execute('select test') - cache.append(db) - for i, db in enumerate(cache): - self.assertEqual(db._shared_con.shared, 1 if 2 <= i < 4 else 2) - cache[2].begin() - cache[3].begin() - db = pool.connection() - self.assertIs(db._con, cache[0]._con) - db.close() - cache[3].rollback() - db = pool.connection() - self.assertIs(db._con, 
cache[3]._con) - - def test10_EquallyShared(self): - for threadsafety in (1, 2): - dbapi.threadsafety = threadsafety - shareable = threadsafety > 1 - pool = PooledDB(dbapi, 5, 5, 5) - self.assertEqual(len(pool._idle_cache), 5) - for i in range(15): - db = pool.connection(False) - db.cursor().execute('select test') - db.close() - self.assertEqual(len(pool._idle_cache), 5) - for i in range(5): - con = pool._idle_cache[i] - self.assertEqual(con._usage, 3) - self.assertEqual(con._con.num_queries, 3) - cache = [] - for i in range(35): - db = pool.connection() - db.cursor().execute('select test') - cache.append(db) - del db - self.assertEqual(len(pool._idle_cache), 0) - if shareable: - self.assertEqual(len(pool._shared_cache), 5) - for i in range(5): - con = pool._shared_cache[i] - self.assertEqual(con.shared, 7) - con = con.con - self.assertEqual(con._usage, 10) - self.assertEqual(con._con.num_queries, 10) - del cache - self.assertEqual(len(pool._idle_cache), 5) - if shareable: - self.assertEqual(len(pool._shared_cache), 0) - - def test11_ManyShared(self): - for threadsafety in (1, 2): - dbapi.threadsafety = threadsafety - shareable = threadsafety > 1 - pool = PooledDB(dbapi, 0, 0, 5) - cache = [] - for i in range(35): - db = pool.connection() - db.cursor().execute('select test1') - db.cursor().execute('select test2') - db.cursor().callproc('test3') - cache.append(db) - del db - self.assertEqual(len(pool._idle_cache), 0) - if shareable: - self.assertEqual(len(pool._shared_cache), 5) - for i in range(5): - con = pool._shared_cache[i] - self.assertEqual(con.shared, 7) - con = con.con - self.assertEqual(con._usage, 21) - self.assertEqual(con._con.num_queries, 14) - cache[3] = cache[8] = cache[33] = None - cache[12] = cache[17] = cache[34] = None - self.assertEqual(len(pool._shared_cache), 5) - self.assertEqual(pool._shared_cache[0].shared, 7) - self.assertEqual(pool._shared_cache[1].shared, 7) - self.assertEqual(pool._shared_cache[2].shared, 5) - 
self.assertEqual(pool._shared_cache[3].shared, 4) - self.assertEqual(pool._shared_cache[4].shared, 6) - for db in cache: - if db: - db.cursor().callproc('test4') - for i in range(6): - db = pool.connection() - db.cursor().callproc('test4') - cache.append(db) - del db - for i in range(5): - con = pool._shared_cache[i] - self.assertEqual(con.shared, 7) - con = con.con - self.assertEqual(con._usage, 28) - self.assertEqual(con._con.num_queries, 14) - del cache - if shareable: - self.assertEqual(len(pool._idle_cache), 5) - self.assertEqual(len(pool._shared_cache), 0) - else: - self.assertEqual(len(pool._idle_cache), 35) - - def test12_Rollback(self): - for threadsafety in (1, 2): - dbapi.threadsafety = threadsafety - pool = PooledDB(dbapi, 0, 1) - self.assertEqual(len(pool._idle_cache), 0) - db = pool.connection(False) - self.assertEqual(len(pool._idle_cache), 0) - self.assertEqual(db._con._con.open_cursors, 0) - cursor = db.cursor() - self.assertEqual(db._con._con.open_cursors, 1) - cursor.execute('set doit1') - db.commit() - cursor.execute('set dont1') - cursor.close() - self.assertEqual(db._con._con.open_cursors, 0) - del db - self.assertEqual(len(pool._idle_cache), 1) - db = pool.connection(False) - self.assertEqual(len(pool._idle_cache), 0) - self.assertEqual(db._con._con.open_cursors, 0) - cursor = db.cursor() - self.assertEqual(db._con._con.open_cursors, 1) - cursor.execute('set doit2') - cursor.close() - self.assertEqual(db._con._con.open_cursors, 0) - db.commit() - session = db._con._con.session - db.close() - self.assertEqual(session, [ - 'doit1', 'commit', 'dont1', 'rollback', - 'doit2', 'commit', 'rollback']) - - def test13_MaxConnections(self): - for threadsafety in (1, 2): - dbapi.threadsafety = threadsafety - shareable = threadsafety > 1 - pool = PooledDB(dbapi, 1, 2, 2, 3) - self.assertTrue(hasattr(pool, '_maxconnections')) - self.assertEqual(pool._maxconnections, 3) - self.assertTrue(hasattr(pool, '_connections')) - self.assertEqual(pool._connections, 
0) - self.assertEqual(len(pool._idle_cache), 1) - cache = [] - for i in range(3): - cache.append(pool.connection(False)) - self.assertEqual(pool._connections, 3) - self.assertEqual(len(pool._idle_cache), 0) - if shareable: - self.assertEqual(len(pool._shared_cache), 0) - self.assertRaises(TooManyConnections, pool.connection, 0) - self.assertRaises(TooManyConnections, pool.connection) - cache = [] - self.assertEqual(pool._connections, 0) - self.assertEqual(len(pool._idle_cache), 2) - if shareable: - self.assertEqual(len(pool._shared_cache), 0) - for i in range(3): - cache.append(pool.connection()) - self.assertEqual(len(pool._idle_cache), 0) - if shareable: - self.assertEqual(pool._connections, 2) - self.assertEqual(len(pool._shared_cache), 2) - cache.append(pool.connection(False)) - self.assertEqual(pool._connections, 3) - self.assertEqual(len(pool._shared_cache), 2) - else: - self.assertEqual(pool._connections, 3) - self.assertRaises(TooManyConnections, pool.connection, 0) - if shareable: - cache.append(pool.connection(True)) - self.assertEqual(pool._connections, 3) - else: - self.assertRaises(TooManyConnections, pool.connection) - del cache - self.assertEqual(pool._connections, 0) - self.assertEqual(len(pool._idle_cache), 2) - pool = PooledDB(dbapi, 0, 1, 1, 1) - self.assertEqual(pool._maxconnections, 1) - self.assertEqual(pool._connections, 0) - self.assertEqual(len(pool._idle_cache), 0) - db = pool.connection(False) - self.assertEqual(pool._connections, 1) - self.assertEqual(len(pool._idle_cache), 0) - if shareable: - self.assertEqual(len(pool._shared_cache), 0) - self.assertRaises(TooManyConnections, pool.connection, 0) - self.assertRaises(TooManyConnections, pool.connection) - del db - self.assertEqual(pool._connections, 0) - self.assertEqual(len(pool._idle_cache), 1) - cache = [pool.connection()] - self.assertEqual(pool._connections, 1) - self.assertEqual(len(pool._idle_cache), 0) - if shareable: - self.assertEqual(len(pool._shared_cache), 1) - 
cache.append(pool.connection()) - self.assertEqual(pool._connections, 1) - self.assertEqual(len(pool._shared_cache), 1) - self.assertEqual(pool._shared_cache[0].shared, 2) - else: - self.assertRaises(TooManyConnections, pool.connection) - self.assertRaises(TooManyConnections, pool.connection, 0) - if shareable: - cache.append(pool.connection(True)) - self.assertEqual(pool._connections, 1) - self.assertEqual(len(pool._shared_cache), 1) - self.assertEqual(pool._shared_cache[0].shared, 3) - else: - self.assertRaises(TooManyConnections, pool.connection, 1) - del cache - self.assertEqual(pool._connections, 0) - self.assertEqual(len(pool._idle_cache), 1) - if shareable: - self.assertEqual(len(pool._shared_cache), 0) - db = pool.connection(False) - self.assertEqual(pool._connections, 1) - self.assertEqual(len(pool._idle_cache), 0) - del db - self.assertEqual(pool._connections, 0) - self.assertEqual(len(pool._idle_cache), 1) - pool = PooledDB(dbapi, 1, 2, 2, 1) - self.assertEqual(pool._maxconnections, 2) - self.assertEqual(pool._connections, 0) - self.assertEqual(len(pool._idle_cache), 1) - cache = [] - cache.append(pool.connection(False)) - self.assertEqual(pool._connections, 1) - self.assertEqual(len(pool._idle_cache), 0) - cache.append(pool.connection(False)) - self.assertEqual(pool._connections, 2) - self.assertEqual(len(pool._idle_cache), 0) - if shareable: - self.assertEqual(len(pool._shared_cache), 0) - self.assertRaises(TooManyConnections, pool.connection, 0) - self.assertRaises(TooManyConnections, pool.connection) - pool = PooledDB(dbapi, 4, 3, 2, 1, False) - self.assertEqual(pool._maxconnections, 4) - self.assertEqual(pool._connections, 0) - self.assertEqual(len(pool._idle_cache), 4) - cache = [] - for i in range(4): - cache.append(pool.connection(False)) - self.assertEqual(pool._connections, 4) - self.assertEqual(len(pool._idle_cache), 0) - self.assertRaises(TooManyConnections, pool.connection, 0) - self.assertRaises(TooManyConnections, pool.connection) - pool = 
PooledDB(dbapi, 1, 2, 3, 4, False) - self.assertEqual(pool._maxconnections, 4) - self.assertEqual(pool._connections, 0) - self.assertEqual(len(pool._idle_cache), 1) - for i in range(4): - cache.append(pool.connection()) - self.assertEqual(len(pool._idle_cache), 0) - if shareable: - self.assertEqual(pool._connections, 3) - self.assertEqual(len(pool._shared_cache), 3) - cache.append(pool.connection()) - self.assertEqual(pool._connections, 3) - cache.append(pool.connection(False)) - self.assertEqual(pool._connections, 4) - else: - self.assertEqual(pool._connections, 4) - self.assertRaises(TooManyConnections, pool.connection) - self.assertRaises(TooManyConnections, pool.connection, 0) - pool = PooledDB(dbapi, 0, 0, 3, 3, False) - self.assertEqual(pool._maxconnections, 3) - self.assertEqual(pool._connections, 0) - cache = [] - for i in range(3): - cache.append(pool.connection(False)) - self.assertEqual(pool._connections, 3) - self.assertRaises(TooManyConnections, pool.connection, 0) - self.assertRaises(TooManyConnections, pool.connection, 1) - cache = [] - self.assertEqual(pool._connections, 0) - for i in range(3): - cache.append(pool.connection()) - self.assertEqual(pool._connections, 3) - if shareable: - for i in range(3): - cache.append(pool.connection()) - self.assertEqual(pool._connections, 3) - else: - self.assertRaises(TooManyConnections, pool.connection) - self.assertRaises(TooManyConnections, pool.connection, 0) - pool = PooledDB(dbapi, 0, 0, 3) - self.assertEqual(pool._maxconnections, 0) - self.assertEqual(pool._connections, 0) - cache = [] - for i in range(10): - cache.append(pool.connection(False)) - cache.append(pool.connection()) - if shareable: - self.assertEqual(pool._connections, 13) - self.assertEqual(len(pool._shared_cache), 3) - else: - self.assertEqual(pool._connections, 20) - pool = PooledDB(dbapi, 1, 1, 1, 1, True) - self.assertEqual(pool._maxconnections, 1) - self.assertEqual(pool._connections, 0) - self.assertEqual(len(pool._idle_cache), 1) - db 
= pool.connection(False) - self.assertEqual(pool._connections, 1) - self.assertEqual(len(pool._idle_cache), 0) - - def connection(): - db = pool.connection() - cursor = db.cursor() - cursor.execute('set thread') - cursor.close() - db.close() - - from threading import Thread - thread = Thread(target=connection) - thread.start() - thread.join(0.1) - self.assertTrue(thread.is_alive()) - self.assertEqual(pool._connections, 1) - self.assertEqual(len(pool._idle_cache), 0) - if shareable: - self.assertEqual(len(pool._shared_cache), 0) - session = db._con._con.session - self.assertEqual(session, ['rollback']) - del db - thread.join(0.1) - self.assertFalse(thread.is_alive()) - self.assertEqual(pool._connections, 0) - self.assertEqual(len(pool._idle_cache), 1) - if shareable: - self.assertEqual(len(pool._shared_cache), 0) - db = pool.connection(False) - self.assertEqual(pool._connections, 1) - self.assertEqual(len(pool._idle_cache), 0) - self.assertEqual( - session, ['rollback', 'rollback', 'thread', 'rollback']) - del db - - def test14_MaxUsage(self): - for threadsafety in (1, 2): - dbapi.threadsafety = threadsafety - for maxusage in (0, 3, 7): - pool = PooledDB(dbapi, 0, 0, 0, 1, False, maxusage) - self.assertEqual(pool._maxusage, maxusage) - self.assertEqual(len(pool._idle_cache), 0) - db = pool.connection(False) - self.assertEqual(db._con._maxusage, maxusage) - self.assertEqual(len(pool._idle_cache), 0) - self.assertEqual(db._con._con.open_cursors, 0) - self.assertEqual(db._usage, 0) - self.assertEqual(db._con._con.num_uses, 0) - self.assertEqual(db._con._con.num_queries, 0) - for i in range(20): - cursor = db.cursor() - self.assertEqual(db._con._con.open_cursors, 1) - cursor.execute('select test%i' % i) - r = cursor.fetchone() - self.assertEqual(r, 'test%i' % i) - cursor.close() - self.assertEqual(db._con._con.open_cursors, 0) - if maxusage: - j = i % maxusage + 1 - else: - j = i + 1 - self.assertEqual(db._usage, j) - self.assertEqual(db._con._con.num_uses, j) - 
self.assertEqual(db._con._con.num_queries, j) - db.cursor().callproc('test') - self.assertEqual(db._con._con.open_cursors, 0) - self.assertEqual(db._usage, j + 1) - self.assertEqual(db._con._con.num_uses, j + 1) - self.assertEqual(db._con._con.num_queries, j) - - def test15_SetSession(self): - for threadsafety in (1, 2): - dbapi.threadsafety = threadsafety - setsession = ('set time zone', 'set datestyle') - pool = PooledDB(dbapi, 0, 0, 0, 1, False, None, setsession) - self.assertEqual(pool._setsession, setsession) - db = pool.connection(False) - self.assertEqual(db._setsession_sql, setsession) - self.assertEqual( - db._con._con.session, ['time zone', 'datestyle']) - db.cursor().execute('select test') - db.cursor().execute('set test1') - self.assertEqual(db._usage, 2) - self.assertEqual(db._con._con.num_uses, 4) - self.assertEqual(db._con._con.num_queries, 1) - self.assertEqual( - db._con._con.session, ['time zone', 'datestyle', 'test1']) - db.close() - db = pool.connection(False) - self.assertEqual(db._setsession_sql, setsession) - self.assertEqual( - db._con._con.session, - ['time zone', 'datestyle', 'test1', 'rollback']) - db._con._con.close() - db.cursor().execute('select test') - db.cursor().execute('set test2') - self.assertEqual( - db._con._con.session, ['time zone', 'datestyle', 'test2']) - - def test16_OneThreadTwoConnections(self): - for threadsafety in (1, 2): - dbapi.threadsafety = threadsafety - shareable = threadsafety > 1 - pool = PooledDB(dbapi, 2) - db1 = pool.connection() - for i in range(5): - db1.cursor().execute('select test') - db2 = pool.connection() - self.assertNotEqual(db1, db2) - self.assertNotEqual(db1._con, db2._con) - for i in range(7): - db2.cursor().execute('select test') - self.assertEqual(db1._con._con.num_queries, 5) - self.assertEqual(db2._con._con.num_queries, 7) - del db1 - db1 = pool.connection() - self.assertNotEqual(db1, db2) - self.assertNotEqual(db1._con, db2._con) - for i in range(3): - db1.cursor().execute('select test') 
- self.assertEqual(db1._con._con.num_queries, 8) - db2.cursor().execute('select test') - self.assertEqual(db2._con._con.num_queries, 8) - pool = PooledDB(dbapi, 0, 0, 2) - db1 = pool.connection() - for i in range(5): - db1.cursor().execute('select test') - db2 = pool.connection() - self.assertNotEqual(db1, db2) - self.assertNotEqual(db1._con, db2._con) - for i in range(7): - db2.cursor().execute('select test') - self.assertEqual(db1._con._con.num_queries, 5) - self.assertEqual(db2._con._con.num_queries, 7) - del db1 - db1 = pool.connection() - self.assertNotEqual(db1, db2) - self.assertNotEqual(db1._con, db2._con) - for i in range(3): - db1.cursor().execute('select test') - self.assertEqual(db1._con._con.num_queries, 8) - db2.cursor().execute('select test') - self.assertEqual(db2._con._con.num_queries, 8) - pool = PooledDB(dbapi, 0, 0, 1) - db1 = pool.connection() - db2 = pool.connection() - self.assertNotEqual(db1, db2) - if shareable: - self.assertEqual(db1._con, db2._con) - else: - self.assertNotEqual(db1._con, db2._con) - del db1 - db1 = pool.connection(False) - self.assertNotEqual(db1, db2) - self.assertNotEqual(db1._con, db2._con) - - def test17_ThreeThreadsTwoConnections(self): - for threadsafety in (1, 2): - dbapi.threadsafety = threadsafety - pool = PooledDB(dbapi, 2, 2, 0, 2, True) - try: - from queue import Queue, Empty - except ImportError: # Python 2 - from Queue import Queue, Empty - queue = Queue(3) - - def connection(): - try: - queue.put(pool.connection(), 1, 1) - except Exception: - queue.put(pool.connection(), 1) - - from threading import Thread - for i in range(3): - Thread(target=connection).start() - try: - db1 = queue.get(1, 1) - db2 = queue.get(1, 1) - except TypeError: - db1 = queue.get(1) - db2 = queue.get(1) - self.assertNotEqual(db1, db2) - db1_con = db1._con - db2_con = db2._con - self.assertNotEqual(db1_con, db2_con) - try: - self.assertRaises(Empty, queue.get, 1, 0.1) - except TypeError: - self.assertRaises(Empty, queue.get, 0) - del 
db1 - try: - db1 = queue.get(1, 1) - except TypeError: - db1 = queue.get(1) - self.assertNotEqual(db1, db2) - self.assertNotEqual(db1._con, db2._con) - self.assertEqual(db1._con, db1_con) - pool = PooledDB(dbapi, 2, 2, 1, 2, True) - db1 = pool.connection(False) - db2 = pool.connection(False) - self.assertNotEqual(db1, db2) - db1_con = db1._con - db2_con = db2._con - self.assertNotEqual(db1_con, db2_con) - Thread(target=connection).start() - try: - self.assertRaises(Empty, queue.get, 1, 0.1) - except TypeError: - self.assertRaises(Empty, queue.get, 0) - del db1 - try: - db1 = queue.get(1, 1) - except TypeError: - db1 = queue.get(1) - self.assertNotEqual(db1, db2) - self.assertNotEqual(db1._con, db2._con) - self.assertEqual(db1._con, db1_con) - - def test18_PingCheck(self): - Connection = dbapi.Connection - Connection.has_ping = True - Connection.num_pings = 0 - dbapi.threadsafety = 2 - pool = PooledDB(dbapi, 1, 1, 0, 0, False, None, None, True, None, 0) - db = pool.connection() - self.assertTrue(db._con._con.valid) - self.assertEqual(Connection.num_pings, 0) - db._con.close() - db.close() - db = pool.connection() - self.assertFalse(db._con._con.valid) - self.assertEqual(Connection.num_pings, 0) - pool = PooledDB(dbapi, 1, 1, 1, 0, False, None, None, True, None, 0) - db = pool.connection() - self.assertTrue(db._con._con.valid) - self.assertEqual(Connection.num_pings, 0) - db._con.close() - db = pool.connection() - self.assertFalse(db._con._con.valid) - self.assertEqual(Connection.num_pings, 0) - pool = PooledDB(dbapi, 1, 1, 0, 0, False, None, None, True, None, 1) - db = pool.connection() - self.assertTrue(db._con._con.valid) - self.assertEqual(Connection.num_pings, 1) - db._con.close() - db.close() - db = pool.connection() - self.assertTrue(db._con._con.valid) - self.assertEqual(Connection.num_pings, 2) - pool = PooledDB(dbapi, 1, 1, 1, 0, False, None, None, True, None, 1) - db = pool.connection() - self.assertTrue(db._con._con.valid) - 
self.assertEqual(Connection.num_pings, 3) - db._con.close() - db = pool.connection() - self.assertTrue(db._con._con.valid) - self.assertEqual(Connection.num_pings, 4) - pool = PooledDB(dbapi, 1, 1, 1, 0, False, None, None, True, None, 2) - db = pool.connection() - self.assertTrue(db._con._con.valid) - self.assertEqual(Connection.num_pings, 4) - db._con.close() - db = pool.connection() - self.assertFalse(db._con._con.valid) - self.assertEqual(Connection.num_pings, 4) - db.cursor() - self.assertTrue(db._con._con.valid) - self.assertEqual(Connection.num_pings, 5) - pool = PooledDB(dbapi, 1, 1, 1, 0, False, None, None, True, None, 4) - db = pool.connection() - self.assertTrue(db._con._con.valid) - self.assertEqual(Connection.num_pings, 5) - db._con.close() - db = pool.connection() - self.assertFalse(db._con._con.valid) - self.assertEqual(Connection.num_pings, 5) - cursor = db.cursor() - db._con.close() - self.assertFalse(db._con._con.valid) - self.assertEqual(Connection.num_pings, 5) - cursor.execute('select test') - self.assertTrue(db._con._con.valid) - self.assertEqual(Connection.num_pings, 6) - Connection.has_ping = False - Connection.num_pings = 0 - - def test19_FailedTransaction(self): - dbapi.threadsafety = 2 - pool = PooledDB(dbapi, 0, 1, 1) - db = pool.connection() - cursor = db.cursor() - db._con._con.close() - cursor.execute('select test') - db.begin() - db._con._con.close() - self.assertRaises(dbapi.InternalError, cursor.execute, 'select test') - cursor.execute('select test') - db.begin() - db.cancel() - db._con._con.close() - cursor.execute('select test') - pool = PooledDB(dbapi, 1, 1, 0) - db = pool.connection() - cursor = db.cursor() - db._con._con.close() - cursor.execute('select test') - db.begin() - db._con._con.close() - self.assertRaises(dbapi.InternalError, cursor.execute, 'select test') - cursor.execute('select test') - db.begin() - db.cancel() - db._con._con.close() - cursor.execute('select test') - - def test20_SharedInTransaction(self): - 
dbapi.threadsafety = 2 - pool = PooledDB(dbapi, 0, 1, 1) - db = pool.connection() - db.begin() - pool.connection(False) - self.assertRaises(TooManyConnections, pool.connection) - pool = PooledDB(dbapi, 0, 2, 2) - db1 = pool.connection() - db2 = pool.connection() - self.assertIsNot(db2._con, db1._con) - db2.close() - db2 = pool.connection() - self.assertIsNot(db2._con, db1._con) - db = pool.connection() - self.assertIs(db._con, db1._con) - db.close() - db1.begin() - db = pool.connection() - self.assertIs(db._con, db2._con) - db.close() - db2.begin() - pool.connection(False) - self.assertRaises(TooManyConnections, pool.connection) - db1.rollback() - db = pool.connection() - self.assertIs(db._con, db1._con) - - def test21_ResetTransaction(self): - pool = PooledDB(dbapi, 1, 1, 0) - db = pool.connection() - db.begin() - con = db._con - self.assertTrue(con._transaction) - self.assertEqual(con._con.session, ['rollback']) - db.close() - self.assertIs(pool.connection()._con, con) - self.assertFalse(con._transaction) - self.assertEqual(con._con.session, ['rollback'] * 3) - pool = PooledDB(dbapi, 1, 1, 0, reset=False) - db = pool.connection() - db.begin() - con = db._con - self.assertTrue(con._transaction) - self.assertEqual(con._con.session, []) - db.close() - self.assertIs(pool.connection()._con, con) - self.assertFalse(con._transaction) - self.assertEqual(con._con.session, ['rollback']) - - -class TestSharedDBConnection(unittest.TestCase): - - def test01_CreateConnection(self): - db_con = dbapi.connect() - con = SharedDBConnection(db_con) - self.assertEqual(con.con, db_con) - self.assertEqual(con.shared, 1) - - def test01_ShareAndUnshare(self): - con = SharedDBConnection(dbapi.connect()) - self.assertEqual(con.shared, 1) - con.share() - self.assertEqual(con.shared, 2) - con.share() - self.assertEqual(con.shared, 3) - con.unshare() - self.assertEqual(con.shared, 2) - con.unshare() - self.assertEqual(con.shared, 1) - - def test02_Comparison(self): - con1 = 
SharedDBConnection(dbapi.connect()) - con1.con._transaction = False - con2 = SharedDBConnection(dbapi.connect()) - con2.con._transaction = False - self.assertTrue(con1 == con2) - self.assertTrue(con1 <= con2) - self.assertTrue(con1 >= con2) - self.assertFalse(con1 != con2) - self.assertFalse(con1 < con2) - self.assertFalse(con1 > con2) - con2.share() - self.assertFalse(con1 == con2) - self.assertTrue(con1 <= con2) - self.assertFalse(con1 >= con2) - self.assertTrue(con1 != con2) - self.assertTrue(con1 < con2) - self.assertFalse(con1 > con2) - con1.con._transaction = True - self.assertFalse(con1 == con2) - self.assertFalse(con1 <= con2) - self.assertTrue(con1 >= con2) - self.assertTrue(con1 != con2) - self.assertFalse(con1 < con2) - self.assertTrue(con1 > con2) - - -if __name__ == '__main__': - unittest.main() diff --git a/DBUtils/Tests/TestPooledPg.py b/DBUtils/Tests/TestPooledPg.py deleted file mode 100644 index cd4103b..0000000 --- a/DBUtils/Tests/TestPooledPg.py +++ /dev/null @@ -1,314 +0,0 @@ -"""Test the PooledPg module. - -Note: -We don't test performance here, so the test does not predicate -whether PooledPg actually will help in improving performance or not. -We also assume that the underlying SteadyPg connections are tested. 
- -Copyright and credit info: - -* This test was contributed by Christoph Zwerschke - -""" - -import unittest - -import DBUtils.Tests.mock_pg # noqa - -from DBUtils.PooledPg import PooledPg, InvalidConnection - -__version__ = '1.4' - - -class TestPooledPg(unittest.TestCase): - - def test0_CheckVersion(self): - from DBUtils import __version__ as DBUtilsVersion - self.assertEqual(DBUtilsVersion, __version__) - from DBUtils.PooledPg import __version__ as PooledPgVersion - self.assertEqual(PooledPgVersion, __version__) - self.assertEqual(PooledPg.version, __version__) - - def test1_CreateConnection(self): - pool = PooledPg( - 1, 1, 0, False, None, None, False, - 'PooledPgTestDB', user='PooledPgTestUser') - self.assertTrue(hasattr(pool, '_cache')) - self.assertEqual(pool._cache.qsize(), 1) - self.assertTrue(hasattr(pool, '_maxusage')) - self.assertIsNone(pool._maxusage) - self.assertTrue(hasattr(pool, '_setsession')) - self.assertIsNone(pool._setsession) - self.assertTrue(hasattr(pool, '_reset')) - self.assertFalse(pool._reset) - db_con = pool._cache.get(0) - pool._cache.put(db_con, 0) - from DBUtils.SteadyPg import SteadyPgConnection - self.assertTrue(isinstance(db_con, SteadyPgConnection)) - db = pool.connection() - self.assertEqual(pool._cache.qsize(), 0) - self.assertTrue(hasattr(db, '_con')) - self.assertEqual(db._con, db_con) - self.assertTrue(hasattr(db, 'query')) - self.assertTrue(hasattr(db, 'num_queries')) - self.assertEqual(db.num_queries, 0) - self.assertTrue(hasattr(db, '_maxusage')) - self.assertEqual(db._maxusage, 0) - self.assertTrue(hasattr(db, '_setsession_sql')) - self.assertIsNone(db._setsession_sql) - self.assertTrue(hasattr(db, 'dbname')) - self.assertEqual(db.dbname, 'PooledPgTestDB') - self.assertTrue(hasattr(db, 'user')) - self.assertEqual(db.user, 'PooledPgTestUser') - db.query('select test') - self.assertEqual(db.num_queries, 1) - pool = PooledPg(1) - db = pool.connection() - self.assertTrue(hasattr(db, 'dbname')) - 
self.assertIsNone(db.dbname) - self.assertTrue(hasattr(db, 'user')) - self.assertIsNone(db.user) - self.assertTrue(hasattr(db, 'num_queries')) - self.assertEqual(db.num_queries, 0) - pool = PooledPg(0, 0, 0, False, 3, ('set datestyle',),) - self.assertEqual(pool._maxusage, 3) - self.assertEqual(pool._setsession, ('set datestyle',)) - db = pool.connection() - self.assertEqual(db._maxusage, 3) - self.assertEqual(db._setsession_sql, ('set datestyle',)) - - def test2_CloseConnection(self): - pool = PooledPg( - 0, 1, 0, False, None, None, False, - 'PooledPgTestDB', user='PooledPgTestUser') - db = pool.connection() - self.assertTrue(hasattr(db, '_con')) - db_con = db._con - from DBUtils.SteadyPg import SteadyPgConnection - self.assertTrue(isinstance(db_con, SteadyPgConnection)) - self.assertTrue(hasattr(pool, '_cache')) - self.assertEqual(pool._cache.qsize(), 0) - self.assertEqual(db.num_queries, 0) - db.query('select test') - self.assertEqual(db.num_queries, 1) - db.close() - self.assertRaises(InvalidConnection, getattr, db, 'num_queries') - db = pool.connection() - self.assertTrue(hasattr(db, 'dbname')) - self.assertEqual(db.dbname, 'PooledPgTestDB') - self.assertTrue(hasattr(db, 'user')) - self.assertEqual(db.user, 'PooledPgTestUser') - self.assertEqual(db.num_queries, 1) - db.query('select test') - self.assertEqual(db.num_queries, 2) - db = pool.connection() - self.assertEqual(pool._cache.qsize(), 1) - self.assertEqual(pool._cache.get(0), db_con) - - def test3_MinMaxCached(self): - pool = PooledPg(3) - self.assertTrue(hasattr(pool, '_cache')) - self.assertEqual(pool._cache.qsize(), 3) - cache = [pool.connection() for i in range(3)] - self.assertEqual(pool._cache.qsize(), 0) - for i in range(3): - cache.pop().close() - self.assertEqual(pool._cache.qsize(), 3) - for i in range(6): - cache.append(pool.connection()) - self.assertEqual(pool._cache.qsize(), 0) - for i in range(6): - cache.pop().close() - self.assertEqual(pool._cache.qsize(), 6) - pool = PooledPg(3, 4) - 
self.assertTrue(hasattr(pool, '_cache')) - self.assertEqual(pool._cache.qsize(), 3) - cache = [pool.connection() for i in range(3)] - self.assertEqual(pool._cache.qsize(), 0) - for i in range(3): - cache.pop().close() - self.assertEqual(pool._cache.qsize(), 3) - for i in range(6): - cache.append(pool.connection()) - self.assertEqual(pool._cache.qsize(), 0) - for i in range(6): - cache.pop().close() - self.assertEqual(pool._cache.qsize(), 4) - pool = PooledPg(3, 2) - self.assertTrue(hasattr(pool, '_cache')) - self.assertEqual(pool._cache.qsize(), 3) - cache = [pool.connection() for i in range(4)] - self.assertEqual(pool._cache.qsize(), 0) - for i in range(4): - cache.pop().close() - self.assertEqual(pool._cache.qsize(), 3) - pool = PooledPg(2, 5) - self.assertTrue(hasattr(pool, '_cache')) - self.assertEqual(pool._cache.qsize(), 2) - cache = [pool.connection() for i in range(10)] - self.assertEqual(pool._cache.qsize(), 0) - for i in range(10): - cache.pop().close() - self.assertEqual(pool._cache.qsize(), 5) - - def test4_MaxConnections(self): - from DBUtils.PooledPg import TooManyConnections - pool = PooledPg(1, 2, 3) - self.assertEqual(pool._cache.qsize(), 1) - cache = [pool.connection() for i in range(3)] - self.assertEqual(pool._cache.qsize(), 0) - self.assertRaises(TooManyConnections, pool.connection) - pool = PooledPg(0, 1, 1, False) - self.assertEqual(pool._blocking, 0) - self.assertEqual(pool._cache.qsize(), 0) - db = pool.connection() - self.assertEqual(pool._cache.qsize(), 0) - self.assertRaises(TooManyConnections, pool.connection) - del db - del cache - pool = PooledPg(1, 2, 1) - self.assertEqual(pool._cache.qsize(), 1) - cache = [pool.connection()] - self.assertEqual(pool._cache.qsize(), 0) - cache.append(pool.connection()) - self.assertEqual(pool._cache.qsize(), 0) - self.assertRaises(TooManyConnections, pool.connection) - pool = PooledPg(3, 2, 1, False) - self.assertEqual(pool._cache.qsize(), 3) - cache = [pool.connection() for i in range(3)] - 
self.assertEqual(len(cache), 3) - self.assertEqual(pool._cache.qsize(), 0) - self.assertRaises(TooManyConnections, pool.connection) - pool = PooledPg(1, 1, 1, True) - self.assertEqual(pool._blocking, 1) - self.assertEqual(pool._cache.qsize(), 1) - db = pool.connection() - self.assertEqual(pool._cache.qsize(), 0) - - def connection(): - pool.connection().query('set thread') - - from threading import Thread - thread = Thread(target=connection) - thread.start() - thread.join(0.1) - self.assertTrue(thread.is_alive()) - self.assertEqual(pool._cache.qsize(), 0) - session = db._con.session - self.assertEqual(session, []) - del db - thread.join(0.1) - self.assertFalse(thread.is_alive()) - self.assertEqual(pool._cache.qsize(), 1) - db = pool.connection() - self.assertEqual(pool._cache.qsize(), 0) - self.assertEqual(session, ['thread']) - del db - - def test5_OneThreadTwoConnections(self): - pool = PooledPg(2) - db1 = pool.connection() - for i in range(5): - db1.query('select test') - db2 = pool.connection() - self.assertNotEqual(db1, db2) - self.assertNotEqual(db1._con, db2._con) - for i in range(7): - db2.query('select test') - self.assertEqual(db1.num_queries, 5) - self.assertEqual(db2.num_queries, 7) - del db1 - db1 = pool.connection() - self.assertNotEqual(db1, db2) - self.assertNotEqual(db1._con, db2._con) - self.assertTrue(hasattr(db1, 'query')) - for i in range(3): - db1.query('select test') - self.assertEqual(db1.num_queries, 8) - db2.query('select test') - self.assertEqual(db2.num_queries, 8) - - def test6_ThreeThreadsTwoConnections(self): - pool = PooledPg(2, 2, 2, True) - try: - from queue import Queue, Empty - except ImportError: # Python 2 - from Queue import Queue, Empty - queue = Queue(3) - - def connection(): - try: - queue.put(pool.connection(), 1, 1) - except TypeError: - queue.put(pool.connection(), 1) - - from threading import Thread - for i in range(3): - Thread(target=connection).start() - try: - db1 = queue.get(1, 1) - db2 = queue.get(1, 1) - except 
TypeError: - db1 = queue.get(1) - db2 = queue.get(1) - db1_con = db1._con - db2_con = db2._con - self.assertNotEqual(db1, db2) - self.assertNotEqual(db1_con, db2_con) - try: - self.assertRaises(Empty, queue.get, 1, 0.1) - except TypeError: - self.assertRaises(Empty, queue.get, 0) - del db1 - try: - db1 = queue.get(1, 1) - except TypeError: - db1 = queue.get(1) - self.assertNotEqual(db1, db2) - self.assertNotEqual(db1._con, db2._con) - self.assertEqual(db1._con, db1_con) - - def test7_ResetTransaction(self): - pool = PooledPg(1) - db = pool.connection() - db.begin() - con = db._con - self.assertTrue(con._transaction) - db.query('select test') - self.assertEqual(con.num_queries, 1) - db.close() - self.assertIs(pool.connection()._con, con) - self.assertFalse(con._transaction) - self.assertEqual(con.session, ['begin', 'rollback']) - self.assertEqual(con.num_queries, 1) - pool = PooledPg(1, reset=1) - db = pool.connection() - db.begin() - con = db._con - self.assertTrue(con._transaction) - self.assertEqual(con.session, ['rollback', 'begin']) - db.query('select test') - self.assertEqual(con.num_queries, 1) - db.close() - self.assertIs(pool.connection()._con, con) - self.assertFalse(con._transaction) - self.assertEqual( - con.session, ['rollback', 'begin', 'rollback', 'rollback']) - self.assertEqual(con.num_queries, 1) - pool = PooledPg(1, reset=2) - db = pool.connection() - db.begin() - con = db._con - self.assertTrue(con._transaction) - self.assertEqual(con.session, ['begin']) - db.query('select test') - self.assertEqual(con.num_queries, 1) - db.close() - self.assertIs(pool.connection()._con, con) - self.assertFalse(con._transaction) - self.assertEqual(con.session, []) - self.assertEqual(con.num_queries, 0) - - -if __name__ == '__main__': - unittest.main() diff --git a/DBUtils/Tests/TestSimplePooledDB.py b/DBUtils/Tests/TestSimplePooledDB.py deleted file mode 100644 index 28d2764..0000000 --- a/DBUtils/Tests/TestSimplePooledDB.py +++ /dev/null @@ -1,160 +0,0 @@ -"""Test 
the SimplePooledDB module. - -Note: -We don't test performance here, so the test does not predicate -whether SimplePooledDB actually will help in improving performance or not. -We also do not test any real world DB-API 2 module, we just -mock the basic connection functionality of an arbitrary module. - -Copyright and credit info: - -* This test was contributed by Christoph Zwerschke - -""" - -import unittest - -import DBUtils.Tests.mock_db as dbapi - -from DBUtils import SimplePooledDB - -__version__ = '1.4' - - -def versionString(version): - """Create version string.""" - ver = [str(v) for v in version] - numbers, rest = ver[:2 if ver[2] == '0' else 3], ver[3:] - return '.'.join(numbers) + '-'.join(rest) - - -class TestSimplePooledDB(unittest.TestCase): - - def my_dbpool(self, mythreadsafety, maxConnections): - threadsafety = dbapi.threadsafety - dbapi.threadsafety = mythreadsafety - try: - return SimplePooledDB.PooledDB( - dbapi, maxConnections, - 'SimplePooledDBTestDB', 'SimplePooledDBTestUser') - finally: - dbapi.threadsafety = threadsafety - - def test0_check_version(self): - from DBUtils import __version__ as DBUtilsVersion - self.assertEqual(DBUtilsVersion, __version__) - from DBUtils.Properties import version - self.assertEqual(versionString(version), __version__) - self.assertEqual(SimplePooledDB.__version__, __version__) - self.assertEqual(SimplePooledDB.PooledDB.version, __version__) - - def test1_no_threadsafety(self): - for threadsafety in (None, -1, 0, 4): - self.assertRaises( - SimplePooledDB.NotSupportedError, - self.my_dbpool, threadsafety, 1) - - def test2_create_connection(self): - for threadsafety in (1, 2, 3): - dbpool = self.my_dbpool(threadsafety, 1) - db = dbpool.connection() - self.assertTrue(hasattr(db, 'cursor')) - self.assertTrue(hasattr(db, 'open_cursors')) - self.assertEqual(db.open_cursors, 0) - self.assertTrue(hasattr(db, 'database')) - self.assertEqual(db.database, 'SimplePooledDBTestDB') - self.assertTrue(hasattr(db, 'user')) - 
self.assertEqual(db.user, 'SimplePooledDBTestUser') - cursor = db.cursor() - self.assertEqual(db.open_cursors, 1) - del cursor - - def test3_close_connection(self): - for threadsafety in (1, 2, 3): - dbpool = self.my_dbpool(threadsafety, 1) - db = dbpool.connection() - self.assertEqual(db.open_cursors, 0) - cursor1 = db.cursor() - self.assertEqual(db.open_cursors, 1) - db.close() - self.assertFalse(hasattr(db, 'open_cursors')) - db = dbpool.connection() - self.assertTrue(hasattr(db, 'database')) - self.assertEqual(db.database, 'SimplePooledDBTestDB') - self.assertTrue(hasattr(db, 'user')) - self.assertEqual(db.user, 'SimplePooledDBTestUser') - self.assertEqual(db.open_cursors, 1) - cursor2 = db.cursor() - self.assertEqual(db.open_cursors, 2) - del cursor2 - del cursor1 - - def test4_two_connections(self): - for threadsafety in (1, 2, 3): - dbpool = self.my_dbpool(threadsafety, 2) - db1 = dbpool.connection() - cursors1 = [db1.cursor() for i in range(5)] - db2 = dbpool.connection() - self.assertNotEqual(db1, db2) - cursors2 = [db2.cursor() for i in range(7)] - self.assertEqual(db1.open_cursors, 5) - self.assertEqual(db2.open_cursors, 7) - db1.close() - db1 = dbpool.connection() - self.assertNotEqual(db1, db2) - self.assertTrue(hasattr(db1, 'cursor')) - for i in range(3): - cursors1.append(db1.cursor()) - self.assertEqual(db1.open_cursors, 8) - cursors2.append(db2.cursor()) - self.assertEqual(db2.open_cursors, 8) - del cursors2 - del cursors1 - - def test5_threadsafety_1(self): - dbpool = self.my_dbpool(1, 2) - try: - from queue import Queue, Empty - except ImportError: # Python 2 - from Queue import Queue, Empty - queue = Queue(3) - - def connection(): - queue.put(dbpool.connection()) - - from threading import Thread - threads = [Thread(target=connection).start() for i in range(3)] - self.assertEqual(len(threads), 3) - try: - db1 = queue.get(1, 1) - db2 = queue.get(1, 1) - except TypeError: - db1 = queue.get(1) - db2 = queue.get(1) - self.assertNotEqual(db1, db2) - 
self.assertNotEqual(db1._con, db2._con) - try: - self.assertRaises(Empty, queue.get, 1, 0.1) - except TypeError: - self.assertRaises(Empty, queue.get, 0) - db2.close() - try: - db3 = queue.get(1, 1) - except TypeError: - db3 = queue.get(1) - self.assertNotEqual(db1, db3) - self.assertNotEqual(db1._con, db3._con) - - def test6_threadsafety_2(self): - for threadsafety in (2, 3): - dbpool = self.my_dbpool(threadsafety, 2) - db1 = dbpool.connection() - db2 = dbpool.connection() - cursors = [dbpool.connection().cursor() for i in range(100)] - self.assertEqual(db1.open_cursors, 50) - self.assertEqual(db2.open_cursors, 50) - del cursors - - -if __name__ == '__main__': - unittest.main() diff --git a/DBUtils/Tests/TestSimplePooledPg.py b/DBUtils/Tests/TestSimplePooledPg.py deleted file mode 100644 index 7ff7690..0000000 --- a/DBUtils/Tests/TestSimplePooledPg.py +++ /dev/null @@ -1,124 +0,0 @@ -"""Test the SimplePooledPg module. - -Note: -We don't test performance here, so the test does not predicate -whether SimplePooledPg actually will help in improving performance or not. 
- - -Copyright and credit info: - -* This test was contributed by Christoph Zwerschke - -""" - -import unittest - -import DBUtils.Tests.mock_pg # noqa - -from DBUtils import SimplePooledPg - -__version__ = '1.4' - - -class TestSimplePooledPg(unittest.TestCase): - - def my_dbpool(self, maxConnections): - return SimplePooledPg.PooledPg( - maxConnections, 'SimplePooledPgTestDB', 'SimplePooledPgTestUser') - - def test0_check_version(self): - from DBUtils import __version__ as DBUtilsVersion - self.assertEqual(DBUtilsVersion, __version__) - self.assertEqual(SimplePooledPg.__version__, __version__) - self.assertEqual(SimplePooledPg.PooledPg.version, __version__) - - def test1_create_connection(self): - dbpool = self.my_dbpool(1) - db = dbpool.connection() - self.assertTrue(hasattr(db, 'query')) - self.assertTrue(hasattr(db, 'num_queries')) - self.assertEqual(db.num_queries, 0) - self.assertTrue(hasattr(db, 'dbname')) - self.assertEqual(db.dbname, 'SimplePooledPgTestDB') - self.assertTrue(hasattr(db, 'user')) - self.assertEqual(db.user, 'SimplePooledPgTestUser') - db.query('select 1') - self.assertEqual(db.num_queries, 1) - - def test2_close_connection(self): - dbpool = self.my_dbpool(1) - db = dbpool.connection() - self.assertEqual(db.num_queries, 0) - db.query('select 1') - self.assertEqual(db.num_queries, 1) - db.close() - self.assertFalse(hasattr(db, 'num_queries')) - db = dbpool.connection() - self.assertTrue(hasattr(db, 'dbname')) - self.assertEqual(db.dbname, 'SimplePooledPgTestDB') - self.assertTrue(hasattr(db, 'user')) - self.assertEqual(db.user, 'SimplePooledPgTestUser') - self.assertEqual(db.num_queries, 1) - db.query('select 1') - self.assertEqual(db.num_queries, 2) - - def test3_two_connections(self): - dbpool = self.my_dbpool(2) - db1 = dbpool.connection() - for i in range(5): - db1.query('select 1') - db2 = dbpool.connection() - self.assertNotEqual(db1, db2) - self.assertNotEqual(db1._con, db2._con) - for i in range(7): - db2.query('select 1') - 
self.assertEqual(db1.num_queries, 5) - self.assertEqual(db2.num_queries, 7) - db1.close() - db1 = dbpool.connection() - self.assertNotEqual(db1, db2) - self.assertNotEqual(db1._con, db2._con) - self.assertTrue(hasattr(db1, 'query')) - for i in range(3): - db1.query('select 1') - self.assertEqual(db1.num_queries, 8) - db2.query('select 1') - self.assertEqual(db2.num_queries, 8) - - def test4_threads(self): - dbpool = self.my_dbpool(2) - try: - from queue import Queue, Empty - except ImportError: # Python 2 - from Queue import Queue, Empty - queue = Queue(3) - - def connection(): - queue.put(dbpool.connection()) - - from threading import Thread - threads = [Thread(target=connection).start() for i in range(3)] - self.assertEqual(len(threads), 3) - try: - db1 = queue.get(1, 1) - db2 = queue.get(1, 1) - except TypeError: - db1 = queue.get(1) - db2 = queue.get(1) - self.assertNotEqual(db1, db2) - self.assertNotEqual(db1._con, db2._con) - try: - self.assertRaises(Empty, queue.get, 1, 0.1) - except TypeError: - self.assertRaises(Empty, queue.get, 0) - db2.close() - try: - db3 = queue.get(1, 1) - except TypeError: - db3 = queue.get(1) - self.assertNotEqual(db1, db3) - self.assertNotEqual(db1._con, db3._con) - - -if __name__ == '__main__': - unittest.main() diff --git a/DBUtils/Tests/TestSteadyDB.py b/DBUtils/Tests/TestSteadyDB.py deleted file mode 100644 index f9c7d15..0000000 --- a/DBUtils/Tests/TestSteadyDB.py +++ /dev/null @@ -1,662 +0,0 @@ -"""Test the SteadyDB module. - -Note: -We do not test any real DB-API 2 module, but we just -mock the basic DB-API 2 connection functionality. 
- -Copyright and credit info: - -* This test was contributed by Christoph Zwerschke - -""" - -import unittest - -import DBUtils.Tests.mock_db as dbapi - -from DBUtils.SteadyDB import ( - connect as SteadyDBconnect, SteadyDBConnection, SteadyDBCursor) - -__version__ = '1.4' - - -class TestSteadyDB(unittest.TestCase): - - def test00_CheckVersion(self): - from DBUtils import __version__ as DBUtilsVersion - self.assertEqual(DBUtilsVersion, __version__) - from DBUtils.SteadyDB import __version__ as SteadyDBVersion - self.assertEqual(SteadyDBVersion, __version__) - self.assertEqual(SteadyDBConnection.version, __version__) - - def test01_MockedConnection(self): - db = dbapi.connect( - 'SteadyDBTestDB', user='SteadyDBTestUser') - db.__class__.has_ping = False - db.__class__.num_pings = 0 - self.assertTrue(hasattr(db, 'database')) - self.assertEqual(db.database, 'SteadyDBTestDB') - self.assertTrue(hasattr(db, 'user')) - self.assertEqual(db.user, 'SteadyDBTestUser') - self.assertTrue(hasattr(db, 'cursor')) - self.assertTrue(hasattr(db, 'close')) - self.assertTrue(hasattr(db, 'open_cursors')) - self.assertTrue(hasattr(db, 'num_uses')) - self.assertTrue(hasattr(db, 'num_queries')) - self.assertTrue(hasattr(db, 'session')) - self.assertTrue(hasattr(db, 'valid')) - self.assertTrue(db.valid) - self.assertEqual(db.open_cursors, 0) - for i in range(3): - cursor = db.cursor() - self.assertEqual(db.open_cursors, 1) - cursor.close() - self.assertEqual(db.open_cursors, 0) - cursor = [] - for i in range(3): - cursor.append(db.cursor()) - self.assertEqual(db.open_cursors, i + 1) - del cursor - self.assertEqual(db.open_cursors, 0) - cursor = db.cursor() - self.assertTrue(hasattr(cursor, 'execute')) - self.assertTrue(hasattr(cursor, 'fetchone')) - self.assertTrue(hasattr(cursor, 'callproc')) - self.assertTrue(hasattr(cursor, 'close')) - self.assertTrue(hasattr(cursor, 'valid')) - self.assertTrue(cursor.valid) - self.assertEqual(db.open_cursors, 1) - for i in range(3): - 
self.assertEqual(db.num_uses, i) - self.assertEqual(db.num_queries, i) - cursor.execute('select test%d' % i) - self.assertEqual(cursor.fetchone(), 'test%d' % i) - self.assertTrue(cursor.valid) - self.assertEqual(db.open_cursors, 1) - for i in range(4): - cursor.callproc('test') - cursor.close() - self.assertFalse(cursor.valid) - self.assertEqual(db.open_cursors, 0) - self.assertEqual(db.num_uses, 7) - self.assertEqual(db.num_queries, 3) - self.assertRaises(dbapi.InternalError, cursor.close) - self.assertRaises(dbapi.InternalError, cursor.execute, 'select test') - self.assertTrue(db.valid) - self.assertFalse(db.__class__.has_ping) - self.assertEqual(db.__class__.num_pings, 0) - self.assertRaises(AttributeError, db.ping) - self.assertEqual(db.__class__.num_pings, 1) - db.__class__.has_ping = True - self.assertIsNone(db.ping()) - self.assertEqual(db.__class__.num_pings, 2) - db.close() - self.assertFalse(db.valid) - self.assertEqual(db.num_uses, 0) - self.assertEqual(db.num_queries, 0) - self.assertRaises(dbapi.InternalError, db.close) - self.assertRaises(dbapi.InternalError, db.cursor) - self.assertRaises(dbapi.OperationalError, db.ping) - self.assertEqual(db.__class__.num_pings, 3) - db.__class__.has_ping = False - db.__class__.num_pings = 0 - - def test02_BrokenConnection(self): - self.assertRaises(TypeError, SteadyDBConnection, None) - self.assertRaises(TypeError, SteadyDBCursor, None) - db = SteadyDBconnect(dbapi, database='ok') - for i in range(3): - db.close() - del db - self.assertRaises( - dbapi.OperationalError, SteadyDBconnect, dbapi, database='error') - db = SteadyDBconnect(dbapi, database='ok') - cursor = db.cursor() - for i in range(3): - cursor.close() - cursor = db.cursor('ok') - for i in range(3): - cursor.close() - self.assertRaises(dbapi.OperationalError, db.cursor, 'error') - - def test03_Close(self): - for closeable in (False, True): - db = SteadyDBconnect(dbapi, closeable=closeable) - self.assertTrue(db._con.valid) - db.close() - 
self.assertTrue(closeable ^ db._con.valid) - db.close() - self.assertTrue(closeable ^ db._con.valid) - db._close() - self.assertFalse(db._con.valid) - db._close() - self.assertFalse(db._con.valid) - - def test04_Connection(self): - db = SteadyDBconnect( - dbapi, 0, None, None, None, True, - 'SteadyDBTestDB', user='SteadyDBTestUser') - self.assertTrue(isinstance(db, SteadyDBConnection)) - self.assertTrue(hasattr(db, '_con')) - self.assertTrue(hasattr(db, '_usage')) - self.assertEqual(db._usage, 0) - self.assertTrue(hasattr(db._con, 'valid')) - self.assertTrue(db._con.valid) - self.assertTrue(hasattr(db._con, 'cursor')) - self.assertTrue(hasattr(db._con, 'close')) - self.assertTrue(hasattr(db._con, 'open_cursors')) - self.assertTrue(hasattr(db._con, 'num_uses')) - self.assertTrue(hasattr(db._con, 'num_queries')) - self.assertTrue(hasattr(db._con, 'session')) - self.assertTrue(hasattr(db._con, 'database')) - self.assertEqual(db._con.database, 'SteadyDBTestDB') - self.assertTrue(hasattr(db._con, 'user')) - self.assertEqual(db._con.user, 'SteadyDBTestUser') - self.assertTrue(hasattr(db, 'cursor')) - self.assertTrue(hasattr(db, 'close')) - self.assertEqual(db._con.open_cursors, 0) - for i in range(3): - cursor = db.cursor() - self.assertEqual(db._con.open_cursors, 1) - cursor.close() - self.assertEqual(db._con.open_cursors, 0) - cursor = [] - for i in range(3): - cursor.append(db.cursor()) - self.assertEqual(db._con.open_cursors, i + 1) - del cursor - self.assertEqual(db._con.open_cursors, 0) - cursor = db.cursor() - self.assertTrue(hasattr(cursor, 'execute')) - self.assertTrue(hasattr(cursor, 'fetchone')) - self.assertTrue(hasattr(cursor, 'callproc')) - self.assertTrue(hasattr(cursor, 'close')) - self.assertTrue(hasattr(cursor, 'valid')) - self.assertTrue(cursor.valid) - self.assertEqual(db._con.open_cursors, 1) - for i in range(3): - self.assertEqual(db._usage, i) - self.assertEqual(db._con.num_uses, i) - self.assertEqual(db._con.num_queries, i) - 
cursor.execute('select test%d' % i) - self.assertEqual(cursor.fetchone(), 'test%d' % i) - self.assertTrue(cursor.valid) - self.assertEqual(db._con.open_cursors, 1) - for i in range(4): - cursor.callproc('test') - cursor.close() - self.assertFalse(cursor.valid) - self.assertEqual(db._con.open_cursors, 0) - self.assertEqual(db._usage, 7) - self.assertEqual(db._con.num_uses, 7) - self.assertEqual(db._con.num_queries, 3) - cursor.close() - cursor.execute('select test8') - self.assertTrue(cursor.valid) - self.assertEqual(db._con.open_cursors, 1) - self.assertEqual(cursor.fetchone(), 'test8') - self.assertEqual(db._usage, 8) - self.assertEqual(db._con.num_uses, 8) - self.assertEqual(db._con.num_queries, 4) - self.assertTrue(db._con.valid) - db.close() - self.assertFalse(db._con.valid) - self.assertEqual(db._con.open_cursors, 0) - self.assertEqual(db._usage, 8) - self.assertEqual(db._con.num_uses, 0) - self.assertEqual(db._con.num_queries, 0) - self.assertRaises(dbapi.InternalError, db._con.close) - db.close() - self.assertRaises(dbapi.InternalError, db._con.cursor) - cursor = db.cursor() - self.assertTrue(db._con.valid) - cursor.execute('select test11') - self.assertEqual(cursor.fetchone(), 'test11') - cursor.execute('select test12') - self.assertEqual(cursor.fetchone(), 'test12') - cursor.callproc('test') - self.assertEqual(db._usage, 3) - self.assertEqual(db._con.num_uses, 3) - self.assertEqual(db._con.num_queries, 2) - cursor2 = db.cursor() - self.assertEqual(db._con.open_cursors, 2) - cursor2.execute('select test13') - self.assertEqual(cursor2.fetchone(), 'test13') - self.assertEqual(db._con.num_queries, 3) - db.close() - self.assertEqual(db._con.open_cursors, 0) - self.assertEqual(db._con.num_queries, 0) - cursor = db.cursor() - self.assertTrue(cursor.valid) - cursor.callproc('test') - cursor._cursor.valid = False - self.assertFalse(cursor.valid) - self.assertRaises(dbapi.InternalError, cursor._cursor.callproc, 'test') - cursor.callproc('test') - 
self.assertTrue(cursor.valid) - cursor._cursor.callproc('test') - self.assertEqual(db._usage, 2) - self.assertEqual(db._con.num_uses, 3) - db._con.valid = cursor._cursor.valid = False - cursor.callproc('test') - self.assertTrue(cursor.valid) - self.assertEqual(db._usage, 1) - self.assertEqual(db._con.num_uses, 1) - cursor.execute('set doit') - db.commit() - cursor.execute('set dont') - db.rollback() - self.assertEqual( - db._con.session, ['doit', 'commit', 'dont', 'rollback']) - - def test05_ConnectionContextHandler(self): - db = SteadyDBconnect( - dbapi, 0, None, None, None, True, - 'SteadyDBTestDB', user='SteadyDBTestUser') - self.assertEqual(db._con.session, []) - with db as con: - con.cursor().execute('select test') - self.assertEqual(db._con.session, ['commit']) - try: - with db as con: - con.cursor().execute('error') - except dbapi.ProgrammingError: - error = True - else: - error = False - self.assertTrue(error) - self.assertEqual(db._con.session, ['commit', 'rollback']) - - def test06_CursorContextHandler(self): - db = SteadyDBconnect( - dbapi, 0, None, None, None, True, - 'SteadyDBTestDB', user='SteadyDBTestUser') - self.assertEqual(db._con.open_cursors, 0) - with db.cursor() as cursor: - self.assertEqual(db._con.open_cursors, 1) - cursor.execute('select test') - self.assertEqual(cursor.fetchone(), 'test') - self.assertEqual(db._con.open_cursors, 0) - - def test07_ConnectionCreatorFunction(self): - db1 = SteadyDBconnect( - dbapi, 0, None, None, None, True, - 'SteadyDBTestDB', user='SteadyDBTestUser') - db2 = SteadyDBconnect( - dbapi.connect, 0, None, None, None, True, - 'SteadyDBTestDB', user='SteadyDBTestUser') - self.assertEqual(db1.dbapi(), db2.dbapi()) - self.assertEqual(db1.threadsafety(), db2.threadsafety()) - self.assertEqual(db1._creator, db2._creator) - self.assertEqual(db1._args, db2._args) - self.assertEqual(db1._kwargs, db2._kwargs) - db2.close() - db1.close() - - def test08_ConnectionMaxUsage(self): - db = SteadyDBconnect(dbapi, 10) - cursor = 
db.cursor() - for i in range(100): - cursor.execute('select test%d' % i) - r = cursor.fetchone() - self.assertEqual(r, 'test%d' % i) - self.assertTrue(db._con.valid) - j = i % 10 + 1 - self.assertEqual(db._usage, j) - self.assertEqual(db._con.num_uses, j) - self.assertEqual(db._con.num_queries, j) - self.assertEqual(db._con.open_cursors, 1) - db.begin() - for i in range(100): - cursor.callproc('test') - self.assertTrue(db._con.valid) - if i == 49: - db.commit() - j = i % 10 + 1 if i > 49 else i + 11 - self.assertEqual(db._usage, j) - self.assertEqual(db._con.num_uses, j) - j = 0 if i > 49 else 10 - self.assertEqual(db._con.num_queries, j) - for i in range(10): - if i == 7: - db._con.valid = cursor._cursor.valid = False - cursor.execute('select test%d' % i) - r = cursor.fetchone() - self.assertEqual(r, 'test%d' % i) - j = i % 7 + 1 - self.assertEqual(db._usage, j) - self.assertEqual(db._con.num_uses, j) - self.assertEqual(db._con.num_queries, j) - for i in range(10): - if i == 5: - db._con.valid = cursor._cursor.valid = False - cursor.callproc('test') - j = (i + (3 if i < 5 else -5)) % 10 + 1 - self.assertEqual(db._usage, j) - self.assertEqual(db._con.num_uses, j) - j = 3 if i < 5 else 0 - self.assertEqual(db._con.num_queries, j) - db.close() - cursor.execute('select test1') - self.assertEqual(cursor.fetchone(), 'test1') - self.assertEqual(db._usage, 1) - self.assertEqual(db._con.num_uses, 1) - self.assertEqual(db._con.num_queries, 1) - - def test09_ConnectionSetSession(self): - db = SteadyDBconnect(dbapi, 3, ('set time zone', 'set datestyle')) - self.assertTrue(hasattr(db, '_usage')) - self.assertEqual(db._usage, 0) - self.assertTrue(hasattr(db._con, 'open_cursors')) - self.assertEqual(db._con.open_cursors, 0) - self.assertTrue(hasattr(db._con, 'num_uses')) - self.assertEqual(db._con.num_uses, 2) - self.assertTrue(hasattr(db._con, 'num_queries')) - self.assertEqual(db._con.num_queries, 0) - self.assertTrue(hasattr(db._con, 'session')) - 
self.assertEqual(tuple(db._con.session), ('time zone', 'datestyle')) - for i in range(11): - db.cursor().execute('select test') - self.assertEqual(db._con.open_cursors, 0) - self.assertEqual(db._usage, 2) - self.assertEqual(db._con.num_uses, 4) - self.assertEqual(db._con.num_queries, 2) - self.assertEqual(db._con.session, ['time zone', 'datestyle']) - db.cursor().execute('set test') - self.assertEqual(db._con.open_cursors, 0) - self.assertEqual(db._usage, 3) - self.assertEqual(db._con.num_uses, 5) - self.assertEqual(db._con.num_queries, 2) - self.assertEqual(db._con.session, ['time zone', 'datestyle', 'test']) - db.cursor().execute('select test') - self.assertEqual(db._con.open_cursors, 0) - self.assertEqual(db._usage, 1) - self.assertEqual(db._con.num_uses, 3) - self.assertEqual(db._con.num_queries, 1) - self.assertEqual(db._con.session, ['time zone', 'datestyle']) - db.cursor().execute('set test') - self.assertEqual(db._con.open_cursors, 0) - self.assertEqual(db._usage, 2) - self.assertEqual(db._con.num_uses, 4) - self.assertEqual(db._con.num_queries, 1) - self.assertEqual(db._con.session, ['time zone', 'datestyle', 'test']) - db.cursor().execute('select test') - self.assertEqual(db._con.open_cursors, 0) - self.assertEqual(db._usage, 3) - self.assertEqual(db._con.num_uses, 5) - self.assertEqual(db._con.num_queries, 2) - self.assertEqual(db._con.session, ['time zone', 'datestyle', 'test']) - db.close() - db.cursor().execute('set test') - self.assertEqual(db._con.open_cursors, 0) - self.assertEqual(db._usage, 1) - self.assertEqual(db._con.num_uses, 3) - self.assertEqual(db._con.num_queries, 0) - self.assertEqual(db._con.session, ['time zone', 'datestyle', 'test']) - db.close() - db.cursor().execute('select test') - self.assertEqual(db._con.open_cursors, 0) - self.assertEqual(db._usage, 1) - self.assertEqual(db._con.num_uses, 3) - self.assertEqual(db._con.num_queries, 1) - self.assertEqual(db._con.session, ['time zone', 'datestyle']) - - def 
test10_ConnectionFailures(self): - db = SteadyDBconnect(dbapi) - db.close() - db.cursor() - db = SteadyDBconnect(dbapi, failures=dbapi.InternalError) - db.close() - db.cursor() - db = SteadyDBconnect(dbapi, failures=dbapi.OperationalError) - db.close() - self.assertRaises(dbapi.InternalError, db.cursor) - db = SteadyDBconnect( - dbapi, failures=(dbapi.OperationalError, dbapi.InternalError)) - db.close() - db.cursor() - - def test11_ConnectionFailureError(self): - db = SteadyDBconnect(dbapi) - cursor = db.cursor() - db.close() - cursor.execute('select test') - cursor = db.cursor() - db.close() - self.assertRaises(dbapi.ProgrammingError, cursor.execute, 'error') - - def test12_ConnectionSetSizes(self): - db = SteadyDBconnect(dbapi) - cursor = db.cursor() - cursor.execute('get sizes') - result = cursor.fetchone() - self.assertEqual(result, ([], {})) - cursor.setinputsizes([7, 42, 6]) - cursor.setoutputsize(9) - cursor.setoutputsize(15, 3) - cursor.setoutputsize(42, 7) - cursor.execute('get sizes') - result = cursor.fetchone() - self.assertEqual(result, ([7, 42, 6], {None: 9, 3: 15, 7: 42})) - cursor.execute('get sizes') - result = cursor.fetchone() - self.assertEqual(result, ([], {})) - cursor.setinputsizes([6, 42, 7]) - cursor.setoutputsize(7) - cursor.setoutputsize(15, 3) - cursor.setoutputsize(42, 9) - db.close() - cursor.execute('get sizes') - result = cursor.fetchone() - self.assertEqual(result, ([6, 42, 7], {None: 7, 3: 15, 9: 42})) - - def test13_ConnectionPingCheck(self): - Connection = dbapi.Connection - Connection.has_ping = False - Connection.num_pings = 0 - db = SteadyDBconnect(dbapi) - db.cursor().execute('select test') - self.assertEqual(Connection.num_pings, 0) - db.close() - db.cursor().execute('select test') - self.assertEqual(Connection.num_pings, 0) - self.assertIsNone(db._ping_check()) - self.assertEqual(Connection.num_pings, 1) - db = SteadyDBconnect(dbapi, ping=7) - db.cursor().execute('select test') - self.assertEqual(Connection.num_pings, 2) - 
db.close() - db.cursor().execute('select test') - self.assertEqual(Connection.num_pings, 2) - self.assertIsNone(db._ping_check()) - self.assertEqual(Connection.num_pings, 2) - Connection.has_ping = True - db = SteadyDBconnect(dbapi) - db.cursor().execute('select test') - self.assertEqual(Connection.num_pings, 2) - db.close() - db.cursor().execute('select test') - self.assertEqual(Connection.num_pings, 2) - self.assertTrue(db._ping_check()) - self.assertEqual(Connection.num_pings, 3) - db = SteadyDBconnect(dbapi, ping=1) - db.cursor().execute('select test') - self.assertEqual(Connection.num_pings, 3) - db.close() - db.cursor().execute('select test') - self.assertEqual(Connection.num_pings, 3) - self.assertTrue(db._ping_check()) - self.assertEqual(Connection.num_pings, 4) - db.close() - self.assertTrue(db._ping_check()) - self.assertEqual(Connection.num_pings, 5) - db = SteadyDBconnect(dbapi, ping=7) - db.cursor().execute('select test') - self.assertEqual(Connection.num_pings, 7) - db.close() - db.cursor().execute('select test') - self.assertEqual(Connection.num_pings, 9) - db = SteadyDBconnect(dbapi, ping=3) - self.assertEqual(Connection.num_pings, 9) - db.cursor() - self.assertEqual(Connection.num_pings, 10) - db.close() - cursor = db.cursor() - self.assertEqual(Connection.num_pings, 11) - cursor.execute('select test') - self.assertEqual(Connection.num_pings, 11) - db = SteadyDBconnect(dbapi, ping=5) - self.assertEqual(Connection.num_pings, 11) - db.cursor() - self.assertEqual(Connection.num_pings, 11) - db.close() - cursor = db.cursor() - self.assertEqual(Connection.num_pings, 11) - cursor.execute('select test') - self.assertEqual(Connection.num_pings, 12) - db.close() - cursor = db.cursor() - self.assertEqual(Connection.num_pings, 12) - cursor.execute('select test') - self.assertEqual(Connection.num_pings, 13) - db = SteadyDBconnect(dbapi, ping=7) - self.assertEqual(Connection.num_pings, 13) - db.cursor() - self.assertEqual(Connection.num_pings, 14) - db.close() 
- cursor = db.cursor() - self.assertEqual(Connection.num_pings, 15) - cursor.execute('select test') - self.assertEqual(Connection.num_pings, 16) - db.close() - cursor = db.cursor() - self.assertEqual(Connection.num_pings, 17) - cursor.execute('select test') - self.assertEqual(Connection.num_pings, 18) - db.close() - cursor.execute('select test') - self.assertEqual(Connection.num_pings, 20) - Connection.has_ping = False - Connection.num_pings = 0 - - def test14_BeginTransaction(self): - db = SteadyDBconnect(dbapi, database='ok') - cursor = db.cursor() - cursor.close() - cursor.execute('select test12') - self.assertEqual(cursor.fetchone(), 'test12') - db.begin() - cursor = db.cursor() - cursor.close() - self.assertRaises(dbapi.InternalError, cursor.execute, 'select test12') - cursor.execute('select test12') - self.assertEqual(cursor.fetchone(), 'test12') - db.close() - db.begin() - self.assertRaises(dbapi.InternalError, cursor.execute, 'select test12') - cursor.execute('select test12') - self.assertEqual(cursor.fetchone(), 'test12') - db.begin() - self.assertRaises(dbapi.ProgrammingError, cursor.execute, 'error') - cursor.close() - cursor.execute('select test12') - self.assertEqual(cursor.fetchone(), 'test12') - - def test15_WithBeginExtension(self): - db = SteadyDBconnect(dbapi, database='ok') - db._con._begin_called_with = None - - def begin(a, b=None, c=7): - db._con._begin_called_with = (a, b, c) - - db._con.begin = begin - db.begin(42, 6) - cursor = db.cursor() - cursor.execute('select test13') - self.assertEqual(cursor.fetchone(), 'test13') - self.assertEqual(db._con._begin_called_with, (42, 6, 7)) - - def test16_CancelTransaction(self): - db = SteadyDBconnect(dbapi, database='ok') - cursor = db.cursor() - db.begin() - cursor.execute('select test14') - self.assertEqual(cursor.fetchone(), 'test14') - db.cancel() - cursor.execute('select test14') - self.assertEqual(cursor.fetchone(), 'test14') - - def test17_WithCancelExtension(self): - db = 
SteadyDBconnect(dbapi, database='ok') - db._con._cancel_called = None - - def cancel(): - db._con._cancel_called = 'yes' - - db._con.cancel = cancel - db.begin() - cursor = db.cursor() - cursor.execute('select test15') - self.assertEqual(cursor.fetchone(), 'test15') - db.cancel() - self.assertEqual(db._con._cancel_called, 'yes') - - def test18_ResetTransaction(self): - db = SteadyDBconnect(dbapi, database='ok') - db.begin() - self.assertFalse(db._con.session) - db.close() - self.assertFalse(db._con.session) - db = SteadyDBconnect(dbapi, database='ok', closeable=False) - db.begin() - self.assertFalse(db._con.session) - db.close() - self.assertEqual(db._con.session, ['rollback']) - - def test19_CommitError(self): - db = SteadyDBconnect(dbapi, database='ok') - db.begin() - self.assertFalse(db._con.session) - self.assertTrue(db._con.valid) - db.commit() - self.assertEqual(db._con.session, ['commit']) - self.assertTrue(db._con.valid) - db.begin() - db._con.valid = False - con = db._con - self.assertRaises(dbapi.InternalError, db.commit) - self.assertFalse(db._con.session) - self.assertTrue(db._con.valid) - self.assertIsNot(con, db._con) - db.begin() - self.assertFalse(db._con.session) - self.assertTrue(db._con.valid) - db.commit() - self.assertEqual(db._con.session, ['commit']) - self.assertTrue(db._con.valid) - - def test20_RollbackError(self): - db = SteadyDBconnect(dbapi, database='ok') - db.begin() - self.assertFalse(db._con.session) - self.assertTrue(db._con.valid) - db.rollback() - self.assertEqual(db._con.session, ['rollback']) - self.assertTrue(db._con.valid) - db.begin() - db._con.valid = False - con = db._con - self.assertRaises(dbapi.InternalError, db.rollback) - self.assertFalse(db._con.session) - self.assertTrue(db._con.valid) - self.assertIsNot(con, db._con) - db.begin() - self.assertFalse(db._con.session) - self.assertTrue(db._con.valid) - db.rollback() - self.assertEqual(db._con.session, ['rollback']) - self.assertTrue(db._con.valid) - - -if __name__ == 
'__main__': - unittest.main() diff --git a/DBUtils/Tests/TestSteadyPg.py b/DBUtils/Tests/TestSteadyPg.py deleted file mode 100644 index e566a7b..0000000 --- a/DBUtils/Tests/TestSteadyPg.py +++ /dev/null @@ -1,327 +0,0 @@ -"""Test the SteadyPg module. - -Note: -We do not test the real PyGreSQL module, but we just -mock the basic connection functionality of that module. -We assume that the PyGreSQL module will detect lost -connections correctly and set the status flag accordingly. - -Copyright and credit info: - -* This test was contributed by Christoph Zwerschke - -""" - -import unittest -import sys - -import DBUtils.Tests.mock_pg as pg - -from DBUtils.SteadyPg import SteadyPgConnection - -__version__ = '1.4' - - -class TestSteadyPg(unittest.TestCase): - - def test0_CheckVersion(self): - from DBUtils import __version__ as DBUtilsVersion - self.assertEqual(DBUtilsVersion, __version__) - from DBUtils.SteadyPg import __version__ as SteadyPgVersion - self.assertEqual(SteadyPgVersion, __version__) - self.assertEqual(SteadyPgConnection.version, __version__) - - def test1_MockedConnection(self): - PgConnection = pg.DB - db = PgConnection( - 'SteadyPgTestDB', user='SteadyPgTestUser') - self.assertTrue(hasattr(db, 'db')) - self.assertTrue(hasattr(db.db, 'status')) - self.assertTrue(db.db.status) - self.assertTrue(hasattr(db.db, 'query')) - self.assertTrue(hasattr(db.db, 'close')) - self.assertFalse(hasattr(db.db, 'reopen')) - self.assertTrue(hasattr(db, 'reset')) - self.assertTrue(hasattr(db.db, 'num_queries')) - self.assertTrue(hasattr(db.db, 'session')) - self.assertFalse(hasattr(db.db, 'get_tables')) - self.assertTrue(hasattr(db.db, 'db')) - self.assertEqual(db.db.db, 'SteadyPgTestDB') - self.assertTrue(hasattr(db.db, 'user')) - self.assertEqual(db.db.user, 'SteadyPgTestUser') - self.assertTrue(hasattr(db, 'query')) - self.assertTrue(hasattr(db, 'close')) - self.assertTrue(hasattr(db, 'reopen')) - self.assertTrue(hasattr(db, 'reset')) - self.assertTrue(hasattr(db, 
'num_queries')) - self.assertTrue(hasattr(db, 'session')) - self.assertTrue(hasattr(db, 'get_tables')) - self.assertTrue(hasattr(db, 'dbname')) - self.assertEqual(db.dbname, 'SteadyPgTestDB') - self.assertTrue(hasattr(db, 'user')) - self.assertEqual(db.user, 'SteadyPgTestUser') - for i in range(3): - self.assertEqual(db.num_queries, i) - self.assertEqual( - db.query('select test%d' % i), 'test%d' % i) - self.assertTrue(db.db.status) - db.reopen() - self.assertTrue(db.db.status) - self.assertEqual(db.num_queries, 0) - self.assertEqual(db.query('select test4'), 'test4') - self.assertEqual(db.get_tables(), 'test') - db.close() - try: - status = db.db.status - except AttributeError: - status = False - self.assertFalse(status) - self.assertRaises(pg.InternalError, db.close) - self.assertRaises(pg.InternalError, db.query, 'select test') - self.assertRaises(pg.InternalError, db.get_tables) - - def test2_BrokenConnection(self): - self.assertRaises(TypeError, SteadyPgConnection, 'wrong') - db = SteadyPgConnection(dbname='ok') - InternalError = sys.modules[db._con.__module__].InternalError - for i in range(3): - db.close() - del db - self.assertRaises(InternalError, SteadyPgConnection, dbname='error') - - def test3_Close(self): - for closeable in (False, True): - db = SteadyPgConnection(closeable=closeable) - self.assertTrue(db._con.db and db._con.valid) - db.close() - self.assertTrue( - closeable ^ (db._con.db is not None and db._con.valid)) - db.close() - self.assertTrue( - closeable ^ (db._con.db is not None and db._con.valid)) - db._close() - self.assertFalse(db._con.db and db._con.valid) - db._close() - self.assertFalse(db._con.db and db._con.valid) - - def test4_Connection(self): - db = SteadyPgConnection( - 0, None, 1, 'SteadyPgTestDB', user='SteadyPgTestUser') - self.assertTrue(hasattr(db, 'db')) - self.assertTrue(hasattr(db, '_con')) - self.assertEqual(db.db, db._con.db) - self.assertTrue(hasattr(db, '_usage')) - self.assertEqual(db._usage, 0) - 
self.assertTrue(hasattr(db.db, 'status')) - self.assertTrue(db.db.status) - self.assertTrue(hasattr(db.db, 'query')) - self.assertTrue(hasattr(db.db, 'close')) - self.assertFalse(hasattr(db.db, 'reopen')) - self.assertTrue(hasattr(db.db, 'reset')) - self.assertTrue(hasattr(db.db, 'num_queries')) - self.assertTrue(hasattr(db.db, 'session')) - self.assertTrue(hasattr(db.db, 'db')) - self.assertEqual(db.db.db, 'SteadyPgTestDB') - self.assertTrue(hasattr(db.db, 'user')) - self.assertEqual(db.db.user, 'SteadyPgTestUser') - self.assertFalse(hasattr(db.db, 'get_tables')) - self.assertTrue(hasattr(db, 'query')) - self.assertTrue(hasattr(db, 'close')) - self.assertTrue(hasattr(db, 'reopen')) - self.assertTrue(hasattr(db, 'reset')) - self.assertTrue(hasattr(db, 'num_queries')) - self.assertTrue(hasattr(db, 'session')) - self.assertTrue(hasattr(db, 'dbname')) - self.assertEqual(db.dbname, 'SteadyPgTestDB') - self.assertTrue(hasattr(db, 'user')) - self.assertEqual(db.user, 'SteadyPgTestUser') - self.assertTrue(hasattr(db, 'get_tables')) - for i in range(3): - self.assertEqual(db._usage, i) - self.assertEqual(db.num_queries, i) - self.assertEqual( - db.query('select test%d' % i), 'test%d' % i) - self.assertTrue(db.db.status) - self.assertEqual(db.get_tables(), 'test') - self.assertTrue(db.db.status) - self.assertEqual(db._usage, 4) - self.assertEqual(db.num_queries, 3) - db.reopen() - self.assertTrue(db.db.status) - self.assertEqual(db._usage, 0) - self.assertEqual(db.num_queries, 0) - self.assertEqual(db.query('select test'), 'test') - self.assertTrue(db.db.status) - self.assertTrue(hasattr(db._con, 'status')) - self.assertTrue(db._con.status) - self.assertTrue(hasattr(db._con, 'close')) - self.assertTrue(hasattr(db._con, 'query')) - db.close() - try: - status = db.db.status - except AttributeError: - status = False - self.assertFalse(status) - self.assertTrue(hasattr(db._con, 'close')) - self.assertTrue(hasattr(db._con, 'query')) - InternalError = 
sys.modules[db._con.__module__].InternalError - self.assertRaises(InternalError, db._con.close) - self.assertRaises(InternalError, db._con.query, 'select test') - self.assertEqual(db.query('select test'), 'test') - self.assertTrue(db.db.status) - self.assertEqual(db._usage, 1) - self.assertEqual(db.num_queries, 1) - db.db.status = False - self.assertFalse(db.db.status) - self.assertEqual(db.query('select test'), 'test') - self.assertTrue(db.db.status) - self.assertEqual(db._usage, 1) - self.assertEqual(db.num_queries, 1) - db.db.status = False - self.assertFalse(db.db.status) - self.assertEqual(db.get_tables(), 'test') - self.assertTrue(db.db.status) - self.assertEqual(db._usage, 1) - self.assertEqual(db.num_queries, 0) - - def test5_ConnectionContextHandler(self): - db = SteadyPgConnection( - 0, None, 1, 'SteadyPgTestDB', user='SteadyPgTestUser') - self.assertEqual(db.session, []) - with db: - db.query('select test') - self.assertEqual(db.session, ['begin', 'commit']) - try: - with db: - db.query('error') - except pg.ProgrammingError: - error = True - else: - error = False - self.assertTrue(error) - self.assertEqual( - db._con.session, ['begin', 'commit', 'begin', 'rollback']) - - def test6_ConnectionMaxUsage(self): - db = SteadyPgConnection(10) - for i in range(100): - r = db.query('select test%d' % i) - self.assertEqual(r, 'test%d' % i) - self.assertTrue(db.db.status) - j = i % 10 + 1 - self.assertEqual(db._usage, j) - self.assertEqual(db.num_queries, j) - db.begin() - for i in range(100): - r = db.get_tables() - self.assertEqual(r, 'test') - self.assertTrue(db.db.status) - if i == 49: - db.commit() - j = i % 10 + 1 if i > 49 else i + 11 - self.assertEqual(db._usage, j) - j = 0 if i > 49 else 10 - self.assertEqual(db.num_queries, j) - for i in range(10): - if i == 7: - db.db.status = False - r = db.query('select test%d' % i) - self.assertEqual(r, 'test%d' % i) - j = i % 7 + 1 - self.assertEqual(db._usage, j) - self.assertEqual(db.num_queries, j) - for i in 
range(10): - if i == 5: - db.db.status = False - r = db.get_tables() - self.assertEqual(r, 'test') - j = (i + (3 if i < 5 else -5)) % 10 + 1 - self.assertEqual(db._usage, j) - j = 3 if i < 5 else 0 - self.assertEqual(db.num_queries, j) - db.close() - self.assertEqual(db.query('select test1'), 'test1') - self.assertEqual(db._usage, 1) - self.assertEqual(db.num_queries, 1) - db.reopen() - self.assertEqual(db._usage, 0) - self.assertEqual(db.num_queries, 0) - self.assertEqual(db.query('select test2'), 'test2') - self.assertEqual(db._usage, 1) - self.assertEqual(db.num_queries, 1) - - def test7_ConnectionSetSession(self): - db = SteadyPgConnection(3, ('set time zone', 'set datestyle')) - self.assertTrue(hasattr(db, 'num_queries')) - self.assertEqual(db.num_queries, 0) - self.assertTrue(hasattr(db, 'session')) - self.assertEqual(tuple(db.session), ('time zone', 'datestyle')) - for i in range(11): - db.query('select test') - self.assertEqual(db.num_queries, 2) - self.assertEqual(db.session, ['time zone', 'datestyle']) - db.query('set test') - self.assertEqual(db.num_queries, 2) - self.assertEqual(db.session, ['time zone', 'datestyle', 'test']) - db.query('select test') - self.assertEqual(db.num_queries, 1) - self.assertEqual(db.session, ['time zone', 'datestyle']) - db.close() - db.query('set test') - self.assertEqual(db.num_queries, 0) - self.assertEqual(db.session, ['time zone', 'datestyle', 'test']) - - def test8_Begin(self): - for closeable in (False, True): - db = SteadyPgConnection(closeable=closeable) - db.begin() - self.assertEqual(db.session, ['begin']) - db.query('select test') - self.assertEqual(db.num_queries, 1) - db.close() - db.query('select test') - self.assertEqual(db.num_queries, 1) - db.begin() - self.assertEqual(db.session, ['begin']) - db.db.close() - self.assertRaises(pg.InternalError, db.query, 'select test') - self.assertEqual(db.num_queries, 0) - db.query('select test') - self.assertEqual(db.num_queries, 1) - self.assertEqual(db.begin('select 
sql:begin'), 'sql:begin') - self.assertEqual(db.num_queries, 2) - - def test9_End(self): - for closeable in (False, True): - db = SteadyPgConnection(closeable=closeable) - db.begin() - db.query('select test') - db.end() - self.assertEqual(db.session, ['begin', 'end']) - db.db.close() - db.query('select test') - self.assertEqual(db.num_queries, 1) - self.assertEqual(db.begin('select sql:end'), 'sql:end') - self.assertEqual(db.num_queries, 2) - db.begin() - db.query('select test') - db.commit() - self.assertEqual(db.session, ['begin', 'commit']) - db.db.close() - db.query('select test') - self.assertEqual(db.num_queries, 1) - self.assertEqual(db.begin('select sql:commit'), 'sql:commit') - self.assertEqual(db.num_queries, 2) - db.begin() - db.query('select test') - db.rollback() - self.assertEqual(db.session, ['begin', 'rollback']) - db.db.close() - db.query('select test') - self.assertEqual(db.num_queries, 1) - self.assertEqual(db.begin('select sql:rollback'), 'sql:rollback') - self.assertEqual(db.num_queries, 2) - - -if __name__ == '__main__': - unittest.main() diff --git a/DBUtils/Tests/TestThreadingLocal.py b/DBUtils/Tests/TestThreadingLocal.py deleted file mode 100644 index 424ea2f..0000000 --- a/DBUtils/Tests/TestThreadingLocal.py +++ /dev/null @@ -1,88 +0,0 @@ -"""Test the ThreadingLocal module.""" - -import unittest -from threading import Thread - -from DBUtils.PersistentDB import local - -__version__ = '1.4' - - -class TestThreadingLocal(unittest.TestCase): - - def test0_GetAttr(self): - mydata = local() - mydata.number = 42 - self.assertEqual(mydata.number, 42) - - def test1_Dict(self): - mydata = local() - mydata.number = 42 - self.assertEqual(mydata.__dict__, {'number': 42}) - mydata.__dict__.setdefault('widgets', []) - self.assertEqual(mydata.widgets, []) - - def test2_ThreadLocal(self): - def f(): - items = sorted(mydata.__dict__.items()) - log.append(items) - mydata.number = 11 - log.append(mydata.number) - mydata = local() - mydata.number = 42 - log = 
[] - thread = Thread(target=f) - thread.start() - thread.join() - self.assertEqual(log, [[], 11]) - self.assertEqual(mydata.number, 42) - - def test3_SubClass(self): - - class MyLocal(local): - number = 2 - initialized = 0 - - def __init__(self, **kw): - if self.initialized: - raise SystemError - self.initialized = 1 - self.__dict__.update(kw) - - def squared(self): - return self.number ** 2 - - mydata = MyLocal(color='red') - self.assertEqual(mydata.number, 2) - self.assertEqual(mydata.color, 'red') - del mydata.color - self.assertEqual(mydata.squared(), 4) - - def f(): - items = sorted(mydata.__dict__.items()) - log.append(items) - mydata.number = 7 - log.append(mydata.number) - - log = [] - thread = Thread(target=f) - thread.start() - thread.join() - self.assertEqual(log, [[('color', 'red'), ('initialized', 1)], 7]) - self.assertEqual(mydata.number, 2) - self.assertFalse(hasattr(mydata, 'color')) - - class MyLocal(local): - __slots__ = 'number' - - mydata = MyLocal() - mydata.number = 42 - mydata.color = 'red' - thread = Thread(target=f) - thread.start() - thread.join() - self.assertEqual(mydata.number, 7) - - -if __name__ == '__main__': - unittest.main() diff --git a/DBUtils/Tests/__init__.py b/DBUtils/Tests/__init__.py deleted file mode 100644 index 3195867..0000000 --- a/DBUtils/Tests/__init__.py +++ /dev/null @@ -1 +0,0 @@ -# DBUtils Tests diff --git a/DBUtils/__init__.py b/DBUtils/__init__.py deleted file mode 100644 index 7fd5939..0000000 --- a/DBUtils/__init__.py +++ /dev/null @@ -1,12 +0,0 @@ -# DBUtils package - -__all__ = [ - 'SimplePooledPg', 'SteadyPg', 'PooledPg', 'PersistentPg', - 'SimplePooledDB', 'SteadyDB', 'PooledDB', 'PersistentDB' -] - -__version__ = '1.4' - - -def InstallInWebKit(appServer): - pass diff --git a/LICENSE b/LICENSE index 9ecd470..de120c4 100644 --- a/LICENSE +++ b/LICENSE @@ -1,6 +1,6 @@ The MIT License (MIT) -Copyright (c) 2018 Christoph Zwerschke +Copyright (c) 2024 Christoph Zwerschke Permission is hereby granted, free of 
charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal diff --git a/MANIFEST.in b/MANIFEST.in index 14ffe38..835317a 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,3 +1,15 @@ -exclude Release.md setversion.py +include MANIFEST.in + +include LICENSE include README.md -include DBUtils/Docs/* + +include .bumpversion.cfg +include pyproject.toml +include tox.ini + +recursive-include tests *.py + +recursive-include docs *.rst make.py *.html *.css *.png +prune docs/_build + +global-exclude *.py[co] __pycache__ diff --git a/README.md b/README.md index 481657f..fa3b0ef 100644 --- a/README.md +++ b/README.md @@ -2,10 +2,13 @@ DBUtils ======= DBUtils is a suite of tools providing solid, persistent and pooled connections -to a database that can be used in all kinds of multi-threaded environments -like Webware for Python or other web application servers. The suite supports -DB-API 2 compliant database interfaces and the classic PyGreSQL interface. +to a database that can be used in all kinds of multi-threaded environments. -The current version of DBUtils supports Python versions 2.7 and 3.5 - 3.8. +The suite supports DB-API 2 compliant database interfaces +and the classic PyGreSQL interface. -The DBUtils home page can be found here: https://webwareforpython.github.io/DBUtils/ +The current version 3.1.0 of DBUtils supports Python versions 3.7 to 3.12. + +**Please have a look at the [changelog](https://webwareforpython.github.io/DBUtils/changelog.html), because there were some breaking changes in version 2.0.** + +The DBUtils home page can be found at https://webwareforpython.github.io/DBUtils/ diff --git a/Release.md b/Release.md deleted file mode 100644 index aec81f3..0000000 --- a/Release.md +++ /dev/null @@ -1,58 +0,0 @@ -Create a new DBUtils release: -============================= - -* Check the documentation. If possible, update all translations. 
- (Chinese translation was too old and has been removed for the time being.) - -* Use tox to run all tests in DBUtils/Tests with all supported Python versions - and to run flake8 in order to check the code style and quality. - -* Check the examples in DBUtils/Examples with the current Webware version. - -* Update and check the Release Notes and copyright information. - -* Set version number and release date with `setversion.py`. - -* Revert to old version number for translations that have not been updated. - -* Build html pages using `buildhtml.py`. - -* Create a tag in the Git repository. - -* Create a source tarball with: - - python setup.py sdist - - You will find the tarball in the "dist" folder. - - Generally, it is better to create the release under Unix to avoid - problems with DOS line feeds and wrong file permission. - -* Upload to the Python Package Index: - - Create a .pypirc file in your home directory as follows: - - echo "[pypi] - repository: https://upload.pypi.org/legacy/ - username: your username - password: your password - - [pypitest] - repository: https://test.pypi.org/legacy/ - username: your username - password: your password - " > ~.pypirc - - -* Upload the source package to the test PyPI with: - - twine upload -r pypitest dist/*.tar.gz - -* Register and upload the project to the real PyPI with: - - twine upload -r pypi dist/*.tar.gz - -* Don't forget to update the home page: - - * https://webwareforpython.github.io/DBUtils/ - * https://webwareforpython.github.io/w4py/ diff --git a/buildhtml.py b/buildhtml.py deleted file mode 100755 index 420a64c..0000000 --- a/buildhtml.py +++ /dev/null @@ -1,37 +0,0 @@ -#!/usr/bin/python3 - -"""Build HMTL from reST files.""" - -from __future__ import print_function - -from glob import glob -from os.path import splitext, join -from docutils.core import publish_file - -print("Creating the documentation...") - -for rst_file in glob(join('DBUtils', 'Docs', '*.rst')): - name = splitext(rst_file)[0] - lang = 
splitext(name)[1] - if lang.startswith('.'): - lang = lang[1:] - if lang == 'zh': - lang = 'zh_cn' - else: - lang = 'en' - html_file = name + '.html' - print(name, lang) - - with open(rst_file, encoding='utf-8-sig') as source: - with open(html_file, 'w', encoding='utf-8') as destination: - publish_file(writer_name='html5', - source=source, destination=destination, - settings_overrides = dict( - stylesheet_path='Doc.css', - embed_stylesheet=False, - toc_backlinks=False, - language_code=lang - ) - ) - -print("Done.") diff --git a/dbutils/__init__.py b/dbutils/__init__.py new file mode 100644 index 0000000..e503c5d --- /dev/null +++ b/dbutils/__init__.py @@ -0,0 +1,8 @@ +"""The DBUtils main package.""" + +__all__ = [ + '__version__', + 'simple_pooled_pg', 'steady_pg', 'pooled_pg', 'persistent_pg', + 'simple_pooled_db', 'steady_db', 'pooled_db', 'persistent_db'] + +__version__ = '3.1.0' diff --git a/DBUtils/PersistentDB.py b/dbutils/persistent_db.py similarity index 90% rename from DBUtils/PersistentDB.py rename to dbutils/persistent_db.py index ae383d4..7ca7f7c 100644 --- a/DBUtils/PersistentDB.py +++ b/dbutils/persistent_db.py @@ -42,7 +42,8 @@ prepare the session, e.g. ["set datestyle to german", ...]. 
failures: an optional exception class or a tuple of exception classes for which the connection failover mechanism shall be applied, - if the default (OperationalError, InternalError) is not adequate + if the default (OperationalError, InterfaceError, InternalError) + is not adequate for the used database module ping: an optional flag controlling when connections are checked with the ping() method if such a method is available (0 = None = never, 1 = default = whenever it is requested, @@ -64,7 +65,7 @@ every connection to your local database 'mydb' to be reused 1000 times: import pgdb # import used DB-API 2 module - from DBUtils.PersistentDB import PersistentDB + from dbutils.persistent_db import PersistentDB persist = PersistentDB(pgdb, 1000, database='mydb') Once you have set up the generator with these parameters, you can @@ -80,7 +81,7 @@ ignored since it would be reopened at the next usage anyway and contrary to the intent of having persistent connections. Instead, the connection will be automatically closed when the thread dies. -You can change this behavior be setting the closeable parameter. +You can change this behavior by setting the closeable parameter. Note that you need to explicitly start transactions by calling the begin() method. This ensures that the transparent reopening will be @@ -108,12 +109,10 @@ by Geoffrey Talvola in July 2005 Licensed under the MIT license. - """ -from DBUtils.SteadyDB import connect - -__version__ = '1.4' +from . import __version__ +from .steady_db import connect try: # Prefer the pure Python version of threading.local. @@ -138,7 +137,6 @@ class PersistentDB: After you have created the connection pool, you can use connection() to get thread-affine, steady DB-API 2 connections. - """ version = __version__ @@ -158,7 +156,8 @@ def __init__( the session, e.g. 
["set datestyle to ...", "set time zone ..."] failures: an optional exception class or a tuple of exception classes for which the connection failover mechanism shall be applied, - if the default (OperationalError, InternalError) is not adequate + if the default (OperationalError, InterfaceError, InternalError) + is not adequate for the used database module ping: determines when the connection should be checked with ping() (0 = None = never, 1 = default = whenever it is requested, 2 = when a cursor is created, 4 = when a query is executed, @@ -170,18 +169,20 @@ def __init__( (threading.local is faster, but cannot be used in all cases) args, kwargs: the parameters that shall be passed to the creator function or the connection constructor of the DB-API 2 module - """ try: threadsafety = creator.threadsafety except AttributeError: try: - if not callable(creator.connect): - raise AttributeError + threadsafety = creator.dbapi.threadsafety except AttributeError: - threadsafety = 1 - else: - threadsafety = 0 + try: + if not callable(creator.connect): + raise AttributeError + except AttributeError: + threadsafety = 1 + else: + threadsafety = 0 if not threadsafety: raise NotSupportedError("Database module is not thread-safe.") self._creator = creator @@ -200,20 +201,20 @@ def steady_connection(self): self._failures, self._ping, self._closeable, *self._args, **self._kwargs) - def connection(self, shareable=False): + def connection(self, shareable=False): # noqa: ARG002 """Get a steady, persistent DB-API 2 connection. The shareable parameter exists only for compatibility with the PooledDB connection method. In reality, persistent connections are of course never shared with other threads. 
- """ try: con = self.thread.connection - except AttributeError: + except AttributeError as error: con = self.steady_connection() if not con.threadsafety(): - raise NotSupportedError("Database module is not thread-safe.") + raise NotSupportedError( + "Database module is not thread-safe.") from error self.thread.connection = con con._ping_check() return con diff --git a/DBUtils/PersistentPg.py b/dbutils/persistent_pg.py similarity index 97% rename from DBUtils/PersistentPg.py rename to dbutils/persistent_pg.py index 08ea171..16b1338 100644 --- a/DBUtils/PersistentPg.py +++ b/dbutils/persistent_pg.py @@ -54,7 +54,7 @@ For instance, if you want every connection to your local database 'mydb' to be reused 1000 times: - from DBUtils.PersistentPg import PersistentPg + from dbutils.persistent_pg import PersistentPg persist = PersistentPg(5, dbname='mydb') Once you have set up the generator with these parameters, you can @@ -70,7 +70,7 @@ ignored since it would be reopened at the next usage anyway and contrary to the intent of having persistent connections. Instead, the connection will be automatically closed when the thread dies. -You can change this behavior be setting the closeable parameter. +You can change this behavior by setting the closeable parameter. Note that you need to explicitly start transactions by calling the begin() method. This ensures that the transparent reopening will be @@ -99,12 +99,10 @@ by Geoffrey Talvola in July 2005 Licensed under the MIT license. - """ -from DBUtils.SteadyPg import SteadyPgConnection - -__version__ = '1.4' +from . import __version__ +from .steady_pg import SteadyPgConnection try: # Prefer the pure Python version of threading.local. @@ -121,7 +119,6 @@ class PersistentPg: After you have created the connection pool, you can use connection() to get thread-affine, steady PostgreSQL connections. 
- """ version = __version__ @@ -144,7 +141,6 @@ def __init__( (threading.local is faster, but cannot be used in all cases) args, kwargs: the parameters that shall be used to establish the PostgreSQL connections using class PyGreSQL pg.DB() - """ self._maxusage = maxusage self._setsession = setsession diff --git a/DBUtils/PooledDB.py b/dbutils/pooled_db.py similarity index 85% rename from DBUtils/PooledDB.py rename to dbutils/pooled_db.py index 3336177..ed19740 100644 --- a/DBUtils/PooledDB.py +++ b/dbutils/pooled_db.py @@ -53,11 +53,12 @@ setsession: an optional list of SQL commands that may serve to prepare the session, e.g. ["set datestyle to german", ...] reset: how connections should be reset when returned to the pool - (False or None to rollback transcations started with begin(), + (False or None to rollback transactions started with begin(), the default value True always issues a rollback for safety's sake) failures: an optional exception class or a tuple of exception classes for which the connection failover mechanism shall be applied, - if the default (OperationalError, InternalError) is not adequate + if the default (OperationalError, InterfaceError, InternalError) + is not adequate for the used database module ping: an optional flag controlling when connections are checked with the ping() method if such a method is available (0 = None = never, 1 = default = whenever fetched from the pool, @@ -74,7 +75,7 @@ want a pool of at least five connections to your local database 'mydb': import pgdb # import used DB-API 2 module - from DBUtils.PooledDB import PooledDB + from dbutils.pooled_db import PooledDB pool = PooledDB(pgdb, 5, database='mydb') Once you have set up the connection pool you can request @@ -96,7 +97,7 @@ db = pool.dedicated_connection() -If you don't need it any more, you should immediately return it to the +If you don't need it anymore, you should immediately return it to the pool with db.close(). You can get another connection in the same way. 
Warning: In a threaded environment, never do the following: @@ -114,6 +115,13 @@ cur.close() # or del cur db.close() # or del db +You can also use context managers for simpler code: + + with pool.connection() as db: + with db.cursor() as cur: + cur.execute(...) + res = cur.fetchone() + Note that you need to explicitly start transactions by calling the begin() method. This ensures that the connection will not be shared with other threads, that the transparent reopening will be suspended @@ -136,21 +144,21 @@ by Dan Green in December 2000 Licensed under the MIT license. - """ +from contextlib import suppress +from functools import total_ordering from threading import Condition -from DBUtils.SteadyDB import connect - -__version__ = '1.4' +from . import __version__ +from .steady_db import connect class PooledDBError(Exception): """General PooledDB error.""" -class InvalidConnection(PooledDBError): +class InvalidConnectionError(PooledDBError): """Database connection is invalid.""" @@ -158,16 +166,20 @@ class NotSupportedError(PooledDBError): """DB-API module not supported by PooledDB.""" -class TooManyConnections(PooledDBError): +class TooManyConnectionsError(PooledDBError): """Too many database connections were opened.""" +# deprecated alias names for error classes +InvalidConnection = InvalidConnectionError +TooManyConnections = TooManyConnectionsError + + class PooledDB: """Pool for DB-API 2 connections. After you have created the connection pool, you can use connection() to get pooled, steady DB-API 2 connections. - """ version = __version__ @@ -202,29 +214,32 @@ def __init__( setsession: optional list of SQL commands that may serve to prepare the session, e.g.
["set datestyle to ...", "set time zone ..."] reset: how connections should be reset when returned to the pool - (False or None to rollback transcations started with begin(), + (False or None to rollback transactions started with begin(), True to always issue a rollback for safety's sake) failures: an optional exception class or a tuple of exception classes for which the connection failover mechanism shall be applied, - if the default (OperationalError, InternalError) is not adequate + if the default (OperationalError, InterfaceError, InternalError) + is not adequate for the used database module ping: determines when the connection should be checked with ping() (0 = None = never, 1 = default = whenever fetched from the pool, 2 = when a cursor is created, 4 = when a query is executed, 7 = always, and all other bit combinations of these values) args, kwargs: the parameters that shall be passed to the creator function or the connection constructor of the DB-API 2 module - """ try: threadsafety = creator.threadsafety except AttributeError: try: - if not callable(creator.connect): - raise AttributeError + threadsafety = creator.dbapi.threadsafety except AttributeError: - threadsafety = 2 - else: - threadsafety = 0 + try: + if not callable(creator.connect): + raise AttributeError + except AttributeError: + threadsafety = 1 + else: + threadsafety = 0 if not threadsafety: raise NotSupportedError("Database module is not thread-safe.") self._creator = creator @@ -242,8 +257,7 @@ def __init__( if maxconnections is None: maxconnections = 0 if maxcached: - if maxcached < mincached: - maxcached = mincached + maxcached = max(maxcached, mincached) self._maxcached = maxcached else: self._maxcached = 0 @@ -253,10 +267,8 @@ def __init__( else: self._maxshared = 0 if maxconnections: - if maxconnections < maxcached: - maxconnections = maxcached - if maxconnections < maxshared: - maxconnections = maxshared + maxconnections = max(maxconnections, maxcached) + maxconnections = 
max(maxconnections, maxshared) self._maxconnections = maxconnections else: self._maxconnections = 0 @@ -279,11 +291,9 @@ def connection(self, shareable=True): If shareable is set and the underlying DB-API 2 allows it, then the connection may be shared with other threads. - """ if shareable and self._maxshared: - self._lock.acquire() - try: + with self._lock: while (not self._shared_cache and self._maxconnections and self._connections >= self._maxconnections): self._wait_lock() @@ -311,12 +321,9 @@ def connection(self, shareable=True): # put the connection (back) into the shared cache self._shared_cache.append(con) self._lock.notify() - finally: - self._lock.release() con = PooledSharedDBConnection(self, con) else: # try to get a dedicated connection - self._lock.acquire() - try: + with self._lock: while (self._maxconnections and self._connections >= self._maxconnections): self._wait_lock() @@ -329,8 +336,6 @@ def connection(self, shareable=True): con._ping_check() # check connection con = PooledDedicatedDBConnection(self, con) self._connections += 1 - finally: - self._lock.release() return con def dedicated_connection(self): @@ -339,24 +344,19 @@ def dedicated_connection(self): def unshare(self, con): """Decrease the share of a connection in the shared cache.""" - self._lock.acquire() - try: + with self._lock: con.unshare() shared = con.shared - if not shared: # connection is idle, - try: # so try to remove it - self._shared_cache.remove(con) # from shared cache - except ValueError: - pass # pool has already been closed - finally: - self._lock.release() + if not shared: # connection is idle + # try to remove it from shared cache + with suppress(ValueError): # if pool has already been closed + self._shared_cache.remove(con) if not shared: # connection has become idle, self.cache(con.con) # so add it to the idle cache def cache(self, con): """Put a dedicated connection back into the idle cache.""" - self._lock.acquire() - try: + with self._lock: if not 
self._maxcached or len(self._idle_cache) < self._maxcached: con._reset(force=self._reset) # rollback possible transaction # the idle cache is not full, so put it there @@ -365,42 +365,34 @@ def cache(self, con): con.close() # then close the connection self._connections -= 1 self._lock.notify() - finally: - self._lock.release() def close(self): """Close all connections in the pool.""" - self._lock.acquire() - try: + with self._lock: while self._idle_cache: # close all idle connections con = self._idle_cache.pop(0) - try: + with suppress(Exception): con.close() - except Exception: - pass if self._maxshared: # close all shared connections while self._shared_cache: con = self._shared_cache.pop(0).con - try: + with suppress(Exception): con.close() - except Exception: - pass self._connections -= 1 - self._lock.notifyAll() - finally: - self._lock.release() + self._lock.notify_all() def __del__(self): """Delete the pool.""" - try: + # builtins (including Exceptions) might not exist anymore + try: # noqa: SIM105 self.close() - except Exception: + except: # noqa: E722, S110 pass def _wait_lock(self): """Wait until notified or report an error.""" if not self._blocking: - raise TooManyConnections + raise TooManyConnectionsError self._lock.wait() @@ -414,7 +406,6 @@ def __init__(self, pool, con): pool: the corresponding PooledDB instance con: the underlying SteadyDB connection - """ # basic initialization to make finalizer work self._con = None @@ -436,17 +427,26 @@ def __getattr__(self, name): """Proxy all members of the class.""" if self._con: return getattr(self._con, name) - else: - raise InvalidConnection + raise InvalidConnectionError def __del__(self): """Delete the pooled connection.""" - try: + # builtins (including Exceptions) might not exist anymore + try: # noqa: SIM105 self.close() - except Exception: + except: # noqa: E722, S110 pass + def __enter__(self): + """Enter a runtime context for the connection.""" + return self + + def __exit__(self, *exc): + """Exit a 
runtime context for the connection.""" + self.close() + +@total_ordering class SharedDBConnection: """Auxiliary class for shared connections.""" @@ -454,36 +454,21 @@ def __init__(self, con): """Create a shared connection. con: the underlying SteadyDB connection - """ self.con = con self.shared = 1 def __lt__(self, other): + """Check whether this connection should come before the other one.""" if self.con._transaction == other.con._transaction: return self.shared < other.shared - else: - return not self.con._transaction - - def __le__(self, other): - if self.con._transaction == other.con._transaction: - return self.shared <= other.shared - else: - return not self.con._transaction + return not self.con._transaction def __eq__(self, other): + """Check whether this connection is the same as the other one.""" return (self.con._transaction == other.con._transaction and self.shared == other.shared) - def __ne__(self, other): - return not self.__eq__(other) - - def __gt__(self, other): - return other.__lt__(self) - - def __ge__(self, other): - return other.__le__(self) - def share(self): """Increase the share of this connection.""" self.shared += 1 @@ -501,7 +486,6 @@ def __init__(self, pool, shared_con): pool: the corresponding PooledDB instance con: the underlying SharedDBConnection - """ # basic initialization to make finalizer work self._con = None @@ -525,12 +509,20 @@ def __getattr__(self, name): """Proxy all members of the class.""" if self._con: return getattr(self._con, name) - else: - raise InvalidConnection + raise InvalidConnectionError def __del__(self): """Delete the pooled connection.""" - try: + # builtins (including Exceptions) might not exist anymore + try: # noqa: SIM105 self.close() - except Exception: + except: # noqa: E722, S110 pass + + def __enter__(self): + """Enter a runtime context for the connection.""" + return self + + def __exit__(self, *exc): + """Exit a runtime context for the connection.""" + self.close() diff --git a/DBUtils/PooledPg.py 
b/dbutils/pooled_pg.py similarity index 82% rename from DBUtils/PooledPg.py rename to dbutils/pooled_pg.py index c451dec..18585df 100644 --- a/DBUtils/PooledPg.py +++ b/dbutils/pooled_pg.py @@ -55,7 +55,7 @@ For instance, if you want a pool of at least five connections to your local database 'mydb': - from DBUtils.PooledPg import PooledPg + from dbutils.pooled_pg import PooledPg pool = PooledPg(5, dbname='mydb') Once you have set up the connection pool you can request @@ -68,7 +68,7 @@ proxy class for the hardened SteadyPg version of the connection. The connection will not be shared with other threads. If you don't need -it any more, you should immediately return it to the pool with db.close(). +it anymore, you should immediately return it to the pool with db.close(). You can get another connection in the same way or with db.reopen(). Warning: In a threaded environment, never do the following: @@ -83,6 +83,11 @@ res = db.query(...).getresult() db.close() # or del db +You can also use a context manager for simpler code: + + with pool.connection() as db: + res = db.query(...).getresult() + Note that you need to explicitly start transactions by calling the begin() method. This ensures that the transparent reopening will be suspended until the end of the transaction, and that the connection will @@ -105,37 +110,41 @@ by Dan Green in December 2000 Licensed under the MIT license. - """ -try: - from Queue import Queue, Empty, Full -except ImportError: # Python 3 - from queue import Queue, Empty, Full +from contextlib import suppress +from queue import Empty, Full, Queue -from DBUtils.SteadyPg import SteadyPgConnection +from . 
import __version__ +from .steady_pg import SteadyPgConnection -__version__ = '1.4' +# constants for "reset" parameter +RESET_ALWAYS_ROLLBACK = 1 +RESET_COMPLETELY = 2 class PooledPgError(Exception): """General PooledPg error.""" -class InvalidConnection(PooledPgError): +class InvalidConnectionError(PooledPgError): """Database connection is invalid.""" -class TooManyConnections(PooledPgError): +class TooManyConnectionsError(PooledPgError): """Too many database connections were opened.""" +# deprecated alias names for error classes +InvalidConnection = InvalidConnectionError +TooManyConnections = TooManyConnectionsError + + class PooledPg: """Pool for classic PyGreSQL connections. After you have created the connection pool, you can use connection() to get pooled, steady PostgreSQL connections. - """ version = __version__ @@ -163,11 +172,10 @@ def __init__( setsession: optional list of SQL commands that may serve to prepare the session, e.g. ["set datestyle to ...", "set time zone ..."] reset: how connections should be reset when returned to the pool - (0 or None to rollback transcations started with begin(), + (0 or None to rollback transactions started with begin(), 1 to always issue a rollback, 2 for a complete reset) args, kwargs: the parameters that shall be used to establish the PostgreSQL connections using class PyGreSQL pg.DB() - """ self._args, self._kwargs = args, kwargs self._maxusage = maxusage @@ -179,12 +187,10 @@ def __init__( maxcached = 0 if maxconnections is None: maxconnections = 0 - if maxcached: - if maxcached < mincached: - maxcached = mincached + if maxcached and maxcached < mincached: + maxcached = mincached if maxconnections: - if maxconnections < maxcached: - maxconnections = maxcached + maxconnections = max(maxconnections, maxcached) # Create semaphore for number of allowed connections generally: from threading import Semaphore self._connections = Semaphore(maxconnections) @@ -204,11 +210,10 @@ def steady_connection(self): def 
connection(self): """Get a steady, cached PostgreSQL connection from the pool.""" - if self._connections: - if not self._connections.acquire(self._blocking): - raise TooManyConnections + if self._connections and not self._connections.acquire(self._blocking): + raise TooManyConnectionsError try: - con = self._cache.get(0) + con = self._cache.get_nowait() except Empty: con = self.steady_connection() return PooledPgConnection(self, con) @@ -216,15 +221,12 @@ def connection(self): def cache(self, con): """Put a connection back into the pool cache.""" try: - if self._reset == 2: + if self._reset == RESET_COMPLETELY: con.reset() # reset the connection completely - else: - if self._reset or con._transaction: - try: - con.rollback() # rollback a possible transaction - except Exception: - pass - self._cache.put(con, 0) # and then put it back into the cache + elif self._reset == RESET_ALWAYS_ROLLBACK or con._transaction: + with suppress(Exception): + con.rollback() # rollback a possible transaction + self._cache.put_nowait(con) # and then put it back into the cache except Full: con.close() if self._connections: @@ -234,11 +236,9 @@ def close(self): """Close all connections in the pool.""" while 1: try: - con = self._cache.get(0) - try: + con = self._cache.get_nowait() + with suppress(Exception): con.close() - except Exception: - pass if self._connections: self._connections.release() except Empty: @@ -246,9 +246,10 @@ def close(self): def __del__(self): """Delete the pool.""" - try: + # builtins (including Exceptions) might not exist anymore + try: # noqa: SIM105 self.close() - except Exception: + except: # noqa: E722, S110 pass @@ -262,7 +263,6 @@ def __init__(self, pool, con): pool: the corresponding PooledPg instance con: the underlying SteadyPg connection - """ self._pool = pool self._con = con @@ -270,7 +270,7 @@ def __init__(self, pool, con): def close(self): """Close the pooled connection.""" # Instead of actually closing the connection, - # return it to the pool so it 
can be reused. + # return it to the pool so that it can be reused. if self._con: self._pool.cache(self._con) self._con = None @@ -289,12 +289,20 @@ def __getattr__(self, name): """Proxy all members of the class.""" if self._con: return getattr(self._con, name) - else: - raise InvalidConnection + raise InvalidConnectionError def __del__(self): """Delete the pooled connection.""" - try: + # builtins (including Exceptions) might not exist anymore + try: # noqa: SIM105 self.close() - except Exception: + except: # noqa: E722, S110 pass + + def __enter__(self): + """Enter a runtime context for the connection.""" + return self + + def __exit__(self, *exc): + """Exit a runtime context for the connection.""" + self.close() diff --git a/DBUtils/SimplePooledDB.py b/dbutils/simple_pooled_db.py similarity index 90% rename from DBUtils/SimplePooledDB.py rename to dbutils/simple_pooled_db.py index 3318163..2558814 100644 --- a/DBUtils/SimplePooledDB.py +++ b/dbutils/simple_pooled_db.py @@ -26,7 +26,7 @@ to be cached in the pool and the connection parameters, e.g. import pgdb # import used DB-API 2 module - from DBUtils.SimplePooledDB import PooledDB + from dbutils.simple_pooled_db import PooledDB dbpool = PooledDB(pgdb, 5, host=..., database=..., user=..., ...) you can demand database connections from that pool, @@ -47,7 +47,7 @@ * Connections should have some sort of maximum usage limit after which they should be automatically closed and reopened. * Prefer or enforce thread-affinity for the connections, -allowing for both sharable and non-sharable connections. +allowing for both shareable and non-shareable connections. Please note that these and other ideas have been already implemented in in PooledDB, a more sophisticated version @@ -70,10 +70,9 @@ by Christoph Zwerschke in September 2005 Licensed under the MIT license. - """ -__version__ = '1.4' +from . 
import __version__ class PooledDBError(Exception): @@ -89,26 +88,28 @@ class PooledDBConnection: You don't normally deal with this class directly, but use PooledDB to get new connections. - """ def __init__(self, pool, con): + """Initialize pooled connection.""" self._con = con self._pool = pool def close(self): """Close the pooled connection.""" # Instead of actually closing the connection, - # return it to the pool so it can be reused. + # return it to the pool so that it can be reused. if self._con is not None: self._pool.returnConnection(self._con) self._con = None def __getattr__(self, name): - # All other members are the same. + """Get the attribute with the given name.""" + # All other attributes are the same. return getattr(self._con, name) def __del__(self): + """Delete the pooled connection.""" self.close() @@ -117,7 +118,6 @@ class PooledDB: After you have created the connection pool, you can get connections using getConnection(). - """ version = __version__ @@ -129,7 +129,6 @@ def __init__(self, dbapi, maxconnections, *args, **kwargs): maxconnections: the number of connections cached in the pool args, kwargs: the parameters that shall be used to establish the database connections using connect() - """ try: threadsafety = dbapi.threadsafety @@ -138,14 +137,11 @@ def __init__(self, dbapi, maxconnections, *args, **kwargs): if threadsafety == 0: raise NotSupportedError( "Database module does not support any level of threading.") - elif threadsafety == 1: + if threadsafety == 1: # If there is no connection level safety, build # the pool using the synchronized queue class # that implements all the required locking semantics. 
- try: - from Queue import Queue - except ImportError: # Python 3 - from queue import Queue + from queue import Queue self._queue = Queue(maxconnections) # create the queue self.connection = self._unthreadsafe_get_connection self.addConnection = self._unthreadsafe_add_connection @@ -166,7 +162,7 @@ def __init__(self, dbapi, maxconnections, *args, **kwargs): "Database module threading support cannot be determined.") # Establish all database connections (it would be better to # only establish a part of them now, and the rest on demand). - for i in range(maxconnections): + for _i in range(maxconnections): self.addConnection(dbapi.connect(*args, **kwargs)) # The following functions are used with DB-API 2 modules @@ -190,7 +186,6 @@ def _unthreadsafe_return_connection(self, con): back into the queue after they have been used. This is done automatically when the connection is closed and should never be called explicitly outside of this module. - """ self._unthreadsafe_add_connection(con) @@ -201,17 +196,14 @@ def _unthreadsafe_return_connection(self, con): def _threadsafe_get_connection(self): """Get a connection from the pool.""" - self._lock.acquire() - try: - next = self._nextConnection - con = PooledDBConnection(self, self._connections[next]) - next += 1 - if next >= len(self._connections): - next = 0 - self._nextConnection = next + with self._lock: + next_con = self._nextConnection + con = PooledDBConnection(self, self._connections[next_con]) + next_con += 1 + if next_con >= len(self._connections): + next_con = 0 + self._nextConnection = next_con return con - finally: - self._lock.release() def _threadsafe_add_connection(self, con): """Add a connection to the pool.""" @@ -222,6 +214,5 @@ def _threadsafe_return_connection(self, con): In this case, the connections always stay in the pool, so there is no need to do anything here. 
- """ - pass + # we don't need to do anything here diff --git a/DBUtils/SimplePooledPg.py b/dbutils/simple_pooled_pg.py similarity index 90% rename from DBUtils/SimplePooledPg.py rename to dbutils/simple_pooled_pg.py index 84cd1a2..5997e27 100644 --- a/DBUtils/SimplePooledPg.py +++ b/dbutils/simple_pooled_pg.py @@ -26,7 +26,7 @@ number of connections to be cached in the pool and the connection parameters, e.g. - from DBUtils.SimplePooledPg import PooledPg + from dbutils.simple_pooled_pg import PooledPg dbpool = PooledPg(5, host=..., database=..., user=..., ...) you can demand database connections from that pool, @@ -65,12 +65,11 @@ by Dan Green in December 2000 Licensed under the MIT license. - """ -from pg import DB as PgConnection +from pg import DB as PgConnection # noqa: N811 -__version__ = '1.4' +from . import __version__ class PooledPgConnection: @@ -78,26 +77,28 @@ class PooledPgConnection: You don't normally deal with this class directly, but use PooledPg to get new connections. - """ def __init__(self, pool, con): + """Initialize pooled connection.""" self._con = con self._pool = pool def close(self): """Close the pooled connection.""" # Instead of actually closing the connection, - # return it to the pool so it can be reused. + # return it to the pool so that it can be reused. if self._con is not None: self._pool.cache(self._con) self._con = None def __getattr__(self, name): - # All other members are the same. + """Get the attribute with the given name.""" + # All other attributes are the same. return getattr(self._con, name) def __del__(self): + """Delete the pooled connection.""" self.close() @@ -106,7 +107,6 @@ class PooledPg: After you have created the connection pool, you can get connections using getConnection(). 
- """ version = __version__ @@ -117,19 +117,15 @@ def __init__(self, maxconnections, *args, **kwargs): maxconnections: the number of connections cached in the pool args, kwargs: the parameters that shall be used to establish the PostgreSQL connections using pg.connect() - """ # Since there is no connection level safety, we # build the pool using the synchronized queue class # that implements all the required locking semantics. - try: - from Queue import Queue - except ImportError: # Python 3 - from queue import Queue + from queue import Queue self._queue = Queue(maxconnections) # Establish all database connections (it would be better to # only establish a part of them now, and the rest on demand). - for i in range(maxconnections): + for _i in range(maxconnections): self.cache(PgConnection(*args, **kwargs)) def cache(self, con): diff --git a/DBUtils/SteadyDB.py b/dbutils/steady_db.py similarity index 87% rename from DBUtils/SteadyDB.py rename to dbutils/steady_db.py index 6ad6423..8b6f11f 100644 --- a/DBUtils/SteadyDB.py +++ b/dbutils/steady_db.py @@ -58,7 +58,7 @@ without further notice. import pgdb # import used DB-API 2 module - from DBUtils.SteadyDB import connect + from dbutils.steady_db import connect db = connect(pgdb, 10000, ["set datestyle to german"], host=..., database=..., user=..., ...) ... @@ -87,31 +87,32 @@ suggested by Ezio Vernacotola in December 2006 Licensed under the MIT license. - """ import sys +from contextlib import suppress -__version__ = '1.4' - -try: - baseint = (int, long) -except NameError: # Python 3 - baseint = int +from . 
import __version__ class SteadyDBError(Exception): """General SteadyDB error.""" -class InvalidCursor(SteadyDBError): +class InvalidCursorError(SteadyDBError): """Database cursor is invalid.""" +# deprecated alias names for error classes +InvalidCursor = InvalidCursorError + + def connect( creator, maxusage=None, setsession=None, failures=None, ping=1, closeable=True, *args, **kwargs): - """A tough version of the connection constructor of a DB-API 2 module. + """Create a "tough" connection. + + A hardened version of the connection function of a DB-API 2 module. creator: either an arbitrary function returning new DB-API 2 compliant connection objects or a DB-API 2 compliant database module @@ -123,7 +124,8 @@ def connect( the session, e.g. ["set datestyle to german", "set time zone mez"] failures: an optional exception class or a tuple of exception classes for which the failover mechanism shall be applied, if the default - (OperationalError, InternalError) is not adequate + (OperationalError, InterfaceError, InternalError) is not adequate + for the used database module ping: determines when the connection should be checked with ping() (0 = None = never, 1 = default = when _ping_check() is called, 2 = whenever a cursor is created, 4 = when a query is executed, @@ -132,7 +134,6 @@ def connect( be silently ignored, but by default the connection can be closed args, kwargs: the parameters that shall be passed to the creator function or the connection constructor of the DB-API 2 module - """ return SteadyDBConnection( creator, maxusage, setsession, @@ -140,7 +141,7 @@ class SteadyDBConnection: - """A "tough" version of DB-API 2 connections.""" + """A hardened version of DB-API 2 connections.""" version = __version__ @@ -154,7 +155,11 @@ def __init__( # proper initialization of the connection try: self._creator = creator.connect - self._dbapi = creator + try: + if creator.dbapi.connect: + self._dbapi = creator.dbapi + except AttributeError: + self._dbapi = creator
except AttributeError: # try finding the DB-API 2 module via the connection creator self._creator = creator @@ -175,10 +180,10 @@ def __init__( except AttributeError: self._threadsafety = None if not callable(self._creator): - raise TypeError("%r is not a connection provider." % (creator,)) + raise TypeError(f"{creator!r} is not a connection provider.") if maxusage is None: maxusage = 0 - if not isinstance(maxusage, baseint): + if not isinstance(maxusage, int): raise TypeError("'maxusage' must be an integer value.") self._maxusage = maxusage self._setsession_sql = setsession @@ -199,7 +204,6 @@ def __exit__(self, *exc): """Exit the runtime context for the connection object. This does not close the connection, but it ends a transaction. - """ if exc[0] is None and exc[1] is None and exc[2] is None: self.commit() @@ -229,10 +233,7 @@ def _create(self): else: break i = mod.rfind('.') - if i < 0: - mod = None - else: - mod = mod[:i] + mod = None if i < 0 else mod[:i] else: try: mod = con.OperationalError.__module__ @@ -248,38 +249,38 @@ def _create(self): else: break i = mod.rfind('.') - if i < 0: - mod = None - else: - mod = mod[:i] + mod = None if i < 0 else mod[:i] else: self._dbapi = None if self._threadsafety is None: try: self._threadsafety = self._dbapi.threadsafety except AttributeError: - try: + with suppress(AttributeError): self._threadsafety = con.threadsafety - except AttributeError: - pass if self._failures is None: try: self._failures = ( self._dbapi.OperationalError, + self._dbapi.InterfaceError, self._dbapi.InternalError) except AttributeError: try: self._failures = ( self._creator.OperationalError, + self._creator.InterfaceError, self._creator.InternalError) except AttributeError: try: self._failures = ( - con.OperationalError, con.InternalError) - except AttributeError: + con.OperationalError, + con.InterfaceError, + con.InternalError) + except AttributeError as error: raise AttributeError( "Could not determine failure exceptions" - " (please set 
failures or creator.dbapi).") + " (please set failures or creator.dbapi).", + ) from error if isinstance(self._failures, tuple): self._failure = self._failures[0] else: @@ -288,10 +289,8 @@ def _create(self): except Exception as error: # the database module could not be determined # or the session could not be prepared - try: # close the connection first - con.close() - except Exception: - pass + with suppress(Exception): + con.close() # close the connection first raise error # re-raise the original error again return con @@ -315,15 +314,12 @@ def _store(self, con): def _close(self): """Close the tough connection. - You can always close a tough connection with this method + You can always close a tough connection with this method, and it will not complain if you close it more than once. - """ if not self._closed: - try: + with suppress(Exception): self._con.close() - except Exception: - pass self._transaction = False self._closed = True @@ -331,21 +327,17 @@ def _reset(self, force=False): """Reset a tough connection. Rollback if forced or the connection was in a transaction. - """ if not self._closed and (force or self._transaction): - try: + with suppress(Exception): self.rollback() - except Exception: - pass def _ping_check(self, ping=1, reconnect=True): """Check whether the connection is still alive using ping(). - If the the underlying connection is not active and the ping + If the underlying connection is not active and the ping parameter is set accordingly, the connection will be recreated unless the connection is currently inside a transaction. 
- """ if ping & self._ping: try: # if possible, ping the connection @@ -367,13 +359,14 @@ def _ping_check(self, ping=1, reconnect=True): if reconnect and not self._transaction: try: # try to reopen the connection con = self._create() - except Exception: + except Exception: # noqa: S110 pass else: self._close() self._store(con) alive = True return alive + return None def dbapi(self): """Return the underlying DB-API 2 module of the connection.""" @@ -396,13 +389,12 @@ def threadsafety(self): def close(self): """Close the tough connection. - You are allowed to close a tough connection by default + You are allowed to close a tough connection by default, and it will not complain if you close it more than once. You can disallow closing connections by setting the closeable parameter to something false. In this case, closing tough connections will be silently ignored. - """ if self._closeable: self._close() @@ -417,7 +409,6 @@ def begin(self, *args, **kwargs): If the underlying driver supports this method, it will be called with the given parameters (e.g. for distributed transactions). - """ self._transaction = True try: @@ -435,7 +426,7 @@ def commit(self): except self._failures as error: # cannot commit try: # try to reopen the connection con = self._create() - except Exception: + except Exception: # noqa: S110 pass else: self._close() @@ -450,7 +441,7 @@ def rollback(self): except self._failures as error: # cannot rollback try: # try to reopen the connection con = self._create() - except Exception: + except Exception: # noqa: S110 pass else: self._close() @@ -461,7 +452,6 @@ def cancel(self): """Cancel a long-running transaction. If the underlying driver supports this method, it will be called. - """ self._transaction = False try: @@ -476,7 +466,10 @@ def ping(self, *args, **kwargs): return self._con.ping(*args, **kwargs) def _cursor(self, *args, **kwargs): - """A "tough" version of the method cursor().""" + """Create a "tough" cursor. 
+ + This is a hardened version of the method cursor(). + """ # The args and kwargs are not part of the standard, # but some database modules seem to use these. transaction = self._transaction @@ -491,12 +484,12 @@ def _cursor(self, *args, **kwargs): except self._failures as error: # error in getting cursor try: # try to reopen the connection con = self._create() - except Exception: + except Exception: # noqa: S110 pass else: try: # and try one more time to get a cursor cursor = con.cursor(*args, **kwargs) - except Exception: + except Exception: # noqa: S110 pass else: self._close() @@ -504,10 +497,8 @@ def _cursor(self, *args, **kwargs): if transaction: raise error # re-raise the original error again return cursor - try: + with suppress(Exception): con.close() - except Exception: - pass if transaction: self._transaction = False raise error # re-raise the original error again @@ -519,14 +510,15 @@ def cursor(self, *args, **kwargs): def __del__(self): """Delete the steady connection.""" - try: + # builtins (including Exceptions) might not exist anymore + try: # noqa: SIM105 self._close() # make sure the connection is closed - except Exception: + except: # noqa: E722, S110 pass class SteadyDBCursor: - """A "tough" version of DB-API 2 cursors.""" + """A hardened version of DB-API 2 cursors.""" def __init__(self, con, *args, **kwargs): """Create a "tough" DB-API 2 cursor.""" @@ -539,8 +531,8 @@ def __init__(self, con, *args, **kwargs): self._clearsizes() try: self._cursor = con._cursor(*args, **kwargs) - except AttributeError: - raise TypeError("%r is not a SteadyDBConnection." 
% (con,)) + except AttributeError as error: + raise TypeError(f"{con!r} is not a SteadyDBConnection.") from error self._closed = False def __enter__(self): @@ -551,6 +543,14 @@ def __exit__(self, *exc): """Exit the runtime context for the cursor object.""" self.close() + def __iter__(self): + """Make cursor compatible to the iteration protocol.""" + cursor = self._cursor + try: # use iterator provided by original cursor + return iter(cursor) + except TypeError: # create iterator if not provided + return iter(cursor.fetchone, None) + def setinputsizes(self, sizes): """Store input sizes in case cursor needs to be reopened.""" self._inputsizes = sizes @@ -580,13 +580,10 @@ def close(self): """Close the tough cursor. It will not complain if you close it more than once. - """ if not self._closed: - try: + with suppress(Exception): self._cursor.close() - except Exception: - pass self._closed = True def _get_tough_method(self, name): @@ -613,7 +610,7 @@ def tough_method(*args, **kwargs): try: cursor2 = con._cursor( *self._args, **self._kwargs) # open new cursor - except Exception: + except Exception: # noqa: S110 pass else: try: # and try one more time to execute @@ -623,26 +620,24 @@ def tough_method(*args, **kwargs): result = method(*args, **kwargs) if execute: self._clearsizes() - except Exception: + except Exception: # noqa: S110 pass else: self.close() self._cursor = cursor2 con._usage += 1 return result - try: + with suppress(Exception): cursor2.close() - except Exception: - pass try: # try to reopen the connection con2 = con._create() - except Exception: + except Exception: # noqa: S110 pass else: try: cursor2 = con2.cursor( *self._args, **self._kwargs) # open new cursor - except Exception: + except Exception: # noqa: S110 pass else: if transaction: @@ -656,6 +651,8 @@ def tough_method(*args, **kwargs): if execute: self._setsizes(cursor2) method2 = getattr(cursor2, name) + # if the following call hangs, + # you may have forgotten to call begin() result = 
method2(*args, **kwargs) if execute: self._clearsizes() @@ -676,14 +673,10 @@ def tough_method(*args, **kwargs): if error2: raise error2 # raise the other error return result - try: + with suppress(Exception): cursor2.close() - except Exception: - pass - try: + with suppress(Exception): con2.close() - except Exception: - pass if transaction: self._transaction = False raise error # re-raise the original error again @@ -698,14 +691,13 @@ def __getattr__(self, name): if name.startswith(('execute', 'call')): # make execution methods "tough" return self._get_tough_method(name) - else: - return getattr(self._cursor, name) - else: - raise InvalidCursor + return getattr(self._cursor, name) + raise InvalidCursorError def __del__(self): """Delete the steady cursor.""" - try: + # builtins (including Exceptions) might not exist anymore + try: # noqa: SIM105 self.close() # make sure the cursor is closed - except Exception: + except: # noqa: E722, S110 pass diff --git a/DBUtils/SteadyPg.py b/dbutils/steady_pg.py similarity index 87% rename from DBUtils/SteadyPg.py rename to dbutils/steady_pg.py index 5c10be8..d17d3bb 100644 --- a/DBUtils/SteadyPg.py +++ b/dbutils/steady_pg.py @@ -45,7 +45,7 @@ database is lost or has been used too often, it will be automatically reset, without further notice. - from DBUtils.SteadyPg import SteadyPgConnection + from dbutils.steady_pg import SteadyPgConnection db = SteadyPgConnection(10000, ["set datestyle to german"], host=..., dbname=..., user=..., ...) ... @@ -67,27 +67,27 @@ by Christoph Zwerschke in September 2005 Licensed under the MIT license. - """ -from pg import DB as PgConnection +from contextlib import suppress -__version__ = '1.4' +from pg import DB as PgConnection # noqa: N811 -try: - baseint = (int, long) -except NameError: # Python 3 - baseint = int +from . 
import __version__ class SteadyPgError(Exception): """General SteadyPg error.""" -class InvalidConnection(SteadyPgError): +class InvalidConnectionError(SteadyPgError): """Database connection is invalid.""" +# deprecated alias names for error classes +InvalidConnection = InvalidConnectionError + + class SteadyPgConnection: """Class representing steady connections to a PostgreSQL database. @@ -98,7 +98,6 @@ class SteadyPgConnection: If you want the connection to be persistent in a threaded environment, then you should not deal with this class directly, but use either the PooledPg module or the PersistentPg module to get the connections. - """ version = __version__ @@ -108,6 +107,8 @@ def __init__( *args, **kwargs): """Create a "tough" PostgreSQL connection. + A hardened version of the DB wrapper class of PyGreSQL. + maxusage: maximum usage limit for the underlying PyGreSQL connection (number of uses, 0 or None means unlimited usage) When this limit is reached, the connection is automatically reset. @@ -117,7 +118,6 @@ def __init__( be silently ignored, but by default the connection can be closed args, kwargs: the parameters that shall be used to establish the PostgreSQL connections with PyGreSQL using pg.DB() - """ # basic initialization to make finalizer work self._con = None @@ -125,7 +125,7 @@ def __init__( # proper initialization of the connection if maxusage is None: maxusage = 0 - if not isinstance(maxusage, baseint): + if not isinstance(maxusage, int): raise TypeError("'maxusage' must be an integer value.") self._maxusage = maxusage self._setsession_sql = setsession @@ -157,28 +157,24 @@ def _setsession(self): def _close(self): """Close the tough connection. - You can always close a tough connection with this method + You can always close a tough connection with this method, and it will not complain if you close it more than once. 
- """ if not self._closed: - try: + with suppress(Exception): self._con.close() - except Exception: - pass self._transaction = False self._closed = True def close(self): """Close the tough connection. - You are allowed to close a tough connection by default + You are allowed to close a tough connection by default, and it will not complain if you close it more than once. You can disallow closing connections by setting the closeable parameter to something false. In this case, closing tough connections will be silently ignored. - """ if self._closeable: self._close() @@ -189,17 +185,14 @@ def reopen(self): """Reopen the tough connection. It will not complain if the connection cannot be reopened. - """ try: self._con.reopen() except Exception: - if self._transcation: + if self._transaction: self._transaction = False - try: + with suppress(Exception): self._con.query('rollback') - except Exception: - pass else: self._transaction = False self._closed = False @@ -211,7 +204,6 @@ def reset(self): If a reset is not possible, tries to reopen the connection. It will not complain if the connection is already closed. 
- """ try: self._con.reset() @@ -222,10 +214,8 @@ def reset(self): try: self.reopen() except Exception: - try: + with suppress(Exception): self.rollback() - except Exception: - pass def begin(self, sql=None): """Begin a transaction.""" @@ -236,10 +226,7 @@ def begin(self, sql=None): return self._con.query(sql or 'begin') else: # use existing method if available - if sql: - return begin(sql=sql) - else: - return begin() + return begin(sql=sql) if sql else begin() def end(self, sql=None): """Commit the current transaction.""" @@ -249,10 +236,7 @@ def end(self, sql=None): except AttributeError: return self._con.query(sql or 'end') else: - if sql: - return end(sql=sql) - else: - return end() + return end(sql=sql) if sql else end() def commit(self, sql=None): """Commit the current transaction.""" @@ -262,10 +246,7 @@ def commit(self, sql=None): except AttributeError: return self._con.query(sql or 'commit') else: - if sql: - return commit(sql=sql) - else: - return commit() + return commit(sql=sql) if sql else commit() def rollback(self, sql=None): """Rollback the current transaction.""" @@ -275,10 +256,7 @@ def rollback(self, sql=None): except AttributeError: return self._con.query(sql or 'rollback') else: - if sql: - return rollback(sql=sql) - else: - return rollback() + return rollback(sql=sql) if sql else rollback() def _get_tough_method(self, method): """Return a "tough" version of a connection class method. @@ -286,7 +264,6 @@ def _get_tough_method(self, method): The tough version checks whether the connection is bad (lost) and automatically and transparently tries to reset the connection if this is the case (for instance, the database has been restarted). 
- """ def tough_method(*args, **kwargs): transaction = self._transaction @@ -305,11 +282,10 @@ def tough_method(*args, **kwargs): if transaction: # inside a transaction self._transaction = False raise # propagate the error - elif self._con.db.status: # if it was not a connection problem + if self._con.db.status: # if it was not a connection problem raise # then propagate the error - else: # otherwise - self.reset() # reset the connection - result = method(*args, **kwargs) # and try one more time + self.reset() # reset the connection + result = method(*args, **kwargs) # and try one more time self._usage += 1 return result return tough_method @@ -318,7 +294,6 @@ def __getattr__(self, name): """Inherit the members of the standard connection class. Some methods are made "tougher" than in the standard version. - """ if self._con: attr = getattr(self._con, name) @@ -326,12 +301,12 @@ def __getattr__(self, name): or name.startswith('get_')): attr = self._get_tough_method(attr) return attr - else: - raise InvalidConnection + raise InvalidConnectionError def __del__(self): """Delete the steady connection.""" - try: + # builtins (including Exceptions) might not exist anymore + try: # noqa: SIM105 self._close() # make sure the connection is closed - except Exception: + except: # noqa: E722, S110 pass diff --git a/docs/changelog.html b/docs/changelog.html new file mode 100644 index 0000000..0707d36 --- /dev/null +++ b/docs/changelog.html @@ -0,0 +1,251 @@ +<!DOCTYPE html> +<html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en" lang="en"> +<head> +<meta charset="utf-8" /> +<meta name="generator" content="Docutils 0.21.2: https://docutils.sourceforge.io/" /> +<meta name="viewport" content="width=device-width, initial-scale=1" /> +<title>Changelog for DBUtils</title> +<link rel="stylesheet" href="doc.css" type="text/css" /> +</head> +<body> +<main id="changelog-for-dbutils"> +<h1 class="title">Changelog for DBUtils</h1> + +<section id="section-1"> +<h2>3.1.0</h2> +<p>DBUtils 
3.1.0 was released on March 17, 2024.</p> +<p>Changes:</p> +<ul class="simple"> +<li><p>Support Python version 3.12, cease support for Python 3.6.</p></li> +<li><p>Various small internal improvements and modernizations.</p></li> +</ul> +</section> +<section id="section-2"> +<h2>3.0.3</h2> +<p>DBUtils 3.0.3 was released on April 27, 2023.</p> +<p>Changes:</p> +<ul class="simple"> +<li><p>Support Python version 3.11.</p></li> +<li><p>Improve determination of DB API module if creator is specified.</p></li> +<li><p>Minor fixes and section an advanced usage in docs.</p></li> +</ul> +</section> +<section id="section-3"> +<h2>3.0.2</h2> +<p>DBUtils 3.0.2 was released on January 14, 2022.</p> +<p>The optional iterator protocol on cursors is now supported.</p> +</section> +<section id="section-4"> +<h2>3.0.1</h2> +<p>DBUtils 3.0.1 was released on December 22, 2021.</p> +<p>It includes <span class="docutils literal">InterfaceError</span> to the default list of exceptions +for which the connection failover mechanism is applied. 
+You can override this with the <span class="docutils literal">failures</span> parameter.</p> +</section> +<section id="section-5"> +<h2>3.0.0</h2> +<p>DBUtils 3.0.0 was released on November 26, 2021.</p> +<p>It is intended to be used with Python versions 3.6 to 3.10.</p> +<p>Changes:</p> +<ul class="simple"> +<li><p>Cease support for Python 2 and 3.5, minor optimizations.</p></li> +</ul> +</section> +<section id="section-6"> +<h2>2.0.3</h2> +<p>DBUtils 2.0.3 was released on November 26, 2021.</p> +<p>Changes:</p> +<ul class="simple"> +<li><p>Support Python version 3.10.</p></li> +</ul> +</section> +<section id="section-7"> +<h2>2.0.2</h2> +<p>DBUtils 2.0.2 was released on June 8, 2021.</p> +<p>Changes:</p> +<ul class="simple"> +<li><p>Allow using context managers for pooled connections.</p></li> +</ul> +</section> +<section id="section-8"> +<h2>2.0.1</h2> +<p>DBUtils 2.0.1 was released on April 8, 2021.</p> +<p>Changes:</p> +<ul class="simple"> +<li><p>Avoid "name Exception is not defined" when exiting.</p></li> +</ul> +</section> +<section id="section-9"> +<h2>2.0</h2> +<p>DBUtils 2.0 was released on September 26, 2020.</p> +<p>It is intended to be used with Python versions 2.7 and 3.5 to 3.9.</p> +<p>Changes:</p> +<ul class="simple"> +<li><p>DBUtils does not act as a Webware plugin anymore, it is now just an ordinary +Python package (of course it could be used as such also before).</p></li> +<li><p>The Webware <span class="docutils literal">Examples</span> folder has been removed.</p></li> +<li><p>Folders, packages and modules have been renamed to lower-case. 
+Particularly, you need to import <span class="docutils literal">dbutils</span> instead of <span class="docutils literal">DBUtils</span> now.</p></li> +<li><p>The internal naming conventions have also been changed to comply with PEP8.</p></li> +<li><p>The documentation has been adapted to reflect the changes in this version.</p></li> +<li><p>This changelog has been compiled from the former release notes.</p></li> +</ul> +</section> +<section id="section-10"> +<h2>1.4</h2> +<p>DBUtils 1.4 was released on September 26, 2020.</p> +<p>It is intended to be used with Python versions 2.7 and 3.5 to 3.9.</p> +<p>Improvements:</p> +<ul class="simple"> +<li><p>The <span class="docutils literal">SteadyDB</span> and <span class="docutils literal">SteadyPg</span> classes only reconnect after the +<span class="docutils literal">maxusage</span> limit has been reached when the connection is not currently +inside a transaction.</p></li> +</ul> +</section> +<section id="section-11"> +<h2>1.3</h2> +<p>DBUtils 1.3 was released on March 3, 2018.</p> +<p>It is intended to be used with Python versions 2.6, 2.7 and 3.4 to 3.7.</p> +<p>Improvements:</p> +<ul class="simple"> +<li><p>This version now supports context handlers for connections and cursors.</p></li> +</ul> +</section> +<section id="section-12"> +<h2>1.2</h2> +<p>DBUtils 1.2 was released on February 5, 2017.</p> +<p>It is intended to be used with Python versions 2.6, 2.7 and 3.0 to 3.6.</p> +</section> +<section id="section-13"> +<h2>1.1.1</h2> +<p>DBUtils 1.1.1 was released on February 4, 2017.</p> +<p>It is intended to be used with Python versions 2.3 to 2.7.</p> +<p>Improvements:</p> +<ul class="simple"> +<li><p>Reopen <span class="docutils literal">SteadyDB</span> connections when commit or rollback fails +(suggested by Ben Hoyt).</p></li> +</ul> +<p>Bugfixes:</p> +<ul class="simple"> +<li><p>Fixed a problem when running under Jython (reported by Vitaly Kruglikov).</p></li> +</ul> +</section> +<section id="section-14"> 
+<h2>1.1</h2> +<p>DBUtils 1.1 was released on August 14, 2011.</p> +<p>Improvements:</p> +<ul class="simple"> +<li><p>The transparent reopening of connections is actually an undesired behavior +if it happens during database transactions. In these cases, the transaction +should fail and the error be reported back to the application instead of the +rest of the transaction being executed in a new connection and therefore in +a new transaction. Therefore DBUtils now allows suspending the transparent +reopening during transactions. All you need to do is indicate the beginning +of a transaction by calling the <span class="docutils literal">begin()</span> method of the connection. +DBUtils makes sure that this method always exists, even if the database +driver does not support it.</p></li> +<li><p>If the database driver supports a <span class="docutils literal">ping()</span> method, then DBUtils can use it +to check whether connections are alive instead of just trying to use the +connection and reestablishing it in case it was dead. 
Since these checks are +done at the expense of some performance, you have exact control when these +are executed via the new <span class="docutils literal">ping</span> parameter.</p></li> +<li><p><span class="docutils literal">PooledDB</span> has got another new parameter <span class="docutils literal">reset</span> for controlling how +connections are reset before being put back into the pool.</p></li> +</ul> +<p>Bugfixes:</p> +<ul class="simple"> +<li><p>Fixed propagation of error messages when the connection was lost.</p></li> +<li><p>Fixed an issue with the <span class="docutils literal">setoutputsize()</span> cursor method.</p></li> +<li><p>Fixed some minor issues with the <span class="docutils literal">DBUtilsExample</span> for Webware.</p></li> +</ul> +</section> +<section id="section-15"> +<h2>1.0</h2> +<p>DBUtils 1.0 was released on November 29, 2008.</p> +<p>It is intended to be used with Python versions 2.2 to 2.6.</p> +<p>Changes:</p> +<ul class="simple"> +<li><p>Added a <span class="docutils literal">failures</span> parameter for configuring the exception classes for +which the failover mechanisms is applied (as suggested by Matthew Harriger).</p></li> +<li><p>Added a <span class="docutils literal">closeable</span> parameter for configuring whether connections can be +closed (otherwise closing connections will be silently ignored).</p></li> +<li><p>It is now possible to override defaults via the <span class="docutils literal">creator.dbapi</span> and +<span class="docutils literal">creator.threadsafety</span> attributes.</p></li> +<li><p>Added an alias method <span class="docutils literal">dedicated_connection</span> as a shorthand for +<span class="docutils literal">connection(shareable=False)</span>.</p></li> +<li><p>Added a version attribute to all exported classes.</p></li> +<li><p>Where the value <span class="docutils literal">0</span> has the meaning "unlimited", parameters can now be also +set to the value <span class="docutils 
literal">None</span> instead.</p></li> +<li><p>It turned out that <span class="docutils literal">threading.local</span> does not work properly with +<span class="docutils literal">mod_wsgi</span>, so we use the Python implementation for thread-local data +even when a faster <span class="docutils literal">threading.local</span> implementation is available. +A new parameter <span class="docutils literal">threadlocal</span> allows you to pass an arbitrary class +such as <span class="docutils literal">threading.local</span> if you know it works in your environment.</p></li> +</ul> +<p>Bugfixes and improvements:</p> +<ul class="simple"> +<li><p>In some cases, when instance initialization failed or referenced objects +were already destroyed, finalizers could throw exceptions or create infinite +recursion (problem reported by Gregory Pinero and Jehiah Czebotar).</p></li> +<li><p>DBUtils now tries harder to find the underlying DB-API 2 module if only a +connection creator function is specified. This had not worked before with +the MySQLdb module (problem reported by Gregory Pinero).</p></li> +</ul> +</section> +<section id="section-16"> +<h2>0.9.4</h2> +<p>DBUtils 0.9.4 was released on July 7, 2007.</p> +<p>This release fixes a problem in the destructor code and has been supplemented +with a German User's Guide.</p> +<p>Again, please note that the <span class="docutils literal">dbapi</span> parameter has been renamed to <span class="docutils literal">creator</span> +in the last release, since you can now pass custom creator functions +for database connections instead of DB-API 2 modules.</p> +</section> +<section id="section-17"> +<h2>0.9.3</h2> +<p>DBUtils 0.9.3 was released on May 21, 2007.</p> +<p>Changes:</p> +<ul class="simple"> +<li><p>Support custom creator functions for database connections. 
+These can now be used as the first parameter instead of an DB-API module +(suggested by Ezio Vernacotola).</p></li> +<li><p>Added destructor for steady connections.</p></li> +<li><p>Use <a class="reference external" href="https://github.com/pypa/setuptools">setuptools</a> if available.</p></li> +<li><p>Some code cleanup.</p></li> +<li><p>Some fixes in the documentation. +Added Chinese translation of the User's Guide, kindly contributed by gashero.</p></li> +</ul> +</section> +<section id="section-18"> +<h2>0.9.2</h2> +<p>DBUtils 0.9.2 was released on September 22, 2006.</p> +<p>It is intended to be used with Python versions 2.2 to 2.5.</p> +<p>Changes:</p> +<ul class="simple"> +<li><p>Renamed <span class="docutils literal">SolidDB</span> to <span class="docutils literal">SteadyDB</span> to avoid confusion with the "solidDB" +storage engine. Accordingly, renamed <span class="docutils literal">SolidPg</span> to <span class="docutils literal">SteadyPg</span>.</p></li> +</ul> +</section> +<section id="section-19"> +<h2>0.9.1</h2> +<p>DBUtils 0.9.1 was released on May 8, 2006.</p> +<p>It is intended to be used with Python versions 2.2 to 2.4.</p> +<p>Changes:</p> +<ul class="simple"> +<li><p>Added <span class="docutils literal">_closeable</span> attribute and made persistent connections not closeable +by default. This allows <span class="docutils literal">PersistentDB</span> to be used in the same way as you +would use <span class="docutils literal">PooledDB</span>.</p></li> +<li><p>Allowed arguments in the DB-API 2 <span class="docutils literal">cursor()</span> method. MySQLdb is using this +to specify cursor classes. 
(Suggested by Michael Palmer.)</p></li> +<li><p>Improved the documentation and added a User's Guide.</p></li> +</ul> +</section> +<section id="section-20"> +<h2>0.8.1 - 2005-09-13</h2> +<p>DBUtils 0.8.1 was released on September 13, 2005.</p> +<p>It is intended to be used with Python versions 2.0 to 2.4.</p> +<p>This is the first public release of DBUtils.</p> +</section> +</main> +</body> +</html> diff --git a/docs/changelog.rst b/docs/changelog.rst new file mode 100644 index 0000000..796684e --- /dev/null +++ b/docs/changelog.rst @@ -0,0 +1,276 @@ +Changelog for DBUtils ++++++++++++++++++++++ + +3.1.0 +===== + +DBUtils 3.1.0 was released on March 17, 2024. + +Changes: + +* Support Python version 3.12, cease support for Python 3.6. +* Various small internal improvements and modernizations. + + +3.0.3 +===== + +DBUtils 3.0.3 was released on April 27, 2023. + +Changes: + +* Support Python version 3.11. +* Improve determination of DB API module if creator is specified. +* Minor fixes and section an advanced usage in docs. + +3.0.2 +===== + +DBUtils 3.0.2 was released on January 14, 2022. + +The optional iterator protocol on cursors is now supported. + +3.0.1 +===== + +DBUtils 3.0.1 was released on December 22, 2021. + +It includes ``InterfaceError`` to the default list of exceptions +for which the connection failover mechanism is applied. +You can override this with the ``failures`` parameter. + +3.0.0 +===== + +DBUtils 3.0.0 was released on November 26, 2021. + +It is intended to be used with Python versions 3.6 to 3.10. + +Changes: + +* Cease support for Python 2 and 3.5, minor optimizations. + +2.0.3 +===== + +DBUtils 2.0.3 was released on November 26, 2021. + +Changes: + +* Support Python version 3.10. + +2.0.2 +===== + +DBUtils 2.0.2 was released on June 8, 2021. + +Changes: + +* Allow using context managers for pooled connections. + +2.0.1 +===== + +DBUtils 2.0.1 was released on April 8, 2021. + +Changes: + +* Avoid "name Exception is not defined" when exiting. 
+ +2.0 +=== + +DBUtils 2.0 was released on September 26, 2020. + +It is intended to be used with Python versions 2.7 and 3.5 to 3.9. + +Changes: + +* DBUtils does not act as a Webware plugin anymore, it is now just an ordinary + Python package (of course it could be used as such also before). +* The Webware ``Examples`` folder has been removed. +* Folders, packages and modules have been renamed to lower-case. + Particularly, you need to import ``dbutils`` instead of ``DBUtils`` now. +* The internal naming conventions have also been changed to comply with PEP8. +* The documentation has been adapted to reflect the changes in this version. +* This changelog has been compiled from the former release notes. + +1.4 +=== + +DBUtils 1.4 was released on September 26, 2020. + +It is intended to be used with Python versions 2.7 and 3.5 to 3.9. + +Improvements: + +* The ``SteadyDB`` and ``SteadyPg`` classes only reconnect after the + ``maxusage`` limit has been reached when the connection is not currently + inside a transaction. + +1.3 +=== + +DBUtils 1.3 was released on March 3, 2018. + +It is intended to be used with Python versions 2.6, 2.7 and 3.4 to 3.7. + +Improvements: + +* This version now supports context handlers for connections and cursors. + +1.2 +=== + +DBUtils 1.2 was released on February 5, 2017. + +It is intended to be used with Python versions 2.6, 2.7 and 3.0 to 3.6. + +1.1.1 +===== + +DBUtils 1.1.1 was released on February 4, 2017. + +It is intended to be used with Python versions 2.3 to 2.7. + +Improvements: + +* Reopen ``SteadyDB`` connections when commit or rollback fails + (suggested by Ben Hoyt). + +Bugfixes: + +* Fixed a problem when running under Jython (reported by Vitaly Kruglikov). + +1.1 +=== + +DBUtils 1.1 was released on August 14, 2011. + +Improvements: + +* The transparent reopening of connections is actually an undesired behavior + if it happens during database transactions. 
In these cases, the transaction + should fail and the error be reported back to the application instead of the + rest of the transaction being executed in a new connection and therefore in + a new transaction. Therefore DBUtils now allows suspending the transparent + reopening during transactions. All you need to do is indicate the beginning + of a transaction by calling the ``begin()`` method of the connection. + DBUtils makes sure that this method always exists, even if the database + driver does not support it. +* If the database driver supports a ``ping()`` method, then DBUtils can use it + to check whether connections are alive instead of just trying to use the + connection and reestablishing it in case it was dead. Since these checks are + done at the expense of some performance, you have exact control when these + are executed via the new ``ping`` parameter. +* ``PooledDB`` has got another new parameter ``reset`` for controlling how + connections are reset before being put back into the pool. + +Bugfixes: + +* Fixed propagation of error messages when the connection was lost. +* Fixed an issue with the ``setoutputsize()`` cursor method. +* Fixed some minor issues with the ``DBUtilsExample`` for Webware. + + +1.0 +=== + +DBUtils 1.0 was released on November 29, 2008. + +It is intended to be used with Python versions 2.2 to 2.6. + +Changes: + +* Added a ``failures`` parameter for configuring the exception classes for + which the failover mechanisms is applied (as suggested by Matthew Harriger). +* Added a ``closeable`` parameter for configuring whether connections can be + closed (otherwise closing connections will be silently ignored). +* It is now possible to override defaults via the ``creator.dbapi`` and + ``creator.threadsafety`` attributes. +* Added an alias method ``dedicated_connection`` as a shorthand for + ``connection(shareable=False)``. +* Added a version attribute to all exported classes. 
+* Where the value ``0`` has the meaning "unlimited", parameters can now be also + set to the value ``None`` instead. +* It turned out that ``threading.local`` does not work properly with + ``mod_wsgi``, so we use the Python implementation for thread-local data + even when a faster ``threading.local`` implementation is available. + A new parameter ``threadlocal`` allows you to pass an arbitrary class + such as ``threading.local`` if you know it works in your environment. + +Bugfixes and improvements: + +* In some cases, when instance initialization failed or referenced objects + were already destroyed, finalizers could throw exceptions or create infinite + recursion (problem reported by Gregory Pinero and Jehiah Czebotar). +* DBUtils now tries harder to find the underlying DB-API 2 module if only a + connection creator function is specified. This had not worked before with + the MySQLdb module (problem reported by Gregory Pinero). + +0.9.4 +===== + +DBUtils 0.9.4 was released on July 7, 2007. + +This release fixes a problem in the destructor code and has been supplemented +with a German User's Guide. + +Again, please note that the ``dbapi`` parameter has been renamed to ``creator`` +in the last release, since you can now pass custom creator functions +for database connections instead of DB-API 2 modules. + +0.9.3 +===== + +DBUtils 0.9.3 was released on May 21, 2007. + +Changes: + +* Support custom creator functions for database connections. + These can now be used as the first parameter instead of an DB-API module + (suggested by Ezio Vernacotola). +* Added destructor for steady connections. +* Use setuptools_ if available. +* Some code cleanup. +* Some fixes in the documentation. + Added Chinese translation of the User's Guide, kindly contributed by gashero. + +.. _setuptools: https://github.com/pypa/setuptools + +0.9.2 +===== + +DBUtils 0.9.2 was released on September 22, 2006. + +It is intended to be used with Python versions 2.2 to 2.5. 
+ +Changes: + +* Renamed ``SolidDB`` to ``SteadyDB`` to avoid confusion with the "solidDB" + storage engine. Accordingly, renamed ``SolidPg`` to ``SteadyPg``. + +0.9.1 +===== + +DBUtils 0.9.1 was released on May 8, 2006. + +It is intended to be used with Python versions 2.2 to 2.4. + +Changes: + +* Added ``_closeable`` attribute and made persistent connections not closeable + by default. This allows ``PersistentDB`` to be used in the same way as you + would use ``PooledDB``. +* Allowed arguments in the DB-API 2 ``cursor()`` method. MySQLdb is using this + to specify cursor classes. (Suggested by Michael Palmer.) +* Improved the documentation and added a User's Guide. + +0.8.1 - 2005-09-13 +================== + +DBUtils 0.8.1 was released on September 13, 2005. + +It is intended to be used with Python versions 2.0 to 2.4. + +This is the first public release of DBUtils. diff --git a/docs/dependencies_db.png b/docs/dependencies_db.png new file mode 100644 index 0000000..f460fc0 Binary files /dev/null and b/docs/dependencies_db.png differ diff --git a/docs/dependencies_pg.png b/docs/dependencies_pg.png new file mode 100644 index 0000000..380def9 Binary files /dev/null and b/docs/dependencies_pg.png differ diff --git a/docs/doc.css b/docs/doc.css new file mode 100644 index 0000000..c647434 --- /dev/null +++ b/docs/doc.css @@ -0,0 +1,75 @@ +/* + Style sheet for DBUtils documentation pages. 
+*/ + +/* First import default style for pages created with Docutils: */ + +@import url(docutils.css); + +/* Customization for DBUtils goes here: */ + +p { + margin-top: 6pt; + margin-bottom: 6pt; + text-align: justify; +} +li { + margin-bottom: 6pt; +} +h1, h2 { + color: #002352; +} +h3, h4 { + color: #002352; +} +h1 { + font-size: 18pt; +} +h2 { + font-size: 16pt; +} +h3 { + font-size: 14pt; +} +h1.title { + padding: 4pt; + margin-bottom: 12pt; + text-align: center; + border-bottom: 1pt solid #025; + padding-bottom: 8pt; +} +.contents ul { + list-style: none; + margin-bottom: 24pt; + padding-left: 0em; + margin-left: 2em; +} +.contents ul li { + font-size: 14pt; + margin-bottom: 2pt; +} +.contents ul ul { + list-style-type: none; + margin-top: 1pt; + margin-bottom: 1pt; + padding-left: 0em; + margin-left: 1.5em; +} +.contents ul ul li { + font-size: 13pt; + margin-bottom: 1pt; +} +.contents > p.topic-title { + font-size: 24pt; +} +.warning { + color: brown; +} +.admonition-title { + background-color: #F5F5DC; + padding: 1pt 3pt; +} +.admonition-title::before { + content: "⚠"; + margin-right: .5em; +} diff --git a/docs/docutils.css b/docs/docutils.css new file mode 100644 index 0000000..50d0088 --- /dev/null +++ b/docs/docutils.css @@ -0,0 +1,2 @@ +/* CSS 3 style sheet for the output of Docutils 0.21 HTML writer. 
*/ +div.dedication,nav.contents{padding:0;font-style:italic}h1.title,table tr{text-align:left}.footnote,pre.code,pre.doctest-block,pre.literal-block,pre.math{overflow:auto}body{font-family:Georgia,serif;background-color:#fafaf6;font-size:1.2em;line-height:1.4;margin:auto}main{counter-reset:figure table}footer,header,main{padding:.5em 5%;background-color:#fefef8;max-width:100rem}.citation,.footnote,.topic,div.line-block,dl,h1,h2,h3,h4,h5,h6,li,ol,p,table,ul{margin-top:.5em;margin-bottom:.5em}.topic,blockquote,figure{margin:.5em 2%;padding-left:1em}dl>dd{margin-bottom:.5em}p:first-child{margin-top:0}p:last-child{margin-bottom:0}div.line-block div.line-block,dl.option-list,figure>img,pre.code,pre.doctest-block,pre.literal-block,pre.math{margin-left:calc(2% + 1em)}footer,header{font-size:smaller}h2,h3,h4,p.section-subtitle,p.sidebar-subtitle,p.sidebar-title,p.subtitle,p.topic-title{font-weight:400;font-style:italic;text-align:left}.sectnum{font-style:normal}h1.title{margin-top:2.4em;margin-bottom:2em;font-size:2.4em}h1+p.subtitle{margin-top:-2em;margin-bottom:2em;font-size:2em}section{margin-top:2em}.contents>p.topic-title,h2{font-size:2.2em}h2+p.section-subtitle{font-size:1.6em}h3{font-size:1.2em}h3+p.section-subtitle{font-size:1.1em}figure.marginal>figcaption,h4,p.section-subtitle{font-size:1em}div.dedication{margin-left:0;font-size:1.2em}div.dedication p.topic-title{display:none}.topic p.attribution,blockquote p.attribution{text-align:right}ul.auto-toc>li>p{padding-left:1em;text-indent:-1em}nav.contents ul{padding-left:1em}hr{border:0;border-top:1px solid #ccc;margin:1em 10%}ol,ul{padding-left:1.1em}dd{margin-left:1.5em}dd>dl:first-child,dd>ol:first-child,dd>ul:first-child{clear:left}dl.docinfo>dd,dl.field-list>dd,dl.option-list>dd{margin-left:4em}dl.field-list.narrow>dd{margin-left:3em}dl.field-list.run-in>dd p{display:block}dl.description>dt,dl.docinfo>dt,dl.field-list>dt{font-weight:400;font-style:italic}dl.description>dt{clear:left;float:left;margin:0;padding:0 
.5em 0 0}dl.description>dd:after{display:block;content:"";clear:both}.citation-list,.footnote-list{display:contents}.citation{padding-left:1.5em}.citation .label{margin-left:-1.5em}figure{display:flex;flex-wrap:wrap;align-items:flex-start}figure.fullwidth>img,figure>img{margin:0 .5em .5em 0;padding:0}figcaption{font-size:.8em}.fullwidth>figcaption{font-size:inherit}figure.numbered>figcaption>p:before{counter-increment:figure;content:"Figure " counter(figure) ": "}table.booktabs{border-top:2px solid;border-bottom:2px solid}table.booktabs *{border:0}table.booktabs th{border-bottom:thin solid}table.numbered>caption:before{counter-increment:table;content:"Table " counter(table) ": "}.admonition,.system-message{border-style:solid;border-color:silver;border-width:thin;margin:1em 0;padding:.5em}.attention p.admonition-title,.caution p.admonition-title,.danger p.admonition-title,.warning p.admonition-title,div.error{color:maroon}code .comment,pre.code .comment{color:#5c6576}code .keyword,pre.code .keyword{color:#3b0d06;font-weight:700}code .literal.string,pre.code .literal.string{color:#0c5404}code .name.builtin,pre.code .name.builtin{color:#352b84}code .deleted,pre.code .deleted{background-color:#deb0a1}code .inserted,pre.code .inserted{background-color:#a3d289}.sans{font-family:"Gill Sans","Gill Sans MT",Calibri,"Lucida Sans","Noto Sans",sans-serif;letter-spacing:.02em}a{color:inherit}a:link,a:link:hover{text-decoration:underline}.backrefs a:link,.contents 
a:link,a.citation-reference:link,a.image-reference:link,a.toc-backref:link,a[href^="#system-message"],a[role=doc-backlink]:link,a[role=doc-noteref]:link{text-decoration:none}.contents>p.topic-title,.fullwidth,footer,h1,h2,h3,header,hr.docutils{clear:both}div.align-left,figure.align-left,img.align-left,svg.align-left,table.align-left,video.align-left{margin-left:0;padding-left:0;padding-right:.5em;clear:left;float:left}figure.align-left>img{margin-left:0;padding-left:0}div.align-right,img.align-right,svg.align-right,video.align-right{padding-left:.5em;clear:right;float:right}figure.align-right{clear:right;float:right}figure.align-right>img{justify-self:right;padding:0}table.align-right{margin-right:2.5%}figure.align-center{align-content:center;justify-content:center}figure.align-center>img{padding-left:0;justify-self:center}.admonition.marginal,.marginal,.topic.marginal,aside.sidebar{background-color:#efefea;box-sizing:border-box;margin-left:2%;margin-right:0;padding:.5em;font-size:.8em}aside.sidebar{background-color:inherit}.footnote{font-size:smaller}@media (min-width:35em){footer,header,main{padding:.5em calc(15% - 3rem);line-height:1.6}.admonition.marginal,.marginal,.topic.marginal,aside.sidebar{max-width:45%;float:right;clear:right}dl.docinfo>dd,dl.field-list>dd,dl.option-list>dd{margin-left:6em}}@media (min-width:65em){main,section{display:grid;grid-template-columns:[content] minmax(0,6fr) [margin] 3fr [end];grid-column-gap:calc(3em + 1%)}main>section,section>section{grid-column:1/end}footer,header,main{padding-right:5%}section>figure{display:contents}.citation.align-left,.footnote.align-left,figure>img,main>*,section>*{grid-column:content}.citation.align-left{font-size:1em;padding-left:1.5em}.citation.align-left .label{margin-left:-1.5em}figure>img{margin:.5em 2%;padding-left:1em}.admonition.marginal,.citation,.footnote,.marginal,.topic.marginal,aside.sidebar,figcaption{grid-column:margin;width:auto;max-width:55em;margin:.5em 
0;border:none;padding:0;font-size:.8em;text-align:initial;background-color:inherit}.admonition.marginal{padding:.5em}figure.marginal{display:block;margin:.5em 0}.citation,.footnote{padding-left:0}.citation .label,.footnote .label{margin-left:0}.fullwidth,.fullwidth figcaption,.fullwidth img,aside.system-message,div.abstract,div.dedication,dl.docinfo,h1.title,nav.contents,p.subtitle,pre{grid-column:content/end;margin-right:calc(10% - 3rem);max-width:55em}}@media (min-width:100em){footer,header,main{padding-left:30%}main>nav.contents{position:fixed;top:0;left:0;box-sizing:border-box;width:25%;height:100vh;margin:0;background-color:#fafaf6;padding:5.5em 2%;overflow:auto}main>nav.contents>*{padding-left:0}} \ No newline at end of file diff --git a/DBUtils/Docs/UsersGuide.de.html b/docs/main.de.html similarity index 61% rename from DBUtils/Docs/UsersGuide.de.html rename to docs/main.de.html index 948e12a..3627cf0 100644 --- a/DBUtils/Docs/UsersGuide.de.html +++ b/docs/main.de.html @@ -1,211 +1,195 @@ <!DOCTYPE html> <html xmlns="http://www.w3.org/1999/xhtml" xml:lang="de" lang="de"> <head> -<meta charset="utf-8"/> -<meta name="generator" content="Docutils 0.15.2: http://docutils.sourceforge.net/" /> +<meta charset="utf-8" /> +<meta name="generator" content="Docutils 0.21.2: https://docutils.sourceforge.io/" /> +<meta name="viewport" content="width=device-width, initial-scale=1" /> <title>Benutzeranleitung für DBUtils</title> -<link rel="stylesheet" href="Doc.css" type="text/css" /> +<link rel="stylesheet" href="doc.css" type="text/css" /> </head> -<body> -<div class="document" id="benutzeranleitung-fur-dbutils"> +<body class="with-toc"> +<main id="benutzeranleitung-fur-dbutils"> <h1 class="title">Benutzeranleitung für DBUtils</h1> <dl class="docinfo simple"> -<dt class="version">Version</dt> -<dd class="version">1.4</dd> -<dt class="released">Released</dt> -<dd class="released"><p>09/26/20</p> -</dd> -<dt class="translations">Translations</dt> -<dd 
class="translations"><p><a class="reference external" href="UsersGuide.html">English</a> | German</p> +<dt class="version">Version<span class="colon">:</span></dt> +<dd class="version">3.1.0</dd> +<dt class="translations">Translations<span class="colon">:</span></dt> +<dd class="translations"><p><a class="reference external" href="main.html">English</a> | German</p> </dd> </dl> -<div class="contents topic" id="inhalt"> -<p class="topic-title first">Inhalt</p> +<nav class="contents" id="inhalt" role="doc-toc"> +<p class="topic-title">Inhalt</p> <ul class="simple"> -<li><p><a class="reference internal" href="#zusammenfassung" id="id4">Zusammenfassung</a></p></li> -<li><p><a class="reference internal" href="#module" id="id5">Module</a></p></li> -<li><p><a class="reference internal" href="#download" id="id6">Download</a></p></li> -<li><p><a class="reference internal" href="#installation" id="id7">Installation</a></p> +<li><p><a class="reference internal" href="#zusammenfassung" id="toc-entry-1">Zusammenfassung</a></p></li> +<li><p><a class="reference internal" href="#module" id="toc-entry-2">Module</a></p></li> +<li><p><a class="reference internal" href="#download" id="toc-entry-3">Download</a></p></li> +<li><p><a class="reference internal" href="#installation" id="toc-entry-4">Installation</a></p> <ul> -<li><p><a class="reference internal" href="#installation-als-eigenstandiges-paket" id="id8">Installation als eigenständiges Paket</a></p></li> -<li><p><a class="reference internal" href="#installation-als-unterpaket-plug-in-von-webware-for-python" id="id9">Installation als Unterpaket (Plug-In) von Webware for Python</a></p></li> +<li><p><a class="reference internal" href="#installation-1" id="toc-entry-5">Installation</a></p></li> </ul> </li> -<li><p><a class="reference internal" href="#anforderungen" id="id10">Anforderungen</a></p></li> -<li><p><a class="reference internal" href="#funktionalitat" id="id11">Funktionalität</a></p> +<li><p><a class="reference internal" 
href="#anforderungen" id="toc-entry-6">Anforderungen</a></p></li> +<li><p><a class="reference internal" href="#funktionalitat" id="toc-entry-7">Funktionalität</a></p> <ul> -<li><p><a class="reference internal" href="#simplepooleddb" id="id12">SimplePooledDB</a></p></li> -<li><p><a class="reference internal" href="#steadydb" id="id13">SteadyDB</a></p></li> -<li><p><a class="reference internal" href="#persistentdb" id="id14">PersistentDB</a></p></li> -<li><p><a class="reference internal" href="#pooleddb" id="id15">PooledDB</a></p></li> -<li><p><a class="reference internal" href="#die-qual-der-wahl" id="id16">Die Qual der Wahl</a></p></li> +<li><p><a class="reference internal" href="#simplepooleddb-simple-pooled-db" id="toc-entry-8">SimplePooledDB (simple_pooled_db)</a></p></li> +<li><p><a class="reference internal" href="#steadydbconnection-steady-db" id="toc-entry-9">SteadyDBConnection (steady_db)</a></p></li> +<li><p><a class="reference internal" href="#persistentdb-persistent-db" id="toc-entry-10">PersistentDB (persistent_db)</a></p></li> +<li><p><a class="reference internal" href="#pooleddb-pooled-db" id="toc-entry-11">PooledDB (pooled_db)</a></p></li> +<li><p><a class="reference internal" href="#die-qual-der-wahl" id="toc-entry-12">Die Qual der Wahl</a></p></li> </ul> </li> -<li><p><a class="reference internal" href="#benutzung" id="id17">Benutzung</a></p> +<li><p><a class="reference internal" href="#benutzung" id="toc-entry-13">Benutzung</a></p> <ul> -<li><p><a class="reference internal" href="#id1" id="id18">PersistentDB</a></p></li> -<li><p><a class="reference internal" href="#id2" id="id19">PooledDB</a></p></li> -<li><p><a class="reference internal" href="#benutzung-in-webware-for-python" id="id20">Benutzung in Webware for Python</a></p></li> +<li><p><a class="reference internal" href="#persistentdb-persistent-db-1" id="toc-entry-14">PersistentDB (persistent_db)</a></p></li> +<li><p><a class="reference internal" href="#pooleddb-pooled-db-1" 
id="toc-entry-15">PooledDB (pooled_db)</a></p></li> </ul> </li> -<li><p><a class="reference internal" href="#anmerkungen" id="id21">Anmerkungen</a></p></li> -<li><p><a class="reference internal" href="#zukunft" id="id22">Zukunft</a></p></li> -<li><p><a class="reference internal" href="#fehlermeldungen-und-feedback" id="id23">Fehlermeldungen und Feedback</a></p></li> -<li><p><a class="reference internal" href="#links" id="id24">Links</a></p></li> -<li><p><a class="reference internal" href="#autoren" id="id25">Autoren</a></p></li> -<li><p><a class="reference internal" href="#copyright-und-lizenz" id="id26">Copyright und Lizenz</a></p></li> +<li><p><a class="reference internal" href="#besonderheiten-bei-der-benutzung" id="toc-entry-16">Besonderheiten bei der Benutzung</a></p></li> +<li><p><a class="reference internal" href="#anmerkungen" id="toc-entry-17">Anmerkungen</a></p></li> +<li><p><a class="reference internal" href="#zukunft" id="toc-entry-18">Zukunft</a></p></li> +<li><p><a class="reference internal" href="#fehlermeldungen-und-feedback" id="toc-entry-19">Fehlermeldungen und Feedback</a></p></li> +<li><p><a class="reference internal" href="#links" id="toc-entry-20">Links</a></p></li> +<li><p><a class="reference internal" href="#autoren" id="toc-entry-21">Autoren</a></p></li> +<li><p><a class="reference internal" href="#copyright-und-lizenz" id="toc-entry-22">Copyright und Lizenz</a></p></li> </ul> -</div> -<div class="section" id="zusammenfassung"> -<h1>Zusammenfassung</h1> +</nav> +<section id="zusammenfassung"> +<h2>Zusammenfassung</h2> <p><a class="reference external" href="https://github.com/WebwareForPython/DBUtils">DBUtils</a> ist eine Sammlung von Python-Modulen, mit deren Hilfe man in <a class="reference external" href="https://www.python.org">Python</a> geschriebene Multithread-Anwendungen auf sichere und effiziente Weise an -Datenbanken anbinden kann. 
DBUtils wurde mit Blick auf <a class="reference external" href="https://webwareforpython.github.io/w4py/">Webware for Python</a> -als Anwendung und <a class="reference external" href="http://www.pygresql.org/">PyGreSQL</a> als <a class="reference external" href="https://www.postgresql.org/">PostgreSQL</a>-Datenbankadapter entwickelt, -kann aber für beliebige Python-Anwendungen und beliebige auf <a class="reference external" href="https://www.python.org/dev/peps/pep-0249/">DB-API 2</a> -beruhende Python-Datenbankadapter verwendet werden.</p> -</div> -<div class="section" id="module"> -<h1>Module</h1> +Datenbanken anbinden kann.</p> +<p>DBUtils wurde ursprünglich speziell für <a class="reference external" href="https://webwareforpython.github.io/w4py/">Webware for Python</a> als Anwendung +und <a class="reference external" href="https://www.pygresql.org/">PyGreSQL</a> als <a class="reference external" href="https://www.postgresql.org/">PostgreSQL</a>-Datenbankadapter entwickelt, +kann aber inzwischen für beliebige Python-Anwendungen und beliebige +auf <a class="reference external" href="https://www.python.org/dev/peps/pep-0249/">DB-API 2</a> beruhende Python-Datenbankadapter verwendet werden.</p> +</section> +<section id="module"> +<h2>Module</h2> <p>DBUtils ist als Python-Package realisiert worden, das aus zwei verschiedenen Gruppen von Modulen besteht: Einer Gruppe zur Verwendung mit beliebigen -DB-API-2-Datenbankadaptern, und einer Gruppe zur Verwendung mit dem klassischen PyGreSQL-Datenbankadapter-Modul.</p> +DB-API-2-Datenbankadaptern, und einer Gruppe zur Verwendung mit dem klassischen +PyGreSQL-Datenbankadapter-Modul.</p> <table> -<colgroup> -<col style="width: 29%" /> -<col style="width: 71%" /> -</colgroup> <thead> <tr><th class="head" colspan="2"><p>Allgemeine Variante für beliebige DB-API-2-Adapter</p></th> </tr> </thead> <tbody> -<tr><td><p>SteadyDB.py</p></td> +<tr><td><p>steady_db</p></td> <td><p>Gehärtete DB-API-2-Datenbankverbindungen</p></td> </tr> 
-<tr><td><p>PooledDB.py</p></td> +<tr><td><p>pooled_db</p></td> <td><p>Pooling für DB-API-2-Datenbankverbindungen</p></td> </tr> -<tr><td><p>PersistentDB.py</p></td> +<tr><td><p>persistent_db</p></td> <td><p>Persistente DB-API-2-Datenbankverbindungen</p></td> </tr> -<tr><td><p>SimplePooledDB.py</p></td> +<tr><td><p>simple_pooled_db</p></td> <td><p>Einfaches Pooling für DB-API 2</p></td> </tr> </tbody> </table> <table> -<colgroup> -<col style="width: 29%" /> -<col style="width: 71%" /> -</colgroup> <thead> <tr><th class="head" colspan="2"><p>Variante speziell für den klassischen PyGreSQL-Adapter</p></th> </tr> </thead> <tbody> -<tr><td><p>SteadyPg.py</p></td> +<tr><td><p>steady_pg</p></td> <td><p>Gehärtete klassische PyGreSQL-Verbindungen</p></td> </tr> -<tr><td><p>PooledPg.py</p></td> +<tr><td><p>pooled_pg</p></td> <td><p>Pooling für klassische PyGreSQL-Verbindungen</p></td> </tr> -<tr><td><p>PersistentPg.py</p></td> +<tr><td><p>persistent_pg</p></td> <td><p>Persistente klassische PyGreSQL-Verbindungen</p></td> </tr> -<tr><td><p>SimplePooledPg.py</p></td> +<tr><td><p>simple_pooled_pg</p></td> <td><p>Einfaches Pooling für klassisches PyGreSQL</p></td> </tr> </tbody> </table> <p>Die Abhängigkeiten der Module in der Variante für beliebige DB-API-2-Adapter sind im folgenden Diagramm dargestellt:</p> -<img alt="dbdep.gif" src="dbdep.gif" /> +<img alt="dependencies_db.png" src="dependencies_db.png" /> <p>Die Abhängigkeiten der Module in der Variante für den klassischen PyGreSQL-Adapter sehen ähnlich aus:</p> -<img alt="pgdep.gif" src="pgdep.gif" /> -</div> -<div class="section" id="download"> -<h1>Download</h1> +<img alt="depdependencies_pg.png" src="depdependencies_pg.png" /> +</section> +<section id="download"> +<h2>Download</h2> <p>Die aktuelle Version von DBUtils kann vom Python Package Index heruntergeladen werden:</p> <pre class="literal-block">https://pypi.python.org/pypi/DBUtils</pre> <p>Das Source-Code-Repository befindet sich hier auf GitHub:</p> <pre 
class="literal-block">https://github.com/WebwareForPython/DBUtils</pre> -</div> -<div class="section" id="installation"> -<h1>Installation</h1> -<div class="section" id="installation-als-eigenstandiges-paket"> -<h2>Installation als eigenständiges Paket</h2> -<p>Wenn Sie DBUtils für andere Anwendungen als Webware for Python verwenden -möchten, empfiehlt es sich, das Paket auf die übliche Weise zu installieren:</p> +</section> +<section id="installation"> +<h2>Installation</h2> +<section id="installation-1"> +<h3>Installation</h3> +<p>Das Paket kann auf die übliche Weise installiert werden:</p> <pre class="literal-block">python setup.py install</pre> -<p>Sie können auch <a class="reference external" href="https://pip.pypa.io/">pip</a> für Download und Installation verwenden:</p> +<p>Noch einfacher ist, das Paket in einem Schritt mit <a class="reference external" href="https://pip.pypa.io/">pip</a> automatisch +herunterzuladen und zu installieren:</p> <pre class="literal-block">pip install DBUtils</pre> -</div> -<div class="section" id="installation-als-unterpaket-plug-in-von-webware-for-python"> -<h2>Installation als Unterpaket (Plug-In) von Webware for Python</h2> -<p>Wenn Sie DBUtils nur als Ergänzung für das Web-Framework Webware for Python -verwenden wollen, sollten Sie DBUtils als Webware-Plug-In installieren:</p> -<pre class="literal-block">python setup.py install --install-lib=/pfad/zu/Webware</pre> -<p>Ersetzen Sie <span class="docutils literal">/pfad/zu/Webware</span> hierbei durch den Pfad zum Wurzelverzeichnis -der Installation von Webware for Python. 
Sie müssen auch das Installationsskript -von Webware for Python laufen lassen, wenn dies noch nicht geschehen ist, oder -wenn Sie DBUtils in die Webware-Dokumentation integrieren wollen:</p> -<pre class="literal-block">cd /pfad/zu/Webware -python install.py</pre> -</div> -</div> -<div class="section" id="anforderungen"> -<h1>Anforderungen</h1> -<p>DBUtils unterstützt die <a class="reference external" href="https://www.python.org">Python</a> Versionen 2.7 und 3.5 bis 3.8.</p> -<p>Die Module in der Variante für klassisches PyGreSQL benötigen <a class="reference external" href="http://www.pygresql.org/">PyGreSQL</a> +</section> +</section> +<section id="anforderungen"> +<h2>Anforderungen</h2> +<p>DBUtils unterstützt die <a class="reference external" href="https://www.python.org">Python</a> Versionen 3.7 bis 3.12.</p> +<p>Die Module in der Variante für klassisches PyGreSQL benötigen <a class="reference external" href="https://www.pygresql.org/">PyGreSQL</a> Version 4.0 oder höher, während die Module in der allgemeinen Variante für DB-API 2 mit jedem beliebigen Python-Datenbankadapter-Modul zusammenarbeiten, das auf <a class="reference external" href="https://www.python.org/dev/peps/pep-0249/">DB-API 2</a> basiert.</p> -</div> -<div class="section" id="funktionalitat"> -<h1>Funktionalität</h1> +</section> +<section id="funktionalitat"> +<h2>Funktionalität</h2> <p>Dieser Abschnitt verwendet nur die Bezeichnungen der DB-API-2-Variante, aber Entsprechendes gilt auch für die PyGreSQL-Variante.</p> -<div class="section" id="simplepooleddb"> -<h2>SimplePooledDB</h2> -<p><span class="docutils literal">DBUtils.SimplePooledDB</span> ist eine sehr elementare Referenz-Implementierung -eines Pools von Datenbankverbindungen. Hiermit ist ein Vorratsspeicher an -Datenbankverbindungen gemeint, aus dem sich die Python-Anwendung bedienen kann. 
-Diese Implementierung ist weit weniger ausgefeilt als das eigentliche -<span class="docutils literal">PooledDB</span>-Modul und stellt insbesondere keine Ausfallsicherung zur Verfügung. -<span class="docutils literal">DBUtils.SimplePooledDB</span> ist im Wesentlichen identisch mit dem zu Webware for -Python gehörenden Modul <span class="docutils literal">MiscUtils.DBPool</span>. Es ist eher zur Verdeutlichung -des Konzepts gedacht, als zum Einsatz im produktiven Betrieb.</p> -</div> -<div class="section" id="steadydb"> -<h2>SteadyDB</h2> -<p><span class="docutils literal">DBUtils.SteadyDB</span> ist ein Modul, das "gehärtete" Datenbankverbindungen -bereitstellt, denen gewöhnlichen Verbindungen eines DB-API-2-Datenbankadapters -zugrunde liegen. Eine "gehärtete" Verbindung wird bei Zugriff automatisch, -ohne dass die Anwendung dies bemerkt, wieder geöffnet, wenn sie geschlossen -wurde, die Datenbankverbindung unterbrochen wurde, oder wenn sie öfter als -ein optionales Limit genutzt wurde.</p> +<p>DBUtils installiert sich als Paket <span class="docutils literal">dbutils</span>, das alle hier beschriebenen +Module enthält. Jedes dieser Modul enthält im Wesentlichen eine Klasse, die +einen analogen Namen trägt und die jeweilige Funktionalität bereitstellt. +So enthält z.B. das Modul <span class="docutils literal">dbutils.pooled_db</span> die Klasse <span class="docutils literal">PooledDB</span>.</p> +<section id="simplepooleddb-simple-pooled-db"> +<h3>SimplePooledDB (simple_pooled_db)</h3> +<p>Die Klasse <span class="docutils literal">SimplePooledDB</span> in <span class="docutils literal">dbutils.simple_pooled_db</span> ist eine sehr +elementare Referenz-Implementierung eines Pools von Datenbankverbindungen. +Hiermit ist ein Vorratsspeicher an Datenbankverbindungen gemeint, aus dem sich +die Python-Anwendung bedienen kann. 
Diese Implementierung ist weit weniger +ausgefeilt als das eigentliche <span class="docutils literal">pooled_db</span>-Modul und stellt insbesondere +keine Ausfallsicherung zur Verfügung. <span class="docutils literal">dbutils.simple_pooled_db</span> ist im +Wesentlichen identisch mit dem zu Webware for Python gehörenden Modul +<span class="docutils literal">MiscUtils.DBPool</span>. Es ist eher zur Verdeutlichung des Konzepts gedacht, +als zum Einsatz im produktiven Betrieb.</p> +</section> +<section id="steadydbconnection-steady-db"> +<h3>SteadyDBConnection (steady_db)</h3> +<p>Die Klasse <span class="docutils literal">SteadyDBConnection</span> im Modul <span class="docutils literal">dbutils.steady_db</span> stellt +"gehärtete" Datenbankverbindungen bereit, denen gewöhnlichen Verbindungen +eines DB-API-2-Datenbankadapters zugrunde liegen. Eine "gehärtete" Verbindung +wird bei Zugriff automatisch, ohne dass die Anwendung dies bemerkt, wieder +geöffnet, wenn sie geschlossen wurde, die Datenbankverbindung unterbrochen +wurde, oder wenn sie öfter als ein optionales Limit genutzt wurde.</p> <p>Ein typisches Beispiel wo dies benötig wird, ist, wenn die Datenbank neu gestartet wurde, während Ihre Anwendung immer noch läuft und Verbindungen zur Datenbank offen hat, oder wenn Ihre Anwendung auf eine entfernte Datenbank über ein Netzwerk zugreift, das durch eine Firewall geschützt ist, und die Firewall neu gestartet wurde und dabei ihren Verbindungsstatus verloren hat.</p> -<p>Normalerweise benutzen Sie das <span class="docutils literal">SteadyDB</span>-Modul nicht direkt; es wird aber -von den beiden nächsten Modulen benötigt, <span class="docutils literal">PersistentDB</span> und <span class="docutils literal">PooledDB</span>.</p> -</div> -<div class="section" id="persistentdb"> -<h2>PersistentDB</h2> -<p><span class="docutils literal">DBUtils.PersistentDB</span> stellt gehärtete, thread-affine, persistente -Datenbankverbindungen zur Verfügung, unter Benutzung eines 
beliebigen -DB-API-2-Datenbankadapters. Mit "thread-affin" und "persistent" ist -hierbei gemeint, dass die einzelnen Datenbankverbindungen den jeweiligen -Threads fest zugeordnet bleiben und während der Laufzeit des Threads nicht -geschlossen werden.</p> +<p>Normalerweise benutzen Sie das <span class="docutils literal">steady_db</span>-Modul nicht direkt; es wird aber +von den beiden nächsten Modulen benötigt, <span class="docutils literal">persistent_db</span> und <span class="docutils literal">pooled_db</span>.</p> +</section> +<section id="persistentdb-persistent-db"> +<h3>PersistentDB (persistent_db)</h3> +<p>Die Klasse <span class="docutils literal">PersistentDB</span> im Modul <span class="docutils literal">dbutils.persistent_db</span> stellt +gehärtete, thread-affine, persistente Datenbankverbindungen zur Verfügung, +unter Benutzung eines beliebigen DB-API-2-Datenbankadapters. Mit "thread-affin" +und "persistent" ist hierbei gemeint, dass die einzelnen Datenbankverbindungen +den jeweiligen Threads fest zugeordnet bleiben und während der Laufzeit des +Threads nicht geschlossen werden.</p> <p>Das folgende Diagramm zeigt die beteiligten Verbindungsschichten, wenn Sie -<span class="docutils literal">PersistentDB</span>-Datenbankverbindungen einsetzen:</p> -<img alt="persist.gif" src="persist.gif" /> +<span class="docutils literal">persistent_db</span>-Datenbankverbindungen einsetzen:</p> +<img alt="persistent.png" src="persistent.png" /> <p>Immer wenn ein Thread eine Datenbankverbindung zum ersten Mal öffnet, wird eine neue Datenbankverbindung geöffnet, die von da an immer wieder für genau diesen Thread verwendet wird. Wenn der Thread die Datenbankverbindung schließt, @@ -213,25 +197,25 @@ <h2>PersistentDB</h2> gleiche Thread wieder eine Datenbankverbindung anfordert, diese gleiche bereits geöffnete Datenbankverbindung wieder verwendet werden kann. 
Die Verbindung wird automatisch geschlossen, wenn der Thread beendet wird.</p> -<p>Kurz gesagt versucht <span class="docutils literal">PersistentDB</span> Datenbankverbindungen wiederzuverwerten, +<p>Kurz gesagt versucht <span class="docutils literal">persistent_db</span> Datenbankverbindungen wiederzuverwerten, um die Gesamteffizienz der Datenbankzugriffe Ihrer Multithread-Anwendungen zu steigern, aber es wird dabei sichergestellt, dass verschiedene Threads niemals die gleiche Verbindung benutzen.</p> -<p>Daher arbeitet <span class="docutils literal">PersistentDB</span> sogar dann problemlos, wenn der zugrunde +<p>Daher arbeitet <span class="docutils literal">persistent_db</span> sogar dann problemlos, wenn der zugrunde liegende DB-API-2-Datenbankadapter nicht thread-sicher auf der Verbindungsebene ist, oder wenn parallele Threads Parameter der Datenbank-Sitzung verändern oder Transaktionen mit mehreren SQL-Befehlen durchführen.</p> -</div> -<div class="section" id="pooleddb"> -<h2>PooledDB</h2> -<p><span class="docutils literal">DBUtils.PooledDB</span> stellt, unter Benutzung eines beliebigen -DB-API-2-Datenbankadapters, einen Pool von gehärteten, thread-sicheren -Datenbankverbindungen zur Verfügung, die automatisch, ohne dass die Anwendung -dies bemerkt, wiederverwendet werden.</p> +</section> +<section id="pooleddb-pooled-db"> +<h3>PooledDB (pooled_db)</h3> +<p>Die Klasse <span class="docutils literal">PooledDB</span> im Modul <span class="docutils literal">dbutils.pooled_db</span> stellt, unter Benutzung +eines beliebigen DB-API-2-Datenbankadapters, einen Pool von gehärteten, +thread-sicheren Datenbankverbindungen zur Verfügung, die automatisch, ohne dass +die Anwendung dies bemerkt, wiederverwendet werden.</p> <p>Das folgende Diagramm zeigt die beteiligten Verbindungsschichten, wenn Sie -<span class="docutils literal">PooledDB</span>-Datenbankverbindungen einsetzen:</p> -<img alt="pool.gif" src="pool.gif" /> -<p>Wie im Diagramm angedeutet, kann <span 
class="docutils literal">PooledDB</span> geöffnete Datenbankverbindungen +<span class="docutils literal">pooled_db</span>-Datenbankverbindungen einsetzen:</p> +<img alt="pooled.png" src="pooled.png" /> +<p>Wie im Diagramm angedeutet, kann <span class="docutils literal">pooled_db</span> geöffnete Datenbankverbindungen den verschiedenen Threads beliebig zuteilen. Dies geschieht standardmäßig, wenn Sie den Verbindungspool mit einem positiven Wert für <span class="docutils literal">maxshared</span> einrichten und der zugrunde liegende DB-API-2-Datenbankadapter auf der Verbindungsebene @@ -247,47 +231,47 @@ <h2>PooledDB</h2> Datenbankverbindungen zurückgegeben, damit sie wiederverwertet werden kann.</p> <p>Wenn der zugrunde liegende DB-API-Datenbankadapter nicht thread-sicher ist, werden Thread-Locks verwendet, um sicherzustellen, dass die -<span class="docutils literal">PooledDB</span>-Verbindungen dennoch thread-sicher sind. Sie brauchen sich also +<span class="docutils literal">pooled_db</span>-Verbindungen dennoch thread-sicher sind. 
Sie brauchen sich also hierum keine Sorgen zu machen, aber Sie sollten darauf achten, dedizierte Datenbankverbindungen zu verwenden, sobald Sie Parameter der Datenbanksitzung verändern oder Transaktionen mit mehreren SQL-Befehlen ausführen.</p> -</div> -<div class="section" id="die-qual-der-wahl"> -<h2>Die Qual der Wahl</h2> -<p>Sowohl <span class="docutils literal">PersistentDB</span> als auch <span class="docutils literal">PooledDB</span> dienen dem gleichen Zweck, +</section> +<section id="die-qual-der-wahl"> +<h3>Die Qual der Wahl</h3> +<p>Sowohl <span class="docutils literal">persistent_db</span> als auch <span class="docutils literal">pooled_db</span> dienen dem gleichen Zweck, nämlich die Effizienz des Datenbankzugriffs durch Wiederverwendung von Datenbankverbindungen zu steigern, und dabei gleichzeitig die Stabilität zu gewährleisten, selbst wenn die Datenbankverbindung unterbrochen wird.</p> <p>Welches der beiden Module sollte also verwendet werden? Nach den obigen -Erklärungen ist es klar, dass <span class="docutils literal">PersistentDB</span> dann sinnvoller ist, wenn +Erklärungen ist es klar, dass <span class="docutils literal">persistent_db</span> dann sinnvoller ist, wenn Ihre Anwendung eine gleich bleibende Anzahl Threads verwendet, die häufig auf die Datenbank zugreifen. In diesem Fall werden Sie ungefähr die gleiche Anzahl geöffneter Datenbankverbindungen erhalten. 
Wenn jedoch Ihre Anwendung -häufig Threads beendet und neu startet, dann ist <span class="docutils literal">PooledDB</span> die bessere +häufig Threads beendet und neu startet, dann ist <span class="docutils literal">pooled_db</span> die bessere Lösung, die auch mehr Möglichkeiten zur Feineinstellung zur Verbesserung der Effizienz erlaubt, insbesondere bei Verwendung eines thread-sicheren DB-API-2-Datenbankadapters.</p> <p>Da die Schnittstellen beider Module sehr ähnlich sind, können Sie recht einfach von einem Modul zum anderen wechseln und austesten, welches geeigneter ist.</p> -</div> -</div> -<div class="section" id="benutzung"> -<h1>Benutzung</h1> +</section> +</section> +<section id="benutzung"> +<h2>Benutzung</h2> <p>Die Benutzung aller Module ist zwar recht ähnlich, aber es gibt vor allem bei der Initialisierung auch einige Unterschiede, sowohl zwischen den "Pooled"- und den "Persistent"-Varianten, als auch zwischen den DB-API-2- und den PyGreSQL-Varianten.</p> -<p>Wir werden hier nur auf das <span class="docutils literal">PersistentDB</span>-Modul und das etwas kompliziertere -<span class="docutils literal">PooledDB</span>-Modul eingehen. Einzelheiten zu den anderen Modulen finden Sie +<p>Wir werden hier nur auf das <span class="docutils literal">persistent_db</span>-Modul und das etwas kompliziertere +<span class="docutils literal">pooled_db</span>-Modul eingehen. Einzelheiten zu den anderen Modulen finden Sie in deren Docstrings. 
Unter Verwendung der Python-Interpreter-Konsole können Sie -sich die Dokumentation des <span class="docutils literal">PooledDB</span>-Moduls wie folgt anzeigen lassen (dies +sich die Dokumentation des <span class="docutils literal">pooled_db</span>-Moduls wie folgt anzeigen lassen (dies funktioniert entsprechend auch mit den anderen Modulen):</p> -<pre class="literal-block">help(PooledDB)</pre> -<div class="section" id="id1"> -<h2>PersistentDB</h2> -<p>Wenn Sie das <span class="docutils literal">PersistentDB</span>-Modul einsetzen möchten, müssen Sie zuerst einen +<pre class="literal-block">help(pooled_db)</pre> +<section id="persistentdb-persistent-db-1"> +<h3>PersistentDB (persistent_db)</h3> +<p>Wenn Sie das <span class="docutils literal">persistent_db</span>-Modul einsetzen möchten, müssen Sie zuerst einen Generator für die von Ihnen gewünschte Art von Datenbankverbindungen einrichten, -indem Sie eine Instanz der Klasse <span class="docutils literal">PersistentDB</span> erzeugen, wobei Sie folgende +indem Sie eine Instanz der Klasse <span class="docutils literal">persistent_db</span> erzeugen, wobei Sie folgende Parameter angeben müssen:</p> <ul> <li><p><span class="docutils literal">creator</span>: entweder eine Funktion, die neue DB-API-2-Verbindungen @@ -299,9 +283,10 @@ <h2>PersistentDB</h2> </li> <li><p><span class="docutils literal">setsession</span>: eine optionale Liste von SQL-Befehlen zur Initialisierung der Datenbanksitzung, z.B. 
<span class="docutils literal">["set datestyle to german", <span class="pre">...]</span></span></p></li> -<li><p><span class="docutils literal">failures</span>: eine optionale Exception-Klasse oder ein Tupel von Exceptions +<li><p><span class="docutils literal">failures</span>: eine optionale Exception-Klasse oder ein Tupel von Exceptions, bei denen die Ausfallsicherung zum Tragen kommen soll, falls die Vorgabe -(OperationalError, InternalError) nicht geeignet sein sollte</p></li> +(OperationalError, InterfaceError, InternalError) für das verwendete +Datenbankadapter-Modul nicht geeignet sein sollte</p></li> <li><p><span class="docutils literal">ping</span>: mit diesem Parameter kann eingestellt werden, wann Verbindungen mit der <span class="docutils literal">ping()</span>-Methode geprüft werden, falls eine solche vorhanden ist (<span class="docutils literal">0</span> = <span class="docutils literal">None</span> = nie, <span class="docutils literal">1</span> = Standardwert = immer wenn neu angefragt, @@ -322,36 +307,39 @@ <h2>PersistentDB</h2> möchten, dass jede Verbindung Ihrer lokalen Datenbank <span class="docutils literal">meinedb</span> 1000 mal wiederverwendet werden soll, sieht die Initialisierung so aus:</p> <pre class="literal-block">import pgdb # importiere das verwendete DB-API-2-Modul -from DBUtils.PersistentDB import PersistentDB +from dbutils.persistent_db import PersistentDB persist = PersistentDB(pgdb, 1000, database='meinedb')</pre> <p>Nachdem Sie den Generator mit diesen Parametern eingerichtet haben, können Sie derartige Datenbankverbindungen von da an wie folgt anfordern:</p> <pre class="literal-block">db = persist.connection()</pre> <p>Sie können diese Verbindungen verwenden, als wären sie gewöhnliche DB-API-2-Datenbankverbindungen. 
Genauer genommen erhalten Sie die -"gehärtete" <span class="docutils literal">SteadyDB</span>-Version der zugrunde liegenden DB-API-2-Verbindung.</p> +"gehärtete" <span class="docutils literal">steady_db</span>-Version der zugrunde liegenden DB-API-2-Verbindung.</p> <p>Wenn Sie eine solche persistente Verbindung mit <span class="docutils literal">db.close()</span> schließen, wird dies stillschweigend ignoriert, denn sie würde beim nächsten Zugriff sowieso wieder geöffnet, und das wäre nicht im Sinne persistenter Verbindungen. Stattdessen wird die Verbindung automatisch dann geschlossen, wenn der Thread endet. Sie können dieses Verhalten ändern, indem Sie den Parameter namens <span class="docutils literal">closeable</span> setzen.</p> +<aside class="admonition warning"> +<p class="admonition-title">Warnung</p> <p>Bitte beachten Sie, dass Transaktionen explizit durch Aufruf der Methode -<span class="docutils literal">begin()</span> eingeleiten werden müssen. Hierdurch wird sichergestellt, dass +<span class="docutils literal">begin()</span> eingeleitet werden müssen. 
Hierdurch wird sichergestellt, dass das transparente Neueröffnen von Verbindungen bis zum Ende der Transaktion ausgesetzt wird, und dass die Verbindung zurückgerollt wird, before sie vom gleichen Thread erneut benutzt wird.</p> +</aside> <p>Das Holen einer Verbindung kann etwas beschleunigt werden, indem man den Parameter <span class="docutils literal">threadlocal</span> auf <span class="docutils literal">threading.local</span> setzt; dies könnte aber in einigen Umgebungen nicht funktionieren (es ist zum Beispiel bekannt, dass <span class="docutils literal">mod_wsgi</span> hier Probleme bereitet, da es Daten, die mit <span class="docutils literal">threading.local</span> gespeichert wurden, zwischen Requests löscht).</p> -</div> -<div class="section" id="id2"> -<h2>PooledDB</h2> -<p>Wenn Sie das <span class="docutils literal">PooledDB</span>-Modul einsetzen möchten, müssen Sie zuerst einen +</section> +<section id="pooleddb-pooled-db-1"> +<h3>PooledDB (pooled_db)</h3> +<p>Wenn Sie das <span class="docutils literal">pooled_db</span>-Modul einsetzen möchten, müssen Sie zuerst einen Pool für die von Ihnen gewünschte Art von Datenbankverbindungen einrichten, -indem Sie eine Instanz der Klasse <span class="docutils literal">PooledDB</span> erzeugen, wobei Sie folgende +indem Sie eine Instanz der Klasse <span class="docutils literal">pooled_db</span> erzeugen, wobei Sie folgende Parameter angeben müssen:</p> <ul> <li><p><span class="docutils literal">creator</span>: entweder eine Funktion, die neue DB-API-2-Verbindungen @@ -388,9 +376,10 @@ <h2>PooledDB</h2> in den Verbindungspool zurückgegeben werden (<span class="docutils literal">False</span> oder <span class="docutils literal">None</span> um mit <span class="docutils literal">begin()</span> gestartete Transaktionen zurückzurollen, der Standardwert <span class="docutils literal">True</span> rollt sicherheitshalber mögliche Transaktionen immer zurück)</p></li> -<li><p><span class="docutils literal">failures</span>: eine 
optionale Exception-Klasse oder ein Tupel von Exceptions +<li><p><span class="docutils literal">failures</span>: eine optionale Exception-Klasse oder ein Tupel von Exceptions, bei denen die Ausfallsicherung zum Tragen kommen soll, falls die Vorgabe -(OperationalError, InternalError) nicht geeignet sein sollte</p></li> +(OperationalError, InterfaceError, InternalError) für das verwendete +Datenbankadapter-Modul nicht geeignet sein sollte</p></li> <li><p><span class="docutils literal">ping</span>: mit diesem Parameter kann eingestellt werden, wann Verbindungen mit der <span class="docutils literal">ping()</span>-Methode geprüft werden, falls eine solche vorhanden ist (<span class="docutils literal">0</span> = <span class="docutils literal">None</span> = nie, <span class="docutils literal">1</span> = Standardwert = immer wenn neu angefragt, @@ -406,14 +395,14 @@ <h2>PooledDB</h2> und einen Pool von mindestens fünf Datenbankverbindungen zu Ihrer Datenbank <span class="docutils literal">meinedb</span> verwenden möchten, dann sieht die Initialisierung so aus:</p> <pre class="literal-block">import pgdb # importiere das verwendete DB-API-2-Modul -from DBUtils.PooledDB import PooledDB +from dbutils.pooled_db import PooledDB pool = PooledDB(pgdb, 5, database='meinedb')</pre> <p>Nachdem Sie den Pool für Datenbankverbindungen so eingerichtet haben, können Sie Verbindungen daraus wie folgt anfordern:</p> <pre class="literal-block">db = pool.connection()</pre> <p>Sie können diese Verbindungen verwenden, als wären sie gewöhnliche DB-API-2-Datenbankverbindungen. 
Genauer genommen erhalten Sie die -"gehärtete" <span class="docutils literal">SteadyDB</span>-Version der zugrunde liegenden DB-API-2-Verbindung.</p> +"gehärtete" <span class="docutils literal">steady_db</span>-Version der zugrunde liegenden DB-API-2-Verbindung.</p> <p>Bitte beachten Sie, dass die Verbindung von anderen Threads mitgenutzt werden kann, wenn Sie den Parameter <span class="docutils literal">maxshared</span> auf einen Wert größer als Null gesetzt haben, und der zugrunde liegende DB-API-2-Datenbankadapter dies erlaubt. @@ -437,64 +426,69 @@ <h2>PooledDB</h2> res = cur.fetchone() cur.close() # oder del cur db.close() # oder del db</pre> +<p>Sie können dies auch durch Verwendung von Kontext-Managern vereinfachen:</p> +<pre class="literal-block">with pool.connection() as db: + with db.cursor() as cur: + cur.execute(...) + res = cur.fetchone()</pre> +<aside class="admonition warning"> +<p class="admonition-title">Warnung</p> <p>Bitte beachten Sie, dass Transaktionen explizit durch Aufruf der Methode -<span class="docutils literal">begin()</span> eingeleiten werden müssen. Hierdurch wird sichergestellt, +<span class="docutils literal">begin()</span> eingeleitet werden müssen. 
Hierdurch wird sichergestellt, dass die Verbindung nicht mehr mit anderen Threads geteilt wird, dass das transparente Neueröffnen von Verbindungen bis zum Ende der Transaktion ausgesetzt wird, und dass die Verbindung zurückgerollt wird, bevor sie wieder an den Verbindungspool zurückgegeben wird.</p> -</div> -<div class="section" id="benutzung-in-webware-for-python"> -<h2>Benutzung in Webware for Python</h2> -<p>Wenn Sie DBUtils verwenden, um von Servlets des Web-Frameworks <a class="reference external" href="https://webwareforpython.github.io/w4py/">Webware -for Python</a> auf eine Datenbank zuzugreifen, dann müssen Sie sicherstellen, -dass die Generatoren zur Erzeugung von Datenbankverbindungen nur einmal -eingerichtet werden, wenn die Anwendung startet, und nicht jedes Mal, wenn -eine Servlet-Instanz erzeugt wird. Den hierfür nötigen Code können Sie -bei der Basis-Servlet-Klasse einfügen, dort wo das Modul oder die Klasse -initialisiert wird, oder Sie können die Funktion <span class="docutils literal">contextInitialize()</span> -im <span class="docutils literal">__init__.py</span>-Skript Ihres Anwendungskontextes verwenden.</p> -<p>Das zusammen mit DButils ausgelieferte Verzeichnis <span class="docutils literal">Examples</span> enthält -einen Beispielkontext für Webware for Python, der eine kleine Demo-Datenbank -verwendet, um Teilnehmer an einer Seminarreihe zu verwalten (die Idee für -dieses Beispiel wurde dem Artikel "<a class="reference external" href="http://www.linuxjournal.com/article/2605">The Python DB-API</a>" von Andrew Kuchling -entnommen).</p> -<p>Der Beispielkontext kann konfiguriert werden, indem entweder eine Konfig-Datei -<span class="docutils literal">Configs/Database.config</span> angelegt wird, oder indem die Standard-Parameter -direkt im Beispielservlet <span class="docutils literal">Examples/DBUtilsExample.py</span> geändert werden. 
-Auf diese Weise können Sie einen passenden Datenbanknutzer und sein Passwort
-festlegen, sowie den zugrunde liegenden Datenbankadapter auswählen (das
-klassische PyGreSQL-Modul oder irgendein DB-API-2-Modul). Wenn der Parameter
-<span class="docutils literal">maxcached</span> vorhanden ist, verwendet das Beispielservlet die
-<span class="docutils literal">Pooled</span>-Variante, andernfalls die <span class="docutils literal">Persistent</span>-Variante.</p>
-</div>
-</div>
-<div class="section" id="anmerkungen">
-<h1>Anmerkungen</h1>
-<p>Wenn Sie einen der bekannten "Object-Relational Mapper" <a class="reference external" href="http://www.sqlobject.org/">SQLObject</a> oder
-<a class="reference external" href="http://www.sqlalchemy.org">SQLAlchemy</a> verwenden, dann benötigen Sie DBUtils nicht, denn diese haben
+</aside>
+</section>
+</section>
+<section id="besonderheiten-bei-der-benutzung">
+<h2>Besonderheiten bei der Benutzung</h2>
+<p>Manchmal möchte man Datenbankverbindungen besonders vorbereiten, bevor sie
+von DBUtils verwendet werden, und dies ist nicht immer durch Verwendung
+der passenden Parameter möglich. Zum Beispiel kann es <span class="docutils literal">pyodbc</span> erfordern,
+dass man die Methode <span class="docutils literal">setencoding()</span> der Datenbankverbindung aufruft.
+Sie können dies erreichen, indem Sie eine modifizierte Version der
+Funktion <span class="docutils literal">connect()</span> verwenden und diese als <span class="docutils literal">creator</span> (dem ersten
+Argument) an <span class="docutils literal">PersistentDB</span> oder <span class="docutils literal">PooledDB</span> übergeben, etwa so:</p>
+<pre class="literal-block">from pyodbc import connect
+from dbutils.pooled_db import PooledDB
+
+def creator():
+    con = connect(...)
+    con.setdecoding(...)
+ return con + +creator.dbapi = pyodbc + +db_pool = PooledDB(creator, mincached=5)</pre> +</section> +<section id="anmerkungen"> +<h2>Anmerkungen</h2> +<p>Wenn Sie einen der bekannten "Object-Relational Mapper" <a class="reference external" href="http://sqlobject.org/">SQLObject</a> oder +<a class="reference external" href="https://www.sqlalchemy.org">SQLAlchemy</a> verwenden, dann benötigen Sie DBUtils nicht, denn diese haben ihre eigenen Mechanismen zum Pooling von Datenbankverbindungen eingebaut. Tatsächlich hat SQLObject 2 (SQL-API) das Pooling in eine separate Schicht ausgelagert, in der Code von DBUtils verwendet wird.</p> <p>Wenn Sie eine Lösung verwenden wie den Apache-Webserver mit <a class="reference external" href="http://modpython.org/">mod_python</a> oder <a class="reference external" href="https://github.com/GrahamDumpleton/mod_wsgi">mod_wsgi</a>, dann sollten Sie bedenken, dass Ihr Python-Code normalerweise im Kontext der Kindprozesse des Webservers läuft. Wenn Sie also das -<span class="docutils literal">PooledDB</span>-Modul einsetzen, und mehrere dieser Kindprozesse laufen, dann -werden Sie ebensoviele Pools mit Datenbankverbindungen erhalten. Wenn diese +<span class="docutils literal">pooled_db</span>-Modul einsetzen, und mehrere dieser Kindprozesse laufen, dann +werden Sie ebenso viele Pools mit Datenbankverbindungen erhalten. 
Wenn diese Prozesse viele Threads laufen lassen, dann mag dies eine sinnvoller Ansatz sein, wenn aber diese Prozesse nicht mehr als einen Worker-Thread starten, wie im Fall des Multi-Processing Moduls "prefork" für den Apache-Webserver, dann sollten Sie auf eine Middleware für das Connection-Pooling zurückgreifen, -die Multi-Processing unterstützt, wie zum Beispiel <a class="reference external" href="http://www.pgpool.net/">pgpool</a> oder <a class="reference external" href="https://pgbouncer.github.io/">pgbouncer</a> +die Multi-Processing unterstützt, wie zum Beispiel <a class="reference external" href="https://www.pgpool.net/">pgpool</a> oder <a class="reference external" href="https://pgbouncer.github.io/">pgbouncer</a> für die PostgreSQL-Datenbank.</p> -</div> -<div class="section" id="zukunft"> -<h1>Zukunft</h1> +</section> +<section id="zukunft"> +<h2>Zukunft</h2> <p>Einige Ideen für zukünftige Verbesserungen:</p> <ul class="simple"> <li><p>Alternativ zur Obergrenze in der Anzahl der Nutzung einer Datenbankverbindung könnte eine maximale Lebensdauer für die Verbindung implementiert werden.</p></li> -<li><p>Es könnten Module <span class="docutils literal">MonitorDB</span> und <span class="docutils literal">MonitorPg</span> hinzugefügt werden, die +<li><p>Es könnten Module <span class="docutils literal">monitor_db</span> und <span class="docutils literal">monitor_pg</span> hinzugefügt werden, die in einem separaten Thread ständig den "idle pool" und eventuell auch den "shared pool" bzw. die persistenten Verbindungen überwachen. 
Wenn eine unterbrochene Datenbankverbindung entdeckt wird, wird diese automatisch durch @@ -509,16 +503,14 @@ <h1>Zukunft</h1> <li><p>Optional sollten Benutzung, schlechte Verbindungen und Überschreitung von Obergrenzen in Logs gespeichert werden können.</p></li> </ul> -</div> -<div class="section" id="fehlermeldungen-und-feedback"> -<h1>Fehlermeldungen und Feedback</h1> -<p>Bitte Senden Sie Fehlermeldungen, Patches und Feedback direkt an den -Autor (unter Verwendung der unten angegebenen E-Mail-Adresse).</p> -<p>Probleme, die Webware betreffen, können auch in der <a class="reference external" href="https://lists.sourceforge.net/lists/listinfo/webware-discuss">Webware for Python -mailing list</a> diskutiert werden.</p> -</div> -<div class="section" id="links"> -<h1>Links</h1> +</section> +<section id="fehlermeldungen-und-feedback"> +<h2>Fehlermeldungen und Feedback</h2> +<p>Fehlermeldungen, Patches und Feedback können Sie als <a class="reference external" href="https://github.com/WebwareForPython/DBUtils/issues">Issues</a> oder +<a class="reference external" href="https://github.com/WebwareForPython/DBUtils/pulls">Pull Requests</a> auf der <a class="reference external" href="https://github.com/WebwareForPython/DBUtils">GitHub-Projektseite</a> von DBUtils übermitteln.</p> +</section> +<section id="links"> +<h2>Links</h2> <p>Einige Links zu verwandter und alternativer Software:</p> <ul class="simple"> <li><p><a class="reference external" href="https://github.com/WebwareForPython/DBUtils">DBUtils</a></p></li> @@ -526,20 +518,20 @@ <h1>Links</h1> <li><p><a class="reference external" href="https://webwareforpython.github.io/w4py/">Webware for Python</a> Framework</p></li> <li><p>Python <a class="reference external" href="https://www.python.org/dev/peps/pep-0249/">DB-API 2</a></p></li> <li><p><a class="reference external" href="https://www.postgresql.org/">PostgreSQL</a> Datenbank</p></li> -<li><p><a class="reference external" href="http://www.pygresql.org/">PyGreSQL</a> 
Python-Adapter for PostgreSQL</p></li> -<li><p><a class="reference external" href="http://www.pgpool.net/">pgpool</a> Middleware für Connection-Pooling mit PostgreSQL</p></li> +<li><p><a class="reference external" href="https://www.pygresql.org/">PyGreSQL</a> Python-Adapter for PostgreSQL</p></li> +<li><p><a class="reference external" href="https://www.pgpool.net/">pgpool</a> Middleware für Connection-Pooling mit PostgreSQL</p></li> <li><p><a class="reference external" href="https://pgbouncer.github.io/">pgbouncer</a> Middleware für Connection-Pooling mit PostgreSQL</p></li> -<li><p><a class="reference external" href="http://www.sqlobject.org/">SQLObject</a> Objekt-relationaler Mapper</p></li> -<li><p><a class="reference external" href="http://www.sqlalchemy.org">SQLAlchemy</a> Objekt-relationaler Mapper</p></li> +<li><p><a class="reference external" href="http://sqlobject.org/">SQLObject</a> Objekt-relationaler Mapper</p></li> +<li><p><a class="reference external" href="https://www.sqlalchemy.org">SQLAlchemy</a> Objekt-relationaler Mapper</p></li> </ul> -</div> -<div class="section" id="autoren"> -<h1>Autoren</h1> +</section> +<section id="autoren"> +<h2>Autoren</h2> <dl class="field-list simple"> -<dt>Autor</dt> -<dd><p>Christoph Zwerschke <<a class="reference external" href="mailto:cito@online.de">cito@online.de</a>></p> +<dt>Autor<span class="colon">:</span></dt> +<dd><p><a class="reference external" href="https://github.com/Cito">Christoph Zwerschke</a></p> </dd> -<dt>Beiträge</dt> +<dt>Beiträge<span class="colon">:</span></dt> <dd><p>DBUtils benutzt Code, Anmerkungen und Vorschläge von Ian Bicking, Chuck Esterbrook (Webware for Python), Dan Green (DBTools), Jay Love, Michael Palmer, Tom Schwaller, Geoffrey Talvola, @@ -547,14 +539,14 @@ <h1>Autoren</h1> Matthew Harriger, Gregory Piñero und Josef van Eenbergen.</p> </dd> </dl> -</div> -<div class="section" id="copyright-und-lizenz"> -<h1>Copyright und Lizenz</h1> -<p>Copyright © 2005-2018 Christoph Zwerschke. 
+</section> +<section id="copyright-und-lizenz"> +<h2>Copyright und Lizenz</h2> +<p>Copyright © 2005-2024 Christoph Zwerschke. Alle Rechte vorbehalten.</p> <p>DBUtils ist freie und quelloffene Software, lizenziert unter der <a class="reference external" href="https://opensource.org/licenses/MIT">MIT-Lizenz</a>.</p> -</div> -</div> +</section> +</main> </body> </html> diff --git a/DBUtils/Docs/UsersGuide.de.rst b/docs/main.de.rst similarity index 65% rename from DBUtils/Docs/UsersGuide.de.rst rename to docs/main.de.rst index ee59e63..ce97364 100644 --- a/DBUtils/Docs/UsersGuide.de.rst +++ b/docs/main.de.rst @@ -1,11 +1,10 @@ Benutzeranleitung für DBUtils +++++++++++++++++++++++++++++ -:Version: 1.4 -:Released: 09/26/20 +:Version: 3.1.0 :Translations: English_ | German -.. _English: UsersGuide.html +.. _English: main.html .. contents:: Inhalt @@ -15,10 +14,12 @@ Zusammenfassung DBUtils_ ist eine Sammlung von Python-Modulen, mit deren Hilfe man in Python_ geschriebene Multithread-Anwendungen auf sichere und effiziente Weise an -Datenbanken anbinden kann. DBUtils wurde mit Blick auf `Webware for Python`_ -als Anwendung und PyGreSQL_ als PostgreSQL_-Datenbankadapter entwickelt, -kann aber für beliebige Python-Anwendungen und beliebige auf `DB-API 2`_ -beruhende Python-Datenbankadapter verwendet werden. +Datenbanken anbinden kann. + +DBUtils wurde ursprünglich speziell für `Webware for Python`_ als Anwendung +und PyGreSQL_ als PostgreSQL_-Datenbankadapter entwickelt, +kann aber inzwischen für beliebige Python-Anwendungen und beliebige +auf `DB-API 2`_ beruhende Python-Datenbankadapter verwendet werden. Module @@ -26,41 +27,42 @@ Module DBUtils ist als Python-Package realisiert worden, das aus zwei verschiedenen Gruppen von Modulen besteht: Einer Gruppe zur Verwendung mit beliebigen -DB-API-2-Datenbankadaptern, und einer Gruppe zur Verwendung mit dem klassischen PyGreSQL-Datenbankadapter-Modul. 
- -+-------------------+----------------------------------------------+ -| Allgemeine Variante für beliebige DB-API-2-Adapter | -+===================+==============================================+ -| SteadyDB.py | Gehärtete DB-API-2-Datenbankverbindungen | -+-------------------+----------------------------------------------+ -| PooledDB.py | Pooling für DB-API-2-Datenbankverbindungen | -+-------------------+----------------------------------------------+ -| PersistentDB.py | Persistente DB-API-2-Datenbankverbindungen | -+-------------------+----------------------------------------------+ -| SimplePooledDB.py | Einfaches Pooling für DB-API 2 | -+-------------------+----------------------------------------------+ - -+-------------------+----------------------------------------------+ -| Variante speziell für den klassischen PyGreSQL-Adapter | -+===================+==============================================+ -| SteadyPg.py | Gehärtete klassische PyGreSQL-Verbindungen | -+-------------------+----------------------------------------------+ -| PooledPg.py | Pooling für klassische PyGreSQL-Verbindungen | -+-------------------+----------------------------------------------+ -| PersistentPg.py | Persistente klassische PyGreSQL-Verbindungen | -+-------------------+----------------------------------------------+ -| SimplePooledPg.py | Einfaches Pooling für klassisches PyGreSQL | -+-------------------+----------------------------------------------+ +DB-API-2-Datenbankadaptern, und einer Gruppe zur Verwendung mit dem klassischen +PyGreSQL-Datenbankadapter-Modul. 
+ ++------------------+----------------------------------------------+ +| Allgemeine Variante für beliebige DB-API-2-Adapter | ++==================+==============================================+ +| steady_db | Gehärtete DB-API-2-Datenbankverbindungen | ++------------------+----------------------------------------------+ +| pooled_db | Pooling für DB-API-2-Datenbankverbindungen | ++------------------+----------------------------------------------+ +| persistent_db | Persistente DB-API-2-Datenbankverbindungen | ++------------------+----------------------------------------------+ +| simple_pooled_db | Einfaches Pooling für DB-API 2 | ++------------------+----------------------------------------------+ + ++------------------+----------------------------------------------+ +| Variante speziell für den klassischen PyGreSQL-Adapter | ++==================+==============================================+ +| steady_pg | Gehärtete klassische PyGreSQL-Verbindungen | ++------------------+----------------------------------------------+ +| pooled_pg | Pooling für klassische PyGreSQL-Verbindungen | ++------------------+----------------------------------------------+ +| persistent_pg | Persistente klassische PyGreSQL-Verbindungen | ++------------------+----------------------------------------------+ +| simple_pooled_pg | Einfaches Pooling für klassisches PyGreSQL | ++------------------+----------------------------------------------+ Die Abhängigkeiten der Module in der Variante für beliebige DB-API-2-Adapter sind im folgenden Diagramm dargestellt: -.. image:: dbdep.gif +.. image:: dependencies_db.png Die Abhängigkeiten der Module in der Variante für den klassischen PyGreSQL-Adapter sehen ähnlich aus: -.. image:: pgdep.gif +.. 
image:: dependencies_pg.png Download @@ -79,39 +81,24 @@ Das Source-Code-Repository befindet sich hier auf GitHub:: Installation ============ -Installation als eigenständiges Paket ------------------------------------- -Wenn Sie DBUtils für andere Anwendungen als Webware for Python verwenden -möchten, empfiehlt es sich, das Paket auf die übliche Weise zu installieren:: +Installation +------------ +Das Paket kann auf die übliche Weise installiert werden:: python setup.py install -Sie können auch `pip`_ für Download und Installation verwenden:: +Noch einfacher ist, das Paket in einem Schritt mit `pip`_ automatisch +herunterzuladen und zu installieren:: pip install DBUtils .. _pip: https://pip.pypa.io/ -Installation als Unterpaket (Plug-In) von Webware for Python ------------------------------------------------------------- -Wenn Sie DBUtils nur als Ergänzung für das Web-Framework Webware for Python -verwenden wollen, sollten Sie DBUtils als Webware-Plug-In installieren:: - - python setup.py install --install-lib=/pfad/zu/Webware - -Ersetzen Sie ``/pfad/zu/Webware`` hierbei durch den Pfad zum Wurzelverzeichnis -der Installation von Webware for Python. Sie müssen auch das Installationsskript -von Webware for Python laufen lassen, wenn dies noch nicht geschehen ist, oder -wenn Sie DBUtils in die Webware-Dokumentation integrieren wollen:: - - cd /pfad/zu/Webware - python install.py - Anforderungen ============= -DBUtils unterstützt die Python_ Versionen 2.7 und 3.5 bis 3.8. +DBUtils unterstützt die Python_ Versionen 3.7 bis 3.12. Die Module in der Variante für klassisches PyGreSQL benötigen PyGreSQL_ Version 4.0 oder höher, während die Module in der allgemeinen Variante @@ -125,26 +112,31 @@ Funktionalität Dieser Abschnitt verwendet nur die Bezeichnungen der DB-API-2-Variante, aber Entsprechendes gilt auch für die PyGreSQL-Variante. 
- -SimplePooledDB -------------- -``DBUtils.SimplePooledDB`` ist eine sehr elementare Referenz-Implementierung -eines Pools von Datenbankverbindungen. Hiermit ist ein Vorratsspeicher an -Datenbankverbindungen gemeint, aus dem sich die Python-Anwendung bedienen kann. -Diese Implementierung ist weit weniger ausgefeilt als das eigentliche -``PooledDB``-Modul und stellt insbesondere keine Ausfallsicherung zur Verfügung. -``DBUtils.SimplePooledDB`` ist im Wesentlichen identisch mit dem zu Webware for -Python gehörenden Modul ``MiscUtils.DBPool``. Es ist eher zur Verdeutlichung -des Konzepts gedacht, als zum Einsatz im produktiven Betrieb. - -SteadyDB -------- -``DBUtils.SteadyDB`` ist ein Modul, das "gehärtete" Datenbankverbindungen -bereitstellt, denen gewöhnlichen Verbindungen eines DB-API-2-Datenbankadapters -zugrunde liegen. Eine "gehärtete" Verbindung wird bei Zugriff automatisch, -ohne dass die Anwendung dies bemerkt, wieder geöffnet, wenn sie geschlossen -wurde, die Datenbankverbindung unterbrochen wurde, oder wenn sie öfter als -ein optionales Limit genutzt wurde. +DBUtils installiert sich als Paket ``dbutils``, das alle hier beschriebenen +Module enthält. Jedes dieser Module enthält im Wesentlichen eine Klasse, die +einen analogen Namen trägt und die jeweilige Funktionalität bereitstellt. +So enthält z.B. das Modul ``dbutils.pooled_db`` die Klasse ``PooledDB``. + +SimplePooledDB (simple_pooled_db) +--------------------------------- +Die Klasse ``SimplePooledDB`` in ``dbutils.simple_pooled_db`` ist eine sehr +elementare Referenz-Implementierung eines Pools von Datenbankverbindungen. +Hiermit ist ein Vorratsspeicher an Datenbankverbindungen gemeint, aus dem sich +die Python-Anwendung bedienen kann. Diese Implementierung ist weit weniger +ausgefeilt als das eigentliche ``pooled_db``-Modul und stellt insbesondere +keine Ausfallsicherung zur Verfügung. 
``dbutils.simple_pooled_db`` ist im +Wesentlichen identisch mit dem zu Webware for Python gehörenden Modul +``MiscUtils.DBPool``. Es ist eher zur Verdeutlichung des Konzepts gedacht, +als zum Einsatz im produktiven Betrieb. + +SteadyDBConnection (steady_db) +------------------------------ +Die Klasse ``SteadyDBConnection`` im Modul ``dbutils.steady_db`` stellt +"gehärtete" Datenbankverbindungen bereit, denen gewöhnlichen Verbindungen +eines DB-API-2-Datenbankadapters zugrunde liegen. Eine "gehärtete" Verbindung +wird bei Zugriff automatisch, ohne dass die Anwendung dies bemerkt, wieder +geöffnet, wenn sie geschlossen wurde, die Datenbankverbindung unterbrochen +wurde, oder wenn sie öfter als ein optionales Limit genutzt wurde. Ein typisches Beispiel wo dies benötig wird, ist, wenn die Datenbank neu gestartet wurde, während Ihre Anwendung immer noch läuft und Verbindungen @@ -152,22 +144,22 @@ zur Datenbank offen hat, oder wenn Ihre Anwendung auf eine entfernte Datenbank über ein Netzwerk zugreift, das durch eine Firewall geschützt ist, und die Firewall neu gestartet wurde und dabei ihren Verbindungsstatus verloren hat. -Normalerweise benutzen Sie das ``SteadyDB``-Modul nicht direkt; es wird aber -von den beiden nächsten Modulen benötigt, ``PersistentDB`` und ``PooledDB``. +Normalerweise benutzen Sie das ``steady_db``-Modul nicht direkt; es wird aber +von den beiden nächsten Modulen benötigt, ``persistent_db`` und ``pooled_db``. -PersistentDB ------------- -``DBUtils.PersistentDB`` stellt gehärtete, thread-affine, persistente -Datenbankverbindungen zur Verfügung, unter Benutzung eines beliebigen -DB-API-2-Datenbankadapters. Mit "thread-affin" und "persistent" ist -hierbei gemeint, dass die einzelnen Datenbankverbindungen den jeweiligen -Threads fest zugeordnet bleiben und während der Laufzeit des Threads nicht -geschlossen werden. 
+PersistentDB (persistent_db) +---------------------------- +Die Klasse ``PersistentDB`` im Modul ``dbutils.persistent_db`` stellt +gehärtete, thread-affine, persistente Datenbankverbindungen zur Verfügung, +unter Benutzung eines beliebigen DB-API-2-Datenbankadapters. Mit "thread-affin" +und "persistent" ist hierbei gemeint, dass die einzelnen Datenbankverbindungen +den jeweiligen Threads fest zugeordnet bleiben und während der Laufzeit des +Threads nicht geschlossen werden. Das folgende Diagramm zeigt die beteiligten Verbindungsschichten, wenn Sie -``PersistentDB``-Datenbankverbindungen einsetzen: +``persistent_db``-Datenbankverbindungen einsetzen: -.. image:: persist.gif +.. image:: persistent.png Immer wenn ein Thread eine Datenbankverbindung zum ersten Mal öffnet, wird eine neue Datenbankverbindung geöffnet, die von da an immer wieder für genau @@ -177,29 +169,29 @@ gleiche Thread wieder eine Datenbankverbindung anfordert, diese gleiche bereits geöffnete Datenbankverbindung wieder verwendet werden kann. Die Verbindung wird automatisch geschlossen, wenn der Thread beendet wird. -Kurz gesagt versucht ``PersistentDB`` Datenbankverbindungen wiederzuverwerten, +Kurz gesagt versucht ``persistent_db`` Datenbankverbindungen wiederzuverwerten, um die Gesamteffizienz der Datenbankzugriffe Ihrer Multithread-Anwendungen zu steigern, aber es wird dabei sichergestellt, dass verschiedene Threads niemals die gleiche Verbindung benutzen. -Daher arbeitet ``PersistentDB`` sogar dann problemlos, wenn der zugrunde +Daher arbeitet ``persistent_db`` sogar dann problemlos, wenn der zugrunde liegende DB-API-2-Datenbankadapter nicht thread-sicher auf der Verbindungsebene ist, oder wenn parallele Threads Parameter der Datenbank-Sitzung verändern oder Transaktionen mit mehreren SQL-Befehlen durchführen. 
-PooledDB --------- -``DBUtils.PooledDB`` stellt, unter Benutzung eines beliebigen -DB-API-2-Datenbankadapters, einen Pool von gehärteten, thread-sicheren -Datenbankverbindungen zur Verfügung, die automatisch, ohne dass die Anwendung -dies bemerkt, wiederverwendet werden. +PooledDB (pooled_db) +-------------------- +Die Klasse ``PooledDB`` im Modul ``dbutils.pooled_db`` stellt, unter Benutzung +eines beliebigen DB-API-2-Datenbankadapters, einen Pool von gehärteten, +thread-sicheren Datenbankverbindungen zur Verfügung, die automatisch, ohne dass +die Anwendung dies bemerkt, wiederverwendet werden. Das folgende Diagramm zeigt die beteiligten Verbindungsschichten, wenn Sie -``PooledDB``-Datenbankverbindungen einsetzen: +``pooled_db``-Datenbankverbindungen einsetzen: -.. image:: pool.gif +.. image:: pooled.png -Wie im Diagramm angedeutet, kann ``PooledDB`` geöffnete Datenbankverbindungen +Wie im Diagramm angedeutet, kann ``pooled_db`` geöffnete Datenbankverbindungen den verschiedenen Threads beliebig zuteilen. Dies geschieht standardmäßig, wenn Sie den Verbindungspool mit einem positiven Wert für ``maxshared`` einrichten und der zugrunde liegende DB-API-2-Datenbankadapter auf der Verbindungsebene @@ -216,24 +208,24 @@ Datenbankverbindungen zurückgegeben, damit sie wiederverwertet werden kann. Wenn der zugrunde liegende DB-API-Datenbankadapter nicht thread-sicher ist, werden Thread-Locks verwendet, um sicherzustellen, dass die -``PooledDB``-Verbindungen dennoch thread-sicher sind. Sie brauchen sich also +``pooled_db``-Verbindungen dennoch thread-sicher sind. Sie brauchen sich also hierum keine Sorgen zu machen, aber Sie sollten darauf achten, dedizierte Datenbankverbindungen zu verwenden, sobald Sie Parameter der Datenbanksitzung verändern oder Transaktionen mit mehreren SQL-Befehlen ausführen. 
Die Qual der Wahl ----------------- -Sowohl ``PersistentDB`` als auch ``PooledDB`` dienen dem gleichen Zweck, +Sowohl ``persistent_db`` als auch ``pooled_db`` dienen dem gleichen Zweck, nämlich die Effizienz des Datenbankzugriffs durch Wiederverwendung von Datenbankverbindungen zu steigern, und dabei gleichzeitig die Stabilität zu gewährleisten, selbst wenn die Datenbankverbindung unterbrochen wird. Welches der beiden Module sollte also verwendet werden? Nach den obigen -Erklärungen ist es klar, dass ``PersistentDB`` dann sinnvoller ist, wenn +Erklärungen ist es klar, dass ``persistent_db`` dann sinnvoller ist, wenn Ihre Anwendung eine gleich bleibende Anzahl Threads verwendet, die häufig auf die Datenbank zugreifen. In diesem Fall werden Sie ungefähr die gleiche Anzahl geöffneter Datenbankverbindungen erhalten. Wenn jedoch Ihre Anwendung -häufig Threads beendet und neu startet, dann ist ``PooledDB`` die bessere +häufig Threads beendet und neu startet, dann ist ``pooled_db`` die bessere Lösung, die auch mehr Möglichkeiten zur Feineinstellung zur Verbesserung der Effizienz erlaubt, insbesondere bei Verwendung eines thread-sicheren DB-API-2-Datenbankadapters. @@ -250,19 +242,19 @@ der Initialisierung auch einige Unterschiede, sowohl zwischen den "Pooled"- und den "Persistent"-Varianten, als auch zwischen den DB-API-2- und den PyGreSQL-Varianten. -Wir werden hier nur auf das ``PersistentDB``-Modul und das etwas kompliziertere -``PooledDB``-Modul eingehen. Einzelheiten zu den anderen Modulen finden Sie +Wir werden hier nur auf das ``persistent_db``-Modul und das etwas kompliziertere +``pooled_db``-Modul eingehen. Einzelheiten zu den anderen Modulen finden Sie in deren Docstrings. 
Unter Verwendung der Python-Interpreter-Konsole können Sie -sich die Dokumentation des ``PooledDB``-Moduls wie folgt anzeigen lassen (dies +sich die Dokumentation des ``pooled_db``-Moduls wie folgt anzeigen lassen (dies funktioniert entsprechend auch mit den anderen Modulen):: - help(PooledDB) + help(pooled_db) -PersistentDB ------------ -Wenn Sie das ``PersistentDB``-Modul einsetzen möchten, müssen Sie zuerst einen +PersistentDB (persistent_db) +---------------------------- +Wenn Sie das ``persistent_db``-Modul einsetzen möchten, müssen Sie zuerst einen Generator für die von Ihnen gewünschte Art von Datenbankverbindungen einrichten, -indem Sie eine Instanz der Klasse ``PersistentDB`` erzeugen, wobei Sie folgende +indem Sie eine Instanz der Klasse ``PersistentDB`` erzeugen, wobei Sie folgende Parameter angeben müssen: * ``creator``: entweder eine Funktion, die neue DB-API-2-Verbindungen @@ -277,9 +269,10 @@ Parameter angeben müssen: * ``setsession``: eine optionale Liste von SQL-Befehlen zur Initialisierung der Datenbanksitzung, z.B. 
``["set datestyle to german", ...]`` -* ``failures``: eine optionale Exception-Klasse oder ein Tupel von Exceptions +* ``failures``: eine optionale Exception-Klasse oder ein Tupel von Exceptions, bei denen die Ausfallsicherung zum Tragen kommen soll, falls die Vorgabe - (OperationalError, InternalError) nicht geeignet sein sollte + (OperationalError, InterfaceError, InternalError) für das verwendete + Datenbankadapter-Modul nicht geeignet sein sollte * ``ping``: mit diesem Parameter kann eingestellt werden, wann Verbindungen mit der ``ping()``-Methode geprüft werden, falls eine solche vorhanden ist @@ -305,7 +298,7 @@ möchten, dass jede Verbindung Ihrer lokalen Datenbank ``meinedb`` 1000 mal wiederverwendet werden soll, sieht die Initialisierung so aus:: import pgdb # importiere das verwendete DB-API-2-Modul - from DBUtils.PersistentDB import PersistentDB + from dbutils.persistent_db import PersistentDB persist = PersistentDB(pgdb, 1000, database='meinedb') Nachdem Sie den Generator mit diesen Parametern eingerichtet haben, können @@ -315,7 +308,7 @@ Sie derartige Datenbankverbindungen von da an wie folgt anfordern:: Sie können diese Verbindungen verwenden, als wären sie gewöhnliche DB-API-2-Datenbankverbindungen. Genauer genommen erhalten Sie die -"gehärtete" ``SteadyDB``-Version der zugrunde liegenden DB-API-2-Verbindung. +"gehärtete" ``steady_db``-Version der zugrunde liegenden DB-API-2-Verbindung. Wenn Sie eine solche persistente Verbindung mit ``db.close()`` schließen, wird dies stillschweigend ignoriert, denn sie würde beim nächsten Zugriff @@ -324,11 +317,12 @@ Stattdessen wird die Verbindung automatisch dann geschlossen, wenn der Thread endet. Sie können dieses Verhalten ändern, indem Sie den Parameter namens ``closeable`` setzen. -Bitte beachten Sie, dass Transaktionen explizit durch Aufruf der Methode -``begin()`` eingeleiten werden müssen. 
Hierdurch wird sichergestellt, dass -das transparente Neueröffnen von Verbindungen bis zum Ende der Transaktion -ausgesetzt wird, und dass die Verbindung zurückgerollt wird, before sie vom -gleichen Thread erneut benutzt wird. +.. warning:: + Bitte beachten Sie, dass Transaktionen explizit durch Aufruf der Methode + ``begin()`` eingeleitet werden müssen. Hierdurch wird sichergestellt, dass + das transparente Neueröffnen von Verbindungen bis zum Ende der Transaktion + ausgesetzt wird, und dass die Verbindung zurückgerollt wird, bevor sie vom + gleichen Thread erneut benutzt wird. Das Holen einer Verbindung kann etwas beschleunigt werden, indem man den Parameter ``threadlocal`` auf ``threading.local`` setzt; dies könnte aber in @@ -336,11 +330,11 @@ einigen Umgebungen nicht funktionieren (es ist zum Beispiel bekannt, dass ``mod_wsgi`` hier Probleme bereitet, da es Daten, die mit ``threading.local`` gespeichert wurden, zwischen Requests löscht). -PooledDB -------- -Wenn Sie das ``PooledDB``-Modul einsetzen möchten, müssen Sie zuerst einen +PooledDB (pooled_db) +-------------------- +Wenn Sie das ``pooled_db``-Modul einsetzen möchten, müssen Sie zuerst einen Pool für die von Ihnen gewünschte Art von Datenbankverbindungen einrichten, -indem Sie eine Instanz der Klasse ``PooledDB`` erzeugen, wobei Sie folgende +indem Sie eine Instanz der Klasse ``PooledDB`` erzeugen, wobei Sie folgende Parameter angeben müssen: * ``creator``: entweder eine Funktion, die neue DB-API-2-Verbindungen @@ -386,9 +380,10 @@ Parameter angeben müssen: um mit ``begin()`` gestartete Transaktionen zurückzurollen, der Standardwert ``True`` rollt sicherheitshalber mögliche Transaktionen immer zurück) -* ``failures``: eine optionale Exception-Klasse oder ein Tupel von Exceptions +* ``failures``: eine optionale Exception-Klasse oder ein Tupel von Exceptions, bei denen die Ausfallsicherung zum Tragen kommen soll, falls die Vorgabe - (OperationalError, InternalError) nicht geeignet sein sollte + 
(OperationalError, InterfaceError, InternalError) für das verwendete + Datenbankadapter-Modul nicht geeignet sein sollte * ``ping``: mit diesem Parameter kann eingestellt werden, wann Verbindungen mit der ``ping()``-Methode geprüft werden, falls eine solche vorhanden ist @@ -407,7 +402,7 @@ und einen Pool von mindestens fünf Datenbankverbindungen zu Ihrer Datenbank ``meinedb`` verwenden möchten, dann sieht die Initialisierung so aus:: import pgdb # importiere das verwendete DB-API-2-Modul - from DBUtils.PooledDB import PooledDB + from dbutils.pooled_db import PooledDB pool = PooledDB(pgdb, 5, database='meinedb') Nachdem Sie den Pool für Datenbankverbindungen so eingerichtet haben, können @@ -417,7 +412,7 @@ Sie Verbindungen daraus wie folgt anfordern:: Sie können diese Verbindungen verwenden, als wären sie gewöhnliche DB-API-2-Datenbankverbindungen. Genauer genommen erhalten Sie die -"gehärtete" ``SteadyDB``-Version der zugrunde liegenden DB-API-2-Verbindung. +"gehärtete" ``steady_db``-Version der zugrunde liegenden DB-API-2-Verbindung. Bitte beachten Sie, dass die Verbindung von anderen Threads mitgenutzt werden kann, wenn Sie den Parameter ``maxshared`` auf einen Wert größer als Null @@ -451,38 +446,43 @@ sie gebraucht werden, etwa so:: cur.close() # oder del cur db.close() # oder del db -Bitte beachten Sie, dass Transaktionen explizit durch Aufruf der Methode -``begin()`` eingeleiten werden müssen. Hierdurch wird sichergestellt, -dass die Verbindung nicht mehr mit anderen Threads geteilt wird, dass das -transparente Neueröffnen von Verbindungen bis zum Ende der Transaktion -ausgesetzt wird, und dass die Verbindung zurückgerollt wird, bevor sie -wieder an den Verbindungspool zurückgegeben wird. 
- -Benutzung in Webware for Python -------------------------------- -Wenn Sie DBUtils verwenden, um von Servlets des Web-Frameworks `Webware -for Python`_ auf eine Datenbank zuzugreifen, dann müssen Sie sicherstellen, -dass die Generatoren zur Erzeugung von Datenbankverbindungen nur einmal -eingerichtet werden, wenn die Anwendung startet, und nicht jedes Mal, wenn -eine Servlet-Instanz erzeugt wird. Den hierfür nötigen Code können Sie -bei der Basis-Servlet-Klasse einfügen, dort wo das Modul oder die Klasse -initialisiert wird, oder Sie können die Funktion ``contextInitialize()`` -im ``__init__.py``-Skript Ihres Anwendungskontextes verwenden. - -Das zusammen mit DButils ausgelieferte Verzeichnis ``Examples`` enthält -einen Beispielkontext für Webware for Python, der eine kleine Demo-Datenbank -verwendet, um Teilnehmer an einer Seminarreihe zu verwalten (die Idee für -dieses Beispiel wurde dem Artikel "`The Python DB-API`_" von Andrew Kuchling -entnommen). - -Der Beispielkontext kann konfiguriert werden, indem entweder eine Konfig-Datei -``Configs/Database.config`` angelegt wird, oder indem die Standard-Parameter -direkt im Beispielservlet ``Examples/DBUtilsExample.py`` geändert werden. -Auf diese Weise können Sie einen passenden Datenbanknutzer und sein Passwort -festlegen, sowie den zugrunde liegenden Datenbankadapter auswählen (das -klassische PyGreSQL-Modul oder irgendein DB-API-2-Modul). Wenn der Parameter -``maxcached`` vorhanden ist, verwendet das Beispielservlet die -``Pooled``-Variante, andernfalls die ``Persistent``-Variante. +Sie können dies auch durch Verwendung von Kontext-Managern vereinfachen:: + + with pool.connection() as db: + with db.cursor() as cur: + cur.execute(...) + res = cur.fetchone() + +.. warning:: + Bitte beachten Sie, dass Transaktionen explizit durch Aufruf der Methode + ``begin()`` eingeleitet werden müssen. 
Hierdurch wird sichergestellt, + dass die Verbindung nicht mehr mit anderen Threads geteilt wird, dass das + transparente Neueröffnen von Verbindungen bis zum Ende der Transaktion + ausgesetzt wird, und dass die Verbindung zurückgerollt wird, bevor sie + wieder an den Verbindungspool zurückgegeben wird. + + +Besonderheiten bei der Benutzung +================================ +Manchmal möchte man Datenbankverbindungen besonders vorbereiten, bevor sie +von DBUtils verwendet werden, und dies ist nicht immer durch Verwendung +der passenden Parameter möglich. Zum Beispiel kann es ``pyodbc`` erfordern, +dass man die Methode ``setencoding()`` der Datenbankverbindung aufruft. +Sie können dies erreichen, indem Sie eine modifizierte Version der +Funktion ``connect()`` verwenden und diese als ``creator`` (dem ersten +Argument) an ``PersistentDB`` oder ``PooledDB`` übergeben, etwa so:: + + from pyodbc import connect + from dbutils.pooled_db import PooledDB + + def creator(): + con = connect(...) + con.setdecoding(...) + return con + + creator.dbapi = pyodbc + + db_pool = PooledDB(creator, mincached=5) Anmerkungen @@ -496,8 +496,8 @@ ausgelagert, in der Code von DBUtils verwendet wird. Wenn Sie eine Lösung verwenden wie den Apache-Webserver mit mod_python_ oder mod_wsgi_, dann sollten Sie bedenken, dass Ihr Python-Code normalerweise im Kontext der Kindprozesse des Webservers läuft. Wenn Sie also das -``PooledDB``-Modul einsetzen, und mehrere dieser Kindprozesse laufen, dann -werden Sie ebensoviele Pools mit Datenbankverbindungen erhalten. Wenn diese +``pooled_db``-Modul einsetzen, und mehrere dieser Kindprozesse laufen, dann +werden Sie ebenso viele Pools mit Datenbankverbindungen erhalten. 
Wenn diese Prozesse viele Threads laufen lassen, dann mag dies eine sinnvoller Ansatz sein, wenn aber diese Prozesse nicht mehr als einen Worker-Thread starten, wie im Fall des Multi-Processing Moduls "prefork" für den Apache-Webserver, @@ -512,7 +512,7 @@ Einige Ideen für zukünftige Verbesserungen: * Alternativ zur Obergrenze in der Anzahl der Nutzung einer Datenbankverbindung könnte eine maximale Lebensdauer für die Verbindung implementiert werden. -* Es könnten Module ``MonitorDB`` und ``MonitorPg`` hinzugefügt werden, die +* Es könnten Module ``monitor_db`` und ``monitor_pg`` hinzugefügt werden, die in einem separaten Thread ständig den "idle pool" und eventuell auch den "shared pool" bzw. die persistenten Verbindungen überwachen. Wenn eine unterbrochene Datenbankverbindung entdeckt wird, wird diese automatisch durch @@ -530,12 +530,12 @@ Einige Ideen für zukünftige Verbesserungen: Fehlermeldungen und Feedback ============================ -Bitte Senden Sie Fehlermeldungen, Patches und Feedback direkt an den -Autor (unter Verwendung der unten angegebenen E-Mail-Adresse). - -Probleme, die Webware betreffen, können auch in der `Webware for Python -mailing list`_ diskutiert werden. +Fehlermeldungen, Patches und Feedback können Sie als Issues_ oder +`Pull Requests`_ auf der `GitHub-Projektseite`_ von DBUtils übermitteln. +.. _GitHub-Projektseite: https://github.com/WebwareForPython/DBUtils +.. _Issues: https://github.com/WebwareForPython/DBUtils/issues +.. _Pull Requests: https://github.com/WebwareForPython/DBUtils/pulls Links ===== @@ -555,24 +555,23 @@ Einige Links zu verwandter und alternativer Software: .. _DBUtils: https://github.com/WebwareForPython/DBUtils .. _Python: https://www.python.org .. _Webware for Python: https://webwareforpython.github.io/w4py/ -.. _Webware for Python mailing list: https://lists.sourceforge.net/lists/listinfo/webware-discuss .. _DB-API 2: https://www.python.org/dev/peps/pep-0249/ .. 
_The Python DB-API: http://www.linuxjournal.com/article/2605 .. _PostgresQL: https://www.postgresql.org/ -.. _PyGreSQL: http://www.pygresql.org/ -.. _SQLObject: http://www.sqlobject.org/ -.. _SQLAlchemy: http://www.sqlalchemy.org -.. _Apache: http://httpd.apache.org/ +.. _PyGreSQL: https://www.pygresql.org/ +.. _SQLObject: http://sqlobject.org/ +.. _SQLAlchemy: https://www.sqlalchemy.org +.. _Apache: https://httpd.apache.org/ .. _mod_python: http://modpython.org/ .. _mod_wsgi: https://github.com/GrahamDumpleton/mod_wsgi -.. _pgpool: http://www.pgpool.net/ +.. _pgpool: https://www.pgpool.net/ .. _pgbouncer: https://pgbouncer.github.io/ Autoren ======= -:Autor: Christoph Zwerschke <cito@online.de> +:Autor: `Christoph Zwerschke`_ :Beiträge: DBUtils benutzt Code, Anmerkungen und Vorschläge von Ian Bicking, Chuck Esterbrook (Webware for Python), Dan Green (DBTools), @@ -580,11 +579,12 @@ Autoren Warren Smith (DbConnectionPool), Ezio Vernacotola, Jehiah Czebotar, Matthew Harriger, Gregory Piñero und Josef van Eenbergen. +.. _Christoph Zwerschke: https://github.com/Cito Copyright und Lizenz ==================== -Copyright © 2005-2018 Christoph Zwerschke. +Copyright © 2005-2024 Christoph Zwerschke. Alle Rechte vorbehalten. 
DBUtils ist freie und quelloffene Software, diff --git a/DBUtils/Docs/UsersGuide.html b/docs/main.html similarity index 60% rename from DBUtils/Docs/UsersGuide.html rename to docs/main.html index 2ec476e..dede0dc 100644 --- a/DBUtils/Docs/UsersGuide.html +++ b/docs/main.html @@ -1,226 +1,210 @@ <!DOCTYPE html> <html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en" lang="en"> <head> -<meta charset="utf-8"/> -<meta name="generator" content="Docutils 0.15.2: http://docutils.sourceforge.net/" /> +<meta charset="utf-8" /> +<meta name="generator" content="Docutils 0.21.2: https://docutils.sourceforge.io/" /> +<meta name="viewport" content="width=device-width, initial-scale=1" /> <title>DBUtils User's Guide</title> -<link rel="stylesheet" href="Doc.css" type="text/css" /> +<link rel="stylesheet" href="doc.css" type="text/css" /> </head> -<body> -<div class="document" id="dbutils-user-s-guide"> +<body class="with-toc"> +<main id="dbutils-user-s-guide"> <h1 class="title">DBUtils User's Guide</h1> <dl class="docinfo simple"> -<dt class="version">Version</dt> -<dd class="version">1.4</dd> -<dt class="released">Released</dt> -<dd class="released"><p>09/26/20</p> -</dd> -<dt class="translations">Translations</dt> -<dd class="translations"><p>English | <a class="reference external" href="UsersGuide.de.html">German</a></p> +<dt class="version">Version<span class="colon">:</span></dt> +<dd class="version">3.1.0</dd> +<dt class="translations">Translations<span class="colon">:</span></dt> +<dd class="translations"><p>English | <a class="reference external" href="main.de.html">German</a></p> </dd> </dl> -<div class="contents topic" id="contents"> -<p class="topic-title first">Contents</p> +<nav class="contents" id="contents" role="doc-toc"> +<p class="topic-title">Contents</p> <ul class="simple"> -<li><p><a class="reference internal" href="#synopsis" id="id4">Synopsis</a></p></li> -<li><p><a class="reference internal" href="#modules" id="id5">Modules</a></p></li> -<li><p><a 
class="reference internal" href="#download" id="id6">Download</a></p></li> -<li><p><a class="reference internal" href="#installation" id="id7">Installation</a></p> +<li><p><a class="reference internal" href="#synopsis" id="toc-entry-1">Synopsis</a></p></li> +<li><p><a class="reference internal" href="#modules" id="toc-entry-2">Modules</a></p></li> +<li><p><a class="reference internal" href="#download" id="toc-entry-3">Download</a></p></li> +<li><p><a class="reference internal" href="#installation" id="toc-entry-4">Installation</a></p> <ul> -<li><p><a class="reference internal" href="#installation-as-a-standalone-top-level-package" id="id8">Installation as a standalone (top-level) package</a></p></li> -<li><p><a class="reference internal" href="#installation-as-a-webware-for-python-subpackage-plug-in" id="id9">Installation as a Webware for Python subpackage (plug-in)</a></p></li> +<li><p><a class="reference internal" href="#installation-1" id="toc-entry-5">Installation</a></p></li> </ul> </li> -<li><p><a class="reference internal" href="#requirements" id="id10">Requirements</a></p></li> -<li><p><a class="reference internal" href="#functionality" id="id11">Functionality</a></p> +<li><p><a class="reference internal" href="#requirements" id="toc-entry-6">Requirements</a></p></li> +<li><p><a class="reference internal" href="#functionality" id="toc-entry-7">Functionality</a></p> <ul> -<li><p><a class="reference internal" href="#simplepooleddb" id="id12">SimplePooledDB</a></p></li> -<li><p><a class="reference internal" href="#steadydb" id="id13">SteadyDB</a></p></li> -<li><p><a class="reference internal" href="#persistentdb" id="id14">PersistentDB</a></p></li> -<li><p><a class="reference internal" href="#pooleddb" id="id15">PooledDB</a></p></li> -<li><p><a class="reference internal" href="#which-one-to-use" id="id16">Which one to use?</a></p></li> +<li><p><a class="reference internal" href="#simplepooleddb-simple-pooled-db" id="toc-entry-8">SimplePooledDB 
(simple_pooled_db)</a></p></li> +<li><p><a class="reference internal" href="#steadydbconnection-steady-db" id="toc-entry-9">SteadyDBConnection (steady_db)</a></p></li> +<li><p><a class="reference internal" href="#persistentdb-persistent-db" id="toc-entry-10">PersistentDB (persistent_db)</a></p></li> +<li><p><a class="reference internal" href="#pooleddb-pooled-db" id="toc-entry-11">PooledDB (pooled_db)</a></p></li> +<li><p><a class="reference internal" href="#which-one-to-use" id="toc-entry-12">Which one to use?</a></p></li> </ul> </li> -<li><p><a class="reference internal" href="#usage" id="id17">Usage</a></p> +<li><p><a class="reference internal" href="#usage" id="toc-entry-13">Usage</a></p> <ul> -<li><p><a class="reference internal" href="#id1" id="id18">PersistentDB</a></p></li> -<li><p><a class="reference internal" href="#id2" id="id19">PooledDB</a></p></li> -<li><p><a class="reference internal" href="#usage-in-webware-for-python" id="id20">Usage in Webware for Python</a></p></li> +<li><p><a class="reference internal" href="#persistentdb-persistent-db-1" id="toc-entry-14">PersistentDB (persistent_db)</a></p></li> +<li><p><a class="reference internal" href="#pooleddb-pooled-db-1" id="toc-entry-15">PooledDB (pooled_db)</a></p></li> </ul> </li> -<li><p><a class="reference internal" href="#notes" id="id21">Notes</a></p></li> -<li><p><a class="reference internal" href="#future" id="id22">Future</a></p></li> -<li><p><a class="reference internal" href="#bug-reports-and-feedback" id="id23">Bug reports and feedback</a></p></li> -<li><p><a class="reference internal" href="#links" id="id24">Links</a></p></li> -<li><p><a class="reference internal" href="#credits" id="id25">Credits</a></p></li> -<li><p><a class="reference internal" href="#copyright-and-license" id="id26">Copyright and License</a></p></li> +<li><p><a class="reference internal" href="#advanced-usage" id="toc-entry-16">Advanced Usage</a></p></li> +<li><p><a class="reference internal" href="#notes" 
id="toc-entry-17">Notes</a></p></li> +<li><p><a class="reference internal" href="#future" id="toc-entry-18">Future</a></p></li> +<li><p><a class="reference internal" href="#bug-reports-and-feedback" id="toc-entry-19">Bug reports and feedback</a></p></li> +<li><p><a class="reference internal" href="#links" id="toc-entry-20">Links</a></p></li> +<li><p><a class="reference internal" href="#credits" id="toc-entry-21">Credits</a></p></li> +<li><p><a class="reference internal" href="#copyright-and-license" id="toc-entry-22">Copyright and License</a></p></li> </ul> -</div> -<div class="section" id="synopsis"> -<h1>Synopsis</h1> +</nav> +<section id="synopsis"> +<h2>Synopsis</h2> <p><a class="reference external" href="https://github.com/WebwareForPython/DBUtils">DBUtils</a> is a suite of Python modules allowing to connect in a safe and -efficient way between a threaded <a class="reference external" href="https://www.python.org">Python</a> application and a database. DBUtils -has been written in view of <a class="reference external" href="https://webwareforpython.github.io/w4py/">Webware for Python</a> as the application and -<a class="reference external" href="http://www.pygresql.org/">PyGreSQL</a> as the adapter to a <a class="reference external" href="https://www.postgresql.org/">PostgreSQL</a> database, but it can be used -for any other Python application and <a class="reference external" href="https://www.python.org/dev/peps/pep-0249/">DB-API 2</a> conformant database adapter.</p> -</div> -<div class="section" id="modules"> -<h1>Modules</h1> +efficient way between a threaded <a class="reference external" href="https://www.python.org">Python</a> application and a database.</p> +<p>DBUtils has been originally written particularly for <a class="reference external" href="https://webwareforpython.github.io/w4py/">Webware for Python</a> as +the application and <a class="reference external" href="https://www.pygresql.org/">PyGreSQL</a> as the adapter to a <a class="reference 
external" href="https://www.postgresql.org/">PostgreSQL</a> database, but it +can meanwhile be used for any other Python application and <a class="reference external" href="https://www.python.org/dev/peps/pep-0249/">DB-API 2</a> +conformant database adapter.</p> +</section> +<section id="modules"> +<h2>Modules</h2> <p>The DBUtils suite is realized as a Python package containing two subsets of modules, one for use with arbitrary DB-API 2 modules, the other one for use with the classic PyGreSQL module.</p> <table> -<colgroup> -<col style="width: 31%" /> -<col style="width: 69%" /> -</colgroup> <thead> <tr><th class="head" colspan="2"><p>Universal DB-API 2 variant</p></th> </tr> </thead> <tbody> -<tr><td><p>SteadyDB.py</p></td> +<tr><td><p>steady_db</p></td> <td><p>Hardened DB-API 2 connections</p></td> </tr> -<tr><td><p>PooledDB.py</p></td> +<tr><td><p>pooled_db</p></td> <td><p>Pooling for DB-API 2 connections</p></td> </tr> -<tr><td><p>PersistentDB.py</p></td> +<tr><td><p>persistent_db</p></td> <td><p>Persistent DB-API 2 connections</p></td> </tr> -<tr><td><p>SimplePooledDB.py</p></td> +<tr><td><p>simple_pooled_db</p></td> <td><p>Simple pooling for DB-API 2</p></td> </tr> </tbody> </table> <table> -<colgroup> -<col style="width: 31%" /> -<col style="width: 69%" /> -</colgroup> <thead> <tr><th class="head" colspan="2"><p>Classic PyGreSQL variant</p></th> </tr> </thead> <tbody> -<tr><td><p>SteadyPg.py</p></td> +<tr><td><p>steady_pg</p></td> <td><p>Hardened classic PyGreSQL connections</p></td> </tr> -<tr><td><p>PooledPg.py</p></td> +<tr><td><p>pooled_pg</p></td> <td><p>Pooling for classic PyGreSQL connections</p></td> </tr> -<tr><td><p>PersistentPg.py</p></td> +<tr><td><p>persistent_pg</p></td> <td><p>Persistent classic PyGreSQL connections</p></td> </tr> -<tr><td><p>SimplePooledPg.py</p></td> +<tr><td><p>simple_pooled_pg</p></td> <td><p>Simple pooling for classic PyGreSQL</p></td> </tr> </tbody> </table> <p>The dependencies of the modules in the universal DB-API 2 
variant are as indicated in the following diagram:</p> -<img alt="dbdep.gif" src="dbdep.gif" /> +<img alt="dependencies_db.png" src="dependencies_db.png" /> <p>The dependencies of the modules in the classic PyGreSQL variant are similar:</p> -<img alt="pgdep.gif" src="pgdep.gif" /> -</div> -<div class="section" id="download"> -<h1>Download</h1> +<img alt="dependencies_pg.png" src="dependencies_pg.png" /> +</section> +<section id="download"> +<h2>Download</h2> <p>You can download the actual version of DBUtils from the Python Package Index at:</p> <pre class="literal-block">https://pypi.python.org/pypi/DBUtils</pre> <p>The source code repository can be found here on GitHub:</p> <pre class="literal-block">https://github.com/WebwareForPython/DBUtils</pre> -</div> -<div class="section" id="installation"> -<h1>Installation</h1> -<div class="section" id="installation-as-a-standalone-top-level-package"> -<h2>Installation as a standalone (top-level) package</h2> -<p>If you intend to use DBUtils from other applications than Webware for Python, -it is recommended to install the package in the usual way:</p> +</section> +<section id="installation"> +<h2>Installation</h2> +<section id="installation-1"> +<h3>Installation</h3> +<p>The package can be installed in the usual way:</p> <pre class="literal-block">python setup.py install</pre> -<p>You can also use <a class="reference external" href="https://pip.pypa.io/">pip</a> for download and installation:</p> +<p>It is even easier to download and install the package in one go using <a class="reference external" href="https://pip.pypa.io/">pip</a>:</p> <pre class="literal-block">pip install DBUtils</pre> -</div> -<div class="section" id="installation-as-a-webware-for-python-subpackage-plug-in"> -<h2>Installation as a Webware for Python subpackage (plug-in)</h2> -<p>If you want to use DBUtils as a supplement for the Webware for Python -framework only, you should install it as a Webware plug-in:</p> -<pre class="literal-block">python 
setup.py install --install-lib=/path/to/Webware</pre> -<p>Replace <span class="docutils literal">/path/to/Webware</span> with the path to the root directory of -your Webware for Python installation. You will also need to run the -Webware installer if this has not been done already or if you want to -integrate the DBUtils documentation into the Webware documentation:</p> -<pre class="literal-block">cd path/to/Webware -python install.py</pre> -</div> -</div> -<div class="section" id="requirements"> -<h1>Requirements</h1> -<p>DBUtils supports <a class="reference external" href="https://www.python.org">Python</a> version 2.6 and Python versions 3.5 to 3.8.</p> -<p>The modules in the classic PyGreSQL variant need <a class="reference external" href="http://www.pygresql.org/">PyGreSQL</a> version 4.0 +</section> +</section> +<section id="requirements"> +<h2>Requirements</h2> +<p>DBUtils supports <a class="reference external" href="https://www.python.org">Python</a> versions 3.7 to 3.12.</p> +<p>The modules in the classic PyGreSQL variant need <a class="reference external" href="https://www.pygresql.org/">PyGreSQL</a> version 4.0 or above, while the modules in the universal DB-API 2 variant run with any Python <a class="reference external" href="https://www.python.org/dev/peps/pep-0249/">DB-API 2</a> compliant database interface module.</p> -</div> -<div class="section" id="functionality"> -<h1>Functionality</h1> +</section> +<section id="functionality"> +<h2>Functionality</h2> <p>This section will refer to the names in the DB-API 2 variant only, but the same applies to the classic PyGreSQL variant.</p> -<div class="section" id="simplepooleddb"> -<h2>SimplePooledDB</h2> -<p><span class="docutils literal">DBUtils.SimplePooledDB</span> is a very basic reference implementation of -a pooled database connection. It is much less sophisticated than the -regular <span class="docutils literal">PooledDB</span> module and is particularly lacking the failover -functionality. 
<span class="docutils literal">DBUtils.SimplePooledDB</span> is essentially the same as -the <span class="docutils literal">MiscUtils.DBPool</span> module that is part of Webware for Python. +<p>DBUtils installs itself as a package <span class="docutils literal">dbutils</span> containing all the modules +that are described in this guide. Each of these modules contains essentially +one class with an analogous name that provides the corresponding functionality. +For instance, the module <span class="docutils literal">dbutils.pooled_db</span> contains the class <span class="docutils literal">PooledDB</span>.</p> +<section id="simplepooleddb-simple-pooled-db"> +<h3>SimplePooledDB (simple_pooled_db)</h3> +<p>The class <span class="docutils literal">SimplePooledDB</span> in <span class="docutils literal">dbutils.simple_pooled_db</span> is a very basic +reference implementation of a pooled database connection. It is much less +sophisticated than the regular <span class="docutils literal">pooled_db</span> module and is particularly lacking +the failover functionality. <span class="docutils literal">dbutils.simple_pooled_db</span> is essentially the +same as the <span class="docutils literal">MiscUtils.DBPool</span> module that is part of Webware for Python. You should consider it a demonstration of concept rather than something that should go into production.</p> -</div> -<div class="section" id="steadydb"> -<h2>SteadyDB</h2> -<p><span class="docutils literal">DBUtils.SteadyDB</span> is a module implementing "hardened" connections -to a database, based on ordinary connections made by any DB-API 2 -database module. 
A "hardened" connection will transparently reopen upon -access when it has been closed or the database connection has been lost +</section> +<section id="steadydbconnection-steady-db"> +<h3>SteadyDBConnection (steady_db)</h3> +<p>The class <span class="docutils literal">SteadyDBConnection</span> in the module <span class="docutils literal">dbutils.steady_db</span> implements +"hardened" connections to a database, based on ordinary connections made by any +DB-API 2 database module. A "hardened" connection will transparently reopen +upon access when it has been closed or the database connection has been lost or when it is used more often than an optional usage limit.</p> <p>A typical example where this is needed is when the database has been restarted while your application is still running and has open connections to the database, or when your application accesses a remote database in a network that is separated by a firewall and the firewall has been restarted and lost its state.</p> -<p>Usually, you will not use the <span class="docutils literal">SteadyDB</span> module directly; it merely serves -as a basis for the next two modules, <span class="docutils literal">PersistentDB</span> and <span class="docutils literal">PooledDB</span>.</p> +<p>Usually, you will not use the <span class="docutils literal">steady_db</span> module directly; it merely serves -as a basis for the next two modules, <span class="docutils literal">persistent_db</span> and <span class="docutils literal">pooled_db</span>.</p> +</section> +<section id="persistentdb-persistent-db"> +<h3>PersistentDB (persistent_db)</h3> +<p>The class <span class="docutils literal">PersistentDB</span> in the module <span class="docutils 
literal">dbutils.persistent_db</span> implements +steady, thread-affine, persistent connections to a database, using any DB-API 2 +database module. "Thread-affine" and "persistent" means that the individual +database connections stay assigned to the respective threads and will not be +closed during the lifetime of the threads.</p> <p>The following diagram shows the connection layers involved when you -are using <span class="docutils literal">PersistentDB</span> connections:</p> -<img alt="persist.gif" src="persist.gif" /> +are using <span class="docutils literal">persistent_db</span> connections:</p> +<img alt="persistent.png" src="persistent.png" /> <p>Whenever a thread opens a database connection for the first time, a new connection to the database will be opened that will be used from now on for this specific thread. When the thread closes the database connection, it will still be kept open so that the next time when a connection is requested by the same thread, this already opened connection can be used. 
The connection will be closed automatically when the thread dies.</p> -<p>In short: <span class="docutils literal">PersistentDB</span> tries to recycle database connections to +<p>In short: <span class="docutils literal">persistent_db</span> tries to recycle database connections to increase the overall database access performance of your threaded application, but it makes sure that connections are never shared between threads.</p> -<p>Therefore, <span class="docutils literal">PersistentDB</span> will work perfectly even if the underlying +<p>Therefore, <span class="docutils literal">persistent_db</span> will work perfectly even if the underlying DB-API module is not thread-safe at the connection level, and it will avoid problems when other threads change the database session or perform transactions spreading over more than one SQL command.</p> -</div> -<div class="section" id="pooleddb"> -<h2>PooledDB</h2> -<p><span class="docutils literal">DBUtils.PooledDB</span> implements a pool of steady, thread-safe cached -connections to a database which are transparently reused, using any -DB-API 2 database module.</p> +</section> +<section id="pooleddb-pooled-db"> +<h3>PooledDB (pooled_db)</h3> +<p>The class <span class="docutils literal">PooledDB</span> in the module <span class="docutils literal">dbutils.pooled_db</span> implements a pool +of steady, thread-safe cached connections to a database which are transparently +reused, using any DB-API 2 database module.</p> <p>The following diagram shows the connection layers involved when you -are using <span class="docutils literal">PooledDB</span> connections:</p> -<img alt="pool.gif" src="pool.gif" /> -<p>As the diagram indicates, <span class="docutils literal">PooledDB</span> can share opened database connections +are using <span class="docutils literal">pooled_db</span> connections:</p> +<img alt="pooled.png" src="pooled.png" /> +<p>As the diagram indicates, <span class="docutils literal">pooled_db</span> can share opened 
database connections between different threads. This will happen by default if you set up the connection pool with a positive value of <span class="docutils literal">maxshared</span> and the underlying DB-API 2 is thread-safe at the connection level, but you can also request @@ -229,46 +213,46 @@ <h2>PooledDB</h2> at least <span class="docutils literal">mincached</span> and at the most <span class="docutils literal">maxcached</span> idle connections that will be used whenever a thread is requesting a dedicated database connection or the pool of shared connections is not yet full. When a thread closes a -connection that is not shared any more, it is returned back to the pool of +connection that is not shared anymore, it is returned back to the pool of idle connections so that it can be recycled again.</p> <p>If the underlying DB-API module is not thread-safe, thread locks will be -used to ensure that the <span class="docutils literal">PooledDB</span> connections are thread-safe. So you +used to ensure that the <span class="docutils literal">pooled_db</span> connections are thread-safe. So you don't need to worry about that, but you should be careful to use dedicated connections whenever you change the database session or perform transactions spreading over more than one SQL command.</p> -</div> -<div class="section" id="which-one-to-use"> -<h2>Which one to use?</h2> -<p>Both <span class="docutils literal">PersistentDB</span> and <span class="docutils literal">PooledDB</span> serve the same purpose to improve +</section> +<section id="which-one-to-use"> +<h3>Which one to use?</h3> +<p>Both <span class="docutils literal">persistent_db</span> and <span class="docutils literal">pooled_db</span> serve the same purpose to improve the database access performance by recycling database connections, while preserving stability even if database connection will be disrupted.</p> <p>So which of these two modules should you use? 
From the above explanations -it is clear that <span class="docutils literal">PersistentDB</span> will make more sense if your application +it is clear that <span class="docutils literal">persistent_db</span> will make more sense if your application keeps a constant number of threads which frequently use the database. In this case, you will always have the same amount of open database connections. However, if your application frequently starts and ends threads, then it -will be better to use <span class="docutils literal">PooledDB</span>. The latter will also allow more +will be better to use <span class="docutils literal">pooled_db</span>. The latter will also allow more fine-tuning, particularly if you are using a thread-safe DB-API 2 module.</p> <p>Since the interface of both modules is similar, you can easily switch from one to the other and check which one will suit better.</p> -</div> -</div> -<div class="section" id="usage"> -<h1>Usage</h1> +</section> +</section> +<section id="usage"> +<h2>Usage</h2> <p>The usage of all the modules is similar, but there are also some differences in the initialization between the "Pooled" and "Persistent" variants and also between the universal DB-API 2 and the classic PyGreSQL variants.</p> -<p>We will cover here only the <span class="docutils literal">PersistentDB</span> module and the more complex -<span class="docutils literal">PooledDB</span> module. For the details of the other modules, have a look +<p>We will cover here only the <span class="docutils literal">persistent_db</span> module and the more complex +<span class="docutils literal">pooled_db</span> module. For the details of the other modules, have a look at their module docstrings. 
Using the Python interpreter console, you can -display the documentation of the <span class="docutils literal">PooledDB</span> module as follows (this +display the documentation of the <span class="docutils literal">pooled_db</span> module as follows (this works analogously for the other modules):</p> -<pre class="literal-block">help(PooledDB)</pre> -<div class="section" id="id1"> -<h2>PersistentDB</h2> -<p>In order to make use of the <span class="docutils literal">PersistentDB</span> module, you first need to set +<pre class="literal-block">help(pooled_db)</pre> +<section id="persistentdb-persistent-db-1"> +<h3>PersistentDB (persistent_db)</h3> +<p>In order to make use of the <span class="docutils literal">persistent_db</span> module, you first need to set up a generator for your kind of database connections by creating an instance -of <span class="docutils literal">PersistentDB</span>, passing the following parameters:</p> +of <span class="docutils literal">persistent_db</span>, passing the following parameters:</p> <ul> <li><p><span class="docutils literal">creator</span>: either an arbitrary function returning new DB-API 2 connection objects or a DB-API 2 compliant database module</p></li> @@ -280,7 +264,8 @@ <h2>PersistentDB</h2> prepare the session, e.g. 
<span class="docutils literal">["set datestyle to german", <span class="pre">...]</span></span></p></li> <li><p><span class="docutils literal">failures</span>: an optional exception class or a tuple of exception classes for which the connection failover mechanism shall be applied, -if the default (OperationalError, InternalError) is not adequate</p></li> +if the default (OperationalError, InterfaceError, InternalError) +is not adequate for the used database module</p></li> <li><p><span class="docutils literal">ping</span>: an optional flag controlling when connections are checked with the <span class="docutils literal">ping()</span> method if such a method is available (<span class="docutils literal">0</span> = <span class="docutils literal">None</span> = never, <span class="docutils literal">1</span> = default = whenever it is requested, @@ -300,32 +285,35 @@ <h2>PersistentDB</h2> <p>For instance, if you are using <span class="docutils literal">pgdb</span> as your DB-API 2 database module and want every connection to your local database <span class="docutils literal">mydb</span> to be reused 1000 times:</p> <pre class="literal-block">import pgdb # import used DB-API 2 module -from DBUtils.PersistentDB import PersistentDB +from dbutils.persistent_db import PersistentDB persist = PersistentDB(pgdb, 1000, database='mydb')</pre> <p>Once you have set up the generator with these parameters, you can request database connections of that kind:</p> <pre class="literal-block">db = persist.connection()</pre> <p>You can use these connections just as if they were ordinary DB-API 2 -connections. Actually what you get is the hardened <span class="docutils literal">SteadyDB</span> version of +connections. 
Actually what you get is the hardened <span class="docutils literal">steady_db</span> version of the underlying DB-API 2 connection.</p> <p>Closing a persistent connection with <span class="docutils literal">db.close()</span> will be silently ignored since it would be reopened at the next usage anyway and contrary to the intent of having persistent connections. Instead, the connection will be automatically closed when the thread dies. -You can change this behavior be setting the <span class="docutils literal">closeable</span> parameter.</p> +You can change this behavior by setting the <span class="docutils literal">closeable</span> parameter.</p> +<aside class="admonition warning"> +<p class="admonition-title">Warning</p> <p>Note that you need to explicitly start transactions by calling the <span class="docutils literal">begin()</span> method. This ensures that the transparent reopening will be suspended until the end of the transaction, and that the connection will be rolled back before being reused by the same thread.</p> +</aside> <p>By setting the <span class="docutils literal">threadlocal</span> parameter to <span class="docutils literal">threading.local</span>, getting connections may become a bit faster, but this may not work in all environments (for instance, <span class="docutils literal">mod_wsgi</span> is known to cause problems since it clears the <span class="docutils literal">threading.local</span> data between requests).</p> -</div> -<div class="section" id="id2"> -<h2>PooledDB</h2> -<p>In order to make use of the <span class="docutils literal">PooledDB</span> module, you first need to set up the -database connection pool by creating an instance of <span class="docutils literal">PooledDB</span>, passing the +</section> +<section id="pooleddb-pooled-db-1"> +<h3>PooledDB (pooled_db)</h3> +<p>In order to make use of the <span class="docutils literal">pooled_db</span> module, you first need to set up the +database connection pool by creating an instance 
of <span class="docutils literal">pooled_db</span>, passing the following parameters:</p> <ul> <li><p><span class="docutils literal">creator</span>: either an arbitrary function returning new DB-API 2 @@ -353,11 +341,12 @@ <h2>PooledDB</h2> <li><p><span class="docutils literal">setsession</span>: an optional list of SQL commands that may serve to prepare the session, e.g. <span class="docutils literal">["set datestyle to german", <span class="pre">...]</span></span></p></li> <li><p><span class="docutils literal">reset</span>: how connections should be reset when returned to the pool -(<span class="docutils literal">False</span> or <span class="docutils literal">None</span> to rollback transcations started with <span class="docutils literal">begin()</span>, +(<span class="docutils literal">False</span> or <span class="docutils literal">None</span> to rollback transactions started with <span class="docutils literal">begin()</span>, the default value <span class="docutils literal">True</span> always issues a rollback for safety's sake)</p></li> <li><p><span class="docutils literal">failures</span>: an optional exception class or a tuple of exception classes for which the connection failover mechanism shall be applied, -if the default (OperationalError, InternalError) is not adequate</p></li> +if the default (OperationalError, InterfaceError, InternalError) +is not adequate for the used database module</p></li> <li><p><span class="docutils literal">ping</span>: an optional flag controlling when connections are checked with the <span class="docutils literal">ping()</span> method if such a method is available (<span class="docutils literal">0</span> = <span class="docutils literal">None</span> = never, <span class="docutils literal">1</span> = default = whenever fetched from the pool, @@ -372,13 +361,13 @@ <h2>PooledDB</h2> <p>For instance, if you are using <span class="docutils literal">pgdb</span> as your DB-API 2 database module and want a pool of at least five 
connections to your local database <span class="docutils literal">mydb</span>:</p> <pre class="literal-block">import pgdb # import used DB-API 2 module -from DBUtils.PooledDB import PooledDB +from dbutils.pooled_db import PooledDB pool = PooledDB(pgdb, 5, database='mydb')</pre> <p>Once you have set up the connection pool you can request database connections from that pool:</p> <pre class="literal-block">db = pool.connection()</pre> <p>You can use these connections just as if they were ordinary DB-API 2 -connections. Actually what you get is the hardened <span class="docutils literal">SteadyDB</span> version of +connections. Actually what you get is the hardened <span class="docutils literal">steady_db</span> version of the underlying DB-API 2 connection.</p> <p>Please note that the connection may be shared with other threads by default if you set a non-zero <span class="docutils literal">maxshared</span> parameter and the DB-API 2 module allows @@ -386,9 +375,9 @@ <h2>PooledDB</h2> <pre class="literal-block">db = pool.connection(shareable=False)</pre> <p>Instead of this, you can also get a dedicated connection as follows:</p> <pre class="literal-block">db = pool.dedicated_connection()</pre> -<p>If you don't need it any more, you should immediately return it to the +<p>If you don't need it anymore, you should immediately return it to the pool with <span class="docutils literal">db.close()</span>. You can get another connection in the same way.</p> -<p><em>Warning:</em> In a threaded environment, never do the following:</p> +<p>⚠Warning: In a threaded environment, never do the following:</p> <pre class="literal-block">pool.connection().cursor().execute(...)</pre> <p>This would release the connection too early for reuse which may be fatal if the connections are not thread-safe. 
Make sure that the connection @@ -399,60 +388,66 @@ <h2>PooledDB</h2> res = cur.fetchone() cur.close() # or del cur db.close() # or del db</pre> +<p>You can also use context managers for simpler code:</p> +<pre class="literal-block">with pool.connection() as db: + with db.cursor() as cur: + cur.execute(...) + res = cur.fetchone()</pre> +<aside class="admonition warning"> +<p class="admonition-title">Warning</p> <p>Note that you need to explicitly start transactions by calling the <span class="docutils literal">begin()</span> method. This ensures that the connection will not be shared with other threads, that the transparent reopening will be suspended until the end of the transaction, and that the connection will be rolled back before being given back to the connection pool.</p> -</div> -<div class="section" id="usage-in-webware-for-python"> -<h2>Usage in Webware for Python</h2> -<p>If you are using DBUtils in order to access a database from <a class="reference external" href="https://webwareforpython.github.io/w4py/">Webware -for Python</a> servlets, you need to make sure that you set up your -database connection generators only once when the application starts, -and not every time a servlet instance is created. 
For this purpose, -you can add the necessary code to the module or class initialization -code of your base servlet class, or you can use the <span class="docutils literal">contextInitialize()</span> -function in the <span class="docutils literal">__init__.py</span> script of your application context.</p> -<p>The directory <span class="docutils literal">Examples</span> that is part of the DButils distribution -contains an example context for Webware for Python that uses a small -demo database designed to track the attendees for a series of seminars -(the idea for this example has been taken from the article -"<a class="reference external" href="http://www.linuxjournal.com/article/2605">The Python DB-API</a>" by Andrew Kuchling).</p> -<p>The example context can be configured by either creating a config file -<span class="docutils literal">Configs/Database.config</span> or by directly changing the default parameters -in the example servlet <span class="docutils literal">Examples/DBUtilsExample.py</span>. This way you can -set an appropriate database user and password, and you can choose the -underlying database module (PyGreSQL classic or any DB-API 2 module). -If the setting <span class="docutils literal">maxcached</span> is present, then the example servlet will use -the "Pooled" variant, otherwise it will use the "Persistent" variant.</p> -</div> -</div> -<div class="section" id="notes"> -<h1>Notes</h1> +</aside> +</section> +</section> +<section id="advanced-usage"> +<h2>Advanced Usage</h2> +<p>Sometimes you may want to prepare connections before they are used by +DBUtils, in ways that are not possible by just using the right parameters. +For instance, <span class="docutils literal">pyodbc</span> may require to configure connections by calling +the <span class="docutils literal">setencoding()</span> method of the connection. 
You can do this by passing +a modified <span class="docutils literal">connect()</span> function to <span class="docutils literal">PersistentDB</span> or <span class="docutils literal">PooledDB</span> as +<span class="docutils literal">creator</span> (the first argument), like this:</p> +<pre class="literal-block">import pyodbc +from dbutils.pooled_db import PooledDB + +def creator(): + con = pyodbc.connect(...) + con.setencoding(...) + return con + +creator.dbapi = pyodbc + +db_pool = PooledDB(creator, mincached=5)</pre> +</section> +<section id="notes"> +<h2>Notes</h2> <p>If you are using one of the popular object-relational mappers <a class="reference external" href="http://www.sqlobject.org/">SQLObject</a> -or <a class="reference external" href="http://www.sqlalchemy.org">SQLAlchemy</a>, you won't need DBUtils, since they come with their own +or <a class="reference external" href="https://www.sqlalchemy.org">SQLAlchemy</a>, you won't need DBUtils, since they come with their own connection pools. SQLObject 2 (SQL-API) is actually borrowing some code from DBUtils to split the pooling out into a separate layer.</p> <p>Also note that when you are using a solution like the Apache webserver with <a class="reference external" href="http://modpython.org/">mod_python</a> or <a class="reference external" href="https://github.com/GrahamDumpleton/mod_wsgi">mod_wsgi</a>, then your Python code will be usually run in the context of the webserver's child processes. So if you are using -the <span class="docutils literal">PooledDB</span> module, and several of these child processes are running, +the <span class="docutils literal">pooled_db</span> module, and several of these child processes are running, you will have as much database connection pools. 
If these processes are running many threads, this may still be a reasonable approach, but if these processes don't spawn more than one worker thread, as in the case of Apache's "prefork" multi-processing module, this approach does not make sense. If you're running such a configuration, you should resort to a middleware -for connection pooling that supports multi-processing, such as <a class="reference external" href="http://www.pgpool.net/">pgpool</a> +for connection pooling that supports multi-processing, such as <a class="reference external" href="https://www.pgpool.net/">pgpool</a> or <a class="reference external" href="https://pgbouncer.github.io/">pgbouncer</a> for the PostgreSQL database.</p> -</div> -<div class="section" id="future"> -<h1>Future</h1> +</section> +<section id="future"> +<h2>Future</h2> <p>Some ideas for future improvements:</p> <ul class="simple"> <li><p>Alternatively to the maximum number of uses of a connection, implement a maximum time to live for connections.</p></li> -<li><p>Create modules <span class="docutils literal">MonitorDB</span> and <span class="docutils literal">MonitorPg</span> that will run in a separate +<li><p>Create modules <span class="docutils literal">monitor_db</span> and <span class="docutils literal">monitor_pg</span> that will run in a separate thread, monitoring the pool of the idle connections and maybe also the shared connections respectively the thread-affine connections. 
If a disrupted connection is detected, then it will be reestablished automatically @@ -466,16 +461,14 @@ <h1>Future</h1> the connection pool every day shortly before the users arrive.</p></li> <li><p>Optionally log usage, bad connections and exceeding of limits.</p></li> </ul> -</div> -<div class="section" id="bug-reports-and-feedback"> -<h1>Bug reports and feedback</h1> -<p>Please send bug reports, patches and feedback directly to the author -(using the email address given below).</p> -<p>If there are Webware related problems, these can also be discussed in -the <a class="reference external" href="https://lists.sourceforge.net/lists/listinfo/webware-discuss">Webware for Python mailing list</a>.</p> -</div> -<div class="section" id="links"> -<h1>Links</h1> +</section> +<section id="bug-reports-and-feedback"> +<h2>Bug reports and feedback</h2> +<p>You can transmit bug reports, patches and feedback by creating <a class="reference external" href="https://github.com/WebwareForPython/DBUtils/issues">issues</a> or +<a class="reference external" href="https://github.com/WebwareForPython/DBUtils/pulls">pull requests</a> on the GitHub project page for DBUtils.</p> +</section> +<section id="links"> +<h2>Links</h2> <p>Some links to related and alternative software:</p> <ul class="simple"> <li><p><a class="reference external" href="https://github.com/WebwareForPython/DBUtils">DBUtils</a></p></li> @@ -483,20 +476,20 @@ <h1>Links</h1> <li><p><a class="reference external" href="https://webwareforpython.github.io/w4py/">Webware for Python</a> framework</p></li> <li><p>Python <a class="reference external" href="https://www.python.org/dev/peps/pep-0249/">DB-API 2</a></p></li> <li><p><a class="reference external" href="https://www.postgresql.org/">PostgreSQL</a> database</p></li> -<li><p><a class="reference external" href="http://www.pygresql.org/">PyGreSQL</a> Python adapter for PostgreSQL</p></li> -<li><p><a class="reference external" href="http://www.pgpool.net/">pgpool</a> 
middleware for PostgreSQL connection pooling</p></li> +<li><p><a class="reference external" href="https://www.pygresql.org/">PyGreSQL</a> Python adapter for PostgreSQL</p></li> +<li><p><a class="reference external" href="https://www.pgpool.net/">pgpool</a> middleware for PostgreSQL connection pooling</p></li> <li><p><a class="reference external" href="https://pgbouncer.github.io/">pgbouncer</a> lightweight PostgreSQL connection pooling</p></li> <li><p><a class="reference external" href="http://www.sqlobject.org/">SQLObject</a> object-relational mapper</p></li> -<li><p><a class="reference external" href="http://www.sqlalchemy.org">SQLAlchemy</a> object-relational mapper</p></li> +<li><p><a class="reference external" href="https://www.sqlalchemy.org">SQLAlchemy</a> object-relational mapper</p></li> </ul> -</div> -<div class="section" id="credits"> -<h1>Credits</h1> +</section> +<section id="credits"> +<h2>Credits</h2> <dl class="field-list simple"> -<dt>Author</dt> -<dd><p>Christoph Zwerschke <<a class="reference external" href="mailto:cito@online.de">cito@online.de</a>></p> +<dt>Author<span class="colon">:</span></dt> +<dd><p><a class="reference external" href="https://github.com/Cito">Christoph Zwerschke</a></p> </dd> -<dt>Contributions</dt> +<dt>Contributions<span class="colon">:</span></dt> <dd><p>DBUtils uses code, input and suggestions made by Ian Bicking, Chuck Esterbrook (Webware for Python), Dan Green (DBTools), Jay Love, Michael Palmer, Tom Schwaller, Geoffrey Talvola, @@ -504,14 +497,14 @@ <h1>Credits</h1> Matthew Harriger, Gregory Piñero and Josef van Eenbergen.</p> </dd> </dl> -</div> -<div class="section" id="copyright-and-license"> -<h1>Copyright and License</h1> -<p>Copyright © 2005-2018 by Christoph Zwerschke. +</section> +<section id="copyright-and-license"> +<h2>Copyright and License</h2> +<p>Copyright © 2005-2024 by Christoph Zwerschke. 
All Rights Reserved.</p> <p>DBUtils is free and open source software, licensed under the <a class="reference external" href="https://opensource.org/licenses/MIT">MIT license</a>.</p> -</div> -</div> +</section> +</main> </body> </html> diff --git a/DBUtils/Docs/UsersGuide.rst b/docs/main.rst similarity index 63% rename from DBUtils/Docs/UsersGuide.rst rename to docs/main.rst index 5c17298..816ae80 100644 --- a/DBUtils/Docs/UsersGuide.rst +++ b/docs/main.rst @@ -1,11 +1,10 @@ DBUtils User's Guide ++++++++++++++++++++ -:Version: 1.4 -:Released: 09/26/20 +:Version: 3.1.0 :Translations: English | German_ -.. _German: UsersGuide.de.html +.. _German: main.de.html .. contents:: Contents @@ -14,10 +13,12 @@ Synopsis ======== DBUtils_ is a suite of Python modules allowing to connect in a safe and -efficient way between a threaded Python_ application and a database. DBUtils -has been written in view of `Webware for Python`_ as the application and -PyGreSQL_ as the adapter to a PostgreSQL_ database, but it can be used -for any other Python application and `DB-API 2`_ conformant database adapter. +efficient way between a threaded Python_ application and a database. + +DBUtils has been originally written particularly for `Webware for Python`_ as +the application and PyGreSQL_ as the adapter to a PostgreSQL_ database, but it +can meanwhile be used for any other Python application and `DB-API 2`_ +conformant database adapter. Modules @@ -27,39 +28,39 @@ The DBUtils suite is realized as a Python package containing two subsets of modules, one for use with arbitrary DB-API 2 modules, the other one for use with the classic PyGreSQL module. 
-+-------------------+------------------------------------------+ -| Universal DB-API 2 variant | -+===================+==========================================+ -| SteadyDB.py | Hardened DB-API 2 connections | -+-------------------+------------------------------------------+ -| PooledDB.py | Pooling for DB-API 2 connections | -+-------------------+------------------------------------------+ -| PersistentDB.py | Persistent DB-API 2 connections | -+-------------------+------------------------------------------+ -| SimplePooledDB.py | Simple pooling for DB-API 2 | -+-------------------+------------------------------------------+ - -+-------------------+------------------------------------------+ -| Classic PyGreSQL variant | -+===================+==========================================+ -| SteadyPg.py | Hardened classic PyGreSQL connections | -+-------------------+------------------------------------------+ -| PooledPg.py | Pooling for classic PyGreSQL connections | -+-------------------+------------------------------------------+ -| PersistentPg.py | Persistent classic PyGreSQL connections | -+-------------------+------------------------------------------+ -| SimplePooledPg.py | Simple pooling for classic PyGreSQL | -+-------------------+------------------------------------------+ ++------------------+------------------------------------------+ +| Universal DB-API 2 variant | ++==================+==========================================+ +| steady_db | Hardened DB-API 2 connections | ++------------------+------------------------------------------+ +| pooled_db | Pooling for DB-API 2 connections | ++------------------+------------------------------------------+ +| persistent_db | Persistent DB-API 2 connections | ++------------------+------------------------------------------+ +| simple_pooled_db | Simple pooling for DB-API 2 | ++------------------+------------------------------------------+ + ++------------------+------------------------------------------+ +| 
Classic PyGreSQL variant | ++==================+==========================================+ +| steady_pg | Hardened classic PyGreSQL connections | ++------------------+------------------------------------------+ +| pooled_pg | Pooling for classic PyGreSQL connections | ++------------------+------------------------------------------+ +| persistent_pg | Persistent classic PyGreSQL connections | ++------------------+------------------------------------------+ +| simple_pooled_pg | Simple pooling for classic PyGreSQL | ++------------------+------------------------------------------+ The dependencies of the modules in the universal DB-API 2 variant are as indicated in the following diagram: -.. image:: dbdep.gif +.. image:: dependencies_db.png The dependencies of the modules in the classic PyGreSQL variant are similar: -.. image:: pgdep.gif +.. image:: dependencies_pg.png Download @@ -78,39 +79,23 @@ The source code repository can be found here on GitHub:: Installation ============ -Installation as a standalone (top-level) package ------------------------------------------------- -If you intend to use DBUtils from other applications than Webware for Python, -it is recommended to install the package in the usual way:: +Installation +------------ +The package can be installed in the usual way:: python setup.py install -You can also use `pip`_ for download and installation:: +It is even easier to download and install the package in one go using `pip`_:: pip install DBUtils .. _pip: https://pip.pypa.io/ -Installation as a Webware for Python subpackage (plug-in) ---------------------------------------------------------- -If you want to use DBUtils as a supplement for the Webware for Python -framework only, you should install it as a Webware plug-in:: - - python setup.py install --install-lib=/path/to/Webware - -Replace ``/path/to/Webware`` with the path to the root directory of -your Webware for Python installation. 
You will also need to run the -Webware installer if this has not been done already or if you want to -integrate the DBUtils documentation into the Webware documentation:: - - cd path/to/Webware - python install.py - Requirements ============ -DBUtils supports Python_ version 2.6 and Python versions 3.5 to 3.8. +DBUtils supports Python_ versions 3.7 to 3.12. The modules in the classic PyGreSQL variant need PyGreSQL_ version 4.0 or above, while the modules in the universal DB-API 2 variant run with @@ -123,22 +108,27 @@ Functionality This section will refer to the names in the DB-API 2 variant only, but the same applies to the classic PyGreSQL variant. -SimplePooledDB --------------- -``DBUtils.SimplePooledDB`` is a very basic reference implementation of -a pooled database connection. It is much less sophisticated than the -regular ``PooledDB`` module and is particularly lacking the failover -functionality. ``DBUtils.SimplePooledDB`` is essentially the same as -the ``MiscUtils.DBPool`` module that is part of Webware for Python. +DBUtils installs itself as a package ``dbutils`` containing all the modules +that are described in this guide. Each of these modules contains essentially +one class with an analogous name that provides the corresponding functionality. +For instance, the module ``dbutils.pooled_db`` contains the class ``PooledDB``. + +SimplePooledDB (simple_pooled_db) +--------------------------------- +The class ``SimplePooledDB`` in ``dbutils.simple_pooled_db`` is a very basic +reference implementation of a pooled database connection. It is much less +sophisticated than the regular ``pooled_db`` module and is particularly lacking +the failover functionality. ``dbutils.simple_pooled_db`` is essentially the +same as the ``MiscUtils.DBPool`` module that is part of Webware for Python. You should consider it a demonstration of concept rather than something that should go into production. 
-SteadyDB -------- -``DBUtils.SteadyDB`` is a module implementing "hardened" connections -to a database, based on ordinary connections made by any DB-API 2 -database module. A "hardened" connection will transparently reopen upon -access when it has been closed or the database connection has been lost +SteadyDBConnection (steady_db) +------------------------------ +The class ``SteadyDBConnection`` in the module ``dbutils.steady_db`` implements +"hardened" connections to a database, based on ordinary connections made by any +DB-API 2 database module. A "hardened" connection will transparently reopen +upon access when it has been closed or the database connection has been lost or when it is used more often than an optional usage limit. A typical example where this is needed is when the database has been @@ -147,18 +137,21 @@ to the database, or when your application accesses a remote database in a network that is separated by a firewall and the firewall has been restarted and lost its state. -Usually, you will not use the ``SteadyDB`` module directly; it merely serves -as a basis for the next two modules, ``PersistentDB`` and ``PooledDB``. +Usually, you will not use the ``steady_db`` module directly; it merely serves +as a basis for the next two modules, ``persistent_db`` and ``pooled_db``. -PersistentDB ------------- -``DBUtils.PersistentDB`` implements steady, thread-affine, persistent -connections to a database, using any DB-API 2 database module. +PersistentDB (persistent_db) +---------------------------- +The class ``PersistentDB`` in the module ``dbutils.persistent_db`` implements +steady, thread-affine, persistent connections to a database, using any DB-API 2 +database module. "Thread-affine" and "persistent" means that the individual +database connections stay assigned to the respective threads and will not be +closed during the lifetime of the threads.
The following diagram shows the connection layers involved when you -are using ``PersistentDB`` connections: +are using ``persistent_db`` connections: -.. image:: persist.gif +.. image:: persistent.png Whenever a thread opens a database connection for the first time, a new connection to the database will be opened that will be used from now on @@ -167,27 +160,27 @@ it will still be kept open so that the next time when a connection is requested by the same thread, this already opened connection can be used. The connection will be closed automatically when the thread dies. -In short: ``PersistentDB`` tries to recycle database connections to +In short: ``persistent_db`` tries to recycle database connections to increase the overall database access performance of your threaded application, but it makes sure that connections are never shared between threads. -Therefore, ``PersistentDB`` will work perfectly even if the underlying +Therefore, ``persistent_db`` will work perfectly even if the underlying DB-API module is not thread-safe at the connection level, and it will avoid problems when other threads change the database session or perform transactions spreading over more than one SQL command. -PooledDB --------- -``DBUtils.PooledDB`` implements a pool of steady, thread-safe cached -connections to a database which are transparently reused, using any -DB-API 2 database module. +PooledDB (pooled_db) +-------------------- +The class ``PooledDB`` in the module ``dbutils.pooled_db`` implements a pool +of steady, thread-safe cached connections to a database which are transparently +reused, using any DB-API 2 database module. The following diagram shows the connection layers involved when you -are using ``PooledDB`` connections: +are using ``pooled_db`` connections: -.. image:: pool.gif +.. 
image:: pooled.png -As the diagram indicates, ``PooledDB`` can share opened database connections +As the diagram indicates, ``pooled_db`` can share opened database connections between different threads. This will happen by default if you set up the connection pool with a positive value of ``maxshared`` and the underlying DB-API 2 is thread-safe at the connection level, but you can also request @@ -196,27 +189,27 @@ Besides the pool of shared connections, you can also set up a pool of at least ``mincached`` and at the most ``maxcached`` idle connections that will be used whenever a thread is requesting a dedicated database connection or the pool of shared connections is not yet full. When a thread closes a -connection that is not shared any more, it is returned back to the pool of +connection that is not shared anymore, it is returned back to the pool of idle connections so that it can be recycled again. If the underlying DB-API module is not thread-safe, thread locks will be -used to ensure that the ``PooledDB`` connections are thread-safe. So you +used to ensure that the ``pooled_db`` connections are thread-safe. So you don't need to worry about that, but you should be careful to use dedicated connections whenever you change the database session or perform transactions spreading over more than one SQL command. Which one to use? ----------------- -Both ``PersistentDB`` and ``PooledDB`` serve the same purpose to improve +Both ``persistent_db`` and ``pooled_db`` serve the same purpose to improve the database access performance by recycling database connections, while preserving stability even if database connection will be disrupted. So which of these two modules should you use? From the above explanations -it is clear that ``PersistentDB`` will make more sense if your application +it is clear that ``persistent_db`` will make more sense if your application keeps a constant number of threads which frequently use the database. 
In this case, you will always have the same amount of open database connections. However, if your application frequently starts and ends threads, then it -will be better to use ``PooledDB``. The latter will also allow more +will be better to use ``pooled_db``. The latter will also allow more fine-tuning, particularly if you are using a thread-safe DB-API 2 module. Since the interface of both modules is similar, you can easily switch from @@ -230,19 +223,19 @@ The usage of all the modules is similar, but there are also some differences in the initialization between the "Pooled" and "Persistent" variants and also between the universal DB-API 2 and the classic PyGreSQL variants. -We will cover here only the ``PersistentDB`` module and the more complex -``PooledDB`` module. For the details of the other modules, have a look +We will cover here only the ``persistent_db`` module and the more complex +``pooled_db`` module. For the details of the other modules, have a look at their module docstrings. 
Using the Python interpreter console, you can -display the documentation of the ``PooledDB`` module as follows (this +display the documentation of the ``pooled_db`` module as follows (this works analogously for the other modules):: - help(PooledDB) + help(pooled_db) -PersistentDB ------------- -In order to make use of the ``PersistentDB`` module, you first need to set +PersistentDB (persistent_db) +---------------------------- +In order to make use of the ``persistent_db`` module, you first need to set up a generator for your kind of database connections by creating an instance -of ``PersistentDB``, passing the following parameters: +of ``persistent_db``, passing the following parameters: * ``creator``: either an arbitrary function returning new DB-API 2 connection objects or a DB-API 2 compliant database module @@ -257,7 +250,8 @@ of ``PersistentDB``, passing the following parameters: * ``failures``: an optional exception class or a tuple of exception classes for which the connection failover mechanism shall be applied, - if the default (OperationalError, InternalError) is not adequate + if the default (OperationalError, InterfaceError, InternalError) + is not adequate for the used database module * ``ping``: an optional flag controlling when connections are checked with the ``ping()`` method if such a method is available @@ -282,7 +276,7 @@ For instance, if you are using ``pgdb`` as your DB-API 2 database module and want every connection to your local database ``mydb`` to be reused 1000 times:: import pgdb # import used DB-API 2 module - from DBUtils.PersistentDB import PersistentDB + from dbutils.persistent_db import PersistentDB persist = PersistentDB(pgdb, 1000, database='mydb') Once you have set up the generator with these parameters, you can request @@ -291,29 +285,30 @@ database connections of that kind:: db = persist.connection() You can use these connections just as if they were ordinary DB-API 2 -connections. 
Actually what you get is the hardened ``SteadyDB`` version of +connections. Actually what you get is the hardened ``steady_db`` version of the underlying DB-API 2 connection. Closing a persistent connection with ``db.close()`` will be silently ignored since it would be reopened at the next usage anyway and contrary to the intent of having persistent connections. Instead, the connection will be automatically closed when the thread dies. -You can change this behavior be setting the ``closeable`` parameter. +You can change this behavior by setting the ``closeable`` parameter. -Note that you need to explicitly start transactions by calling the -``begin()`` method. This ensures that the transparent reopening will be -suspended until the end of the transaction, and that the connection -will be rolled back before being reused by the same thread. +.. warning:: + Note that you need to explicitly start transactions by calling the + ``begin()`` method. This ensures that the transparent reopening will be + suspended until the end of the transaction, and that the connection + will be rolled back before being reused by the same thread. By setting the ``threadlocal`` parameter to ``threading.local``, getting connections may become a bit faster, but this may not work in all environments (for instance, ``mod_wsgi`` is known to cause problems since it clears the ``threading.local`` data between requests). -PooledDB --------- -In order to make use of the ``PooledDB`` module, you first need to set up the -database connection pool by creating an instance of ``PooledDB``, passing the +PooledDB (pooled_db) +-------------------- +In order to make use of the ``pooled_db`` module, you first need to set up the +database connection pool by creating an instance of ``pooled_db``, passing the following parameters: * ``creator``: either an arbitrary function returning new DB-API 2 @@ -349,12 +344,13 @@ following parameters: prepare the session, e.g. 
``["set datestyle to german", ...]`` * ``reset``: how connections should be reset when returned to the pool - (``False`` or ``None`` to rollback transcations started with ``begin()``, + (``False`` or ``None`` to rollback transactions started with ``begin()``, the default value ``True`` always issues a rollback for safety's sake) * ``failures``: an optional exception class or a tuple of exception classes for which the connection failover mechanism shall be applied, - if the default (OperationalError, InternalError) is not adequate + if the default (OperationalError, InterfaceError, InternalError) + is not adequate for the used database module * ``ping``: an optional flag controlling when connections are checked with the ``ping()`` method if such a method is available @@ -372,7 +368,7 @@ For instance, if you are using ``pgdb`` as your DB-API 2 database module and want a pool of at least five connections to your local database ``mydb``:: import pgdb # import used DB-API 2 module - from DBUtils.PooledDB import PooledDB + from dbutils.pooled_db import PooledDB pool = PooledDB(pgdb, 5, database='mydb') Once you have set up the connection pool you can request database connections @@ -381,7 +377,7 @@ from that pool:: db = pool.connection() You can use these connections just as if they were ordinary DB-API 2 -connections. Actually what you get is the hardened ``SteadyDB`` version of +connections. Actually what you get is the hardened ``steady_db`` version of the underlying DB-API 2 connection. Please note that the connection may be shared with other threads by default @@ -394,10 +390,10 @@ Instead of this, you can also get a dedicated connection as follows:: db = pool.dedicated_connection() -If you don't need it any more, you should immediately return it to the +If you don't need it anymore, you should immediately return it to the pool with ``db.close()``. You can get another connection in the same way. 
-*Warning:* In a threaded environment, never do the following:: +⚠Warning: In a threaded environment, never do the following:: pool.connection().cursor().execute(...) @@ -412,35 +408,41 @@ object stays alive as long as you are using it, like that:: cur.close() # or del cur db.close() # or del db -Note that you need to explicitly start transactions by calling the -``begin()`` method. This ensures that the connection will not be shared -with other threads, that the transparent reopening will be suspended -until the end of the transaction, and that the connection will be rolled -back before being given back to the connection pool. - -Usage in Webware for Python ---------------------------- -If you are using DBUtils in order to access a database from `Webware -for Python`_ servlets, you need to make sure that you set up your -database connection generators only once when the application starts, -and not every time a servlet instance is created. For this purpose, -you can add the necessary code to the module or class initialization -code of your base servlet class, or you can use the ``contextInitialize()`` -function in the ``__init__.py`` script of your application context. - -The directory ``Examples`` that is part of the DButils distribution -contains an example context for Webware for Python that uses a small -demo database designed to track the attendees for a series of seminars -(the idea for this example has been taken from the article -"`The Python DB-API`_" by Andrew Kuchling). - -The example context can be configured by either creating a config file -``Configs/Database.config`` or by directly changing the default parameters -in the example servlet ``Examples/DBUtilsExample.py``. This way you can -set an appropriate database user and password, and you can choose the -underlying database module (PyGreSQL classic or any DB-API 2 module). 
-If the setting ``maxcached`` is present, then the example servlet will use -the "Pooled" variant, otherwise it will use the "Persistent" variant. +You can also use context managers for simpler code:: + + with pool.connection() as db: + with db.cursor() as cur: + cur.execute(...) + res = cur.fetchone() + +.. warning:: + Note that you need to explicitly start transactions by calling the + ``begin()`` method. This ensures that the connection will not be shared + with other threads, that the transparent reopening will be suspended + until the end of the transaction, and that the connection will be rolled + back before being given back to the connection pool. + + +Advanced Usage +============== +Sometimes you may want to prepare connections before they are used by +DBUtils, in ways that are not possible by just using the right parameters. +For instance, ``pyodbc`` may require to configure connections by calling +the ``setencoding()`` method of the connection. You can do this by passing +a modified ``connect()`` function to ``PersistentDB`` or ``PooledDB`` as +``creator`` (the first argument), like this:: + + from pyodbc import connect + from dbutils.pooled_db import PooledDB + + def creator(): + con = connect(...) + con.setdecoding(...) + return con + + creator.dbapi = pyodbc + + db_pool = PooledDB(creator, mincached=5) Notes @@ -453,7 +455,7 @@ from DBUtils to split the pooling out into a separate layer. Also note that when you are using a solution like the Apache webserver with mod_python_ or mod_wsgi_, then your Python code will be usually run in the context of the webserver's child processes. So if you are using -the ``PooledDB`` module, and several of these child processes are running, +the ``pooled_db`` module, and several of these child processes are running, you will have as much database connection pools. 
If these processes are running many threads, this may still be a reasonable approach, but if these processes don't spawn more than one worker thread, as in the case of Apache's @@ -469,7 +471,7 @@ Some ideas for future improvements: * Alternatively to the maximum number of uses of a connection, implement a maximum time to live for connections. -* Create modules ``MonitorDB`` and ``MonitorPg`` that will run in a separate +* Create modules ``monitor_db`` and ``monitor_pg`` that will run in a separate thread, monitoring the pool of the idle connections and maybe also the shared connections respectively the thread-affine connections. If a disrupted connection is detected, then it will be reestablished automatically @@ -486,11 +488,12 @@ Some ideas for future improvements: Bug reports and feedback ======================== -Please send bug reports, patches and feedback directly to the author -(using the email address given below). +You can transmit bug reports, patches and feedback by creating issues_ or +`pull requests`_ on the GitHub project page for DBUtils. -If there are Webware related problems, these can also be discussed in -the `Webware for Python mailing list`_. +.. _GitHub project page: https://github.com/WebwareForPython/DBUtils +.. _Issues: https://github.com/WebwareForPython/DBUtils/issues +.. _Pull Requests: https://github.com/WebwareForPython/DBUtils/pulls Links @@ -515,20 +518,20 @@ Some links to related and alternative software: .. _DB-API 2: https://www.python.org/dev/peps/pep-0249/ .. _The Python DB-API: http://www.linuxjournal.com/article/2605 .. _PostgresQL: https://www.postgresql.org/ -.. _PyGreSQL: http://www.pygresql.org/ +.. _PyGreSQL: https://www.pygresql.org/ .. _SQLObject: http://www.sqlobject.org/ -.. _SQLAlchemy: http://www.sqlalchemy.org -.. _Apache: http://httpd.apache.org/ +.. _SQLAlchemy: https://www.sqlalchemy.org +.. _Apache: https://httpd.apache.org/ .. _mod_python: http://modpython.org/ .. 
_mod_wsgi: https://github.com/GrahamDumpleton/mod_wsgi -.. _pgpool: http://www.pgpool.net/ +.. _pgpool: https://www.pgpool.net/ .. _pgbouncer: https://pgbouncer.github.io/ Credits ======= -:Author: Christoph Zwerschke <cito@online.de> +:Author: `Christoph Zwerschke`_ :Contributions: DBUtils uses code, input and suggestions made by Ian Bicking, Chuck Esterbrook (Webware for Python), Dan Green (DBTools), @@ -536,11 +539,13 @@ Credits Warren Smith (DbConnectionPool), Ezio Vernacotola, Jehiah Czebotar, Matthew Harriger, Gregory Piñero and Josef van Eenbergen. +.. _Christoph Zwerschke: https://github.com/Cito + Copyright and License ===================== -Copyright © 2005-2018 by Christoph Zwerschke. +Copyright © 2005-2024 by Christoph Zwerschke. All Rights Reserved. DBUtils is free and open source software, diff --git a/docs/make.py b/docs/make.py new file mode 100755 index 0000000..a00977b --- /dev/null +++ b/docs/make.py @@ -0,0 +1,36 @@ +#!/usr/bin/python3.11 + +"""Build HTML from reST files.""" + +from pathlib import Path + +from docutils.core import publish_file + +print("Creating the documentation...") + +for rst_file in Path().glob('*.rst'): + rst_path = Path(rst_file) + name = Path(rst_file).stem + lang = Path(name).suffix + if lang.startswith('.'): + lang = lang[1:] + if lang == 'zh': + lang = 'zh_cn' + else: + lang = 'en' + html_path = Path(name + '.html') + print(name, lang) + + with rst_path.open(encoding='utf-8-sig') as source, \ + html_path.open('w', encoding='utf-8') as destination: + output = publish_file( + writer_name='html5', source=source, destination=destination, + enable_exit_status=True, + settings_overrides={ + "stylesheet_path": 'doc.css', + "embed_stylesheet": False, + "toc_backlinks": False, + "language_code": lang, + "exit_status_level": 2}) + +print("Done.") diff --git a/docs/persistent.png b/docs/persistent.png new file mode 100644 index 0000000..3eb2782 Binary files /dev/null and b/docs/persistent.png differ diff --git a/docs/pooled.png 
b/docs/pooled.png new file mode 100644 index 0000000..dc7ea83 Binary files /dev/null and b/docs/pooled.png differ diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..02a8ff7 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,142 @@ +[build-system] +build-backend = "setuptools.build_meta" +requires = [ + "setuptools>=68", +] + +[project] +name = "DBUtils" +version = "3.1.0" +description = "Database connections for multi-threaded environments." +license = {text = "MIT License"} +authors = [{name = "Christoph Zwerschke", email = "cito@online.de"}] +requires-python = ">=3.7" +classifiers = [ + "Development Status :: 5 - Production/Stable", + "Environment :: Web Environment", + "Intended Audience :: Developers", + "License :: OSI Approved :: MIT License", + "Operating System :: OS Independent", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Topic :: Database", + "Topic :: Internet :: WWW/HTTP :: Dynamic Content", + "Topic :: Software Development :: Libraries :: Python Modules", +] +[project.optional-dependencies] +pg = [ + "PyGreSQL>=5", +] +docs = [ + "docutils", +] +tests = [ + "pytest>=7", + "ruff", +] + +[project.readme] +file = "README.md" +content-type = "text/markdown" + +[project.urls] +Homepage = "https://webwareforpython.github.io/DBUtils/" +Download = "https://pypi.org/project/DBUtils/" +Documentation = "https://webwareforpython.github.io/DBUtils/main.html" +Changelog = "https://webwareforpython.github.io/DBUtils/changelog.html" +"Issue Tracker" = "https://github.com/WebwareForPython/DBUtils/issues" +"Source Code" = "https://github.com/WebwareForPython/DBUtils" + +[tool.setuptools] +packages = ["dbutils"] +platforms = ["any"] +include-package-data 
= false + +[tool.ruff] +line-length = 79 +target-version = "py37" + +[tool.ruff.lint] +select = [ + "A", # flake8-builtins + # "ANN", # flake8-annotations + "ARG", # flake8-unused-arguments + "B", # flake8-bugbear + # "BLE", # flake8-blind-except + "C4", # flake8-comprehensions + "C90", # McCabe cyclomatic complexity + "COM", # flake8-commas + "D", # pydocstyle + "DTZ", # flake8-datetimez + "E", # pycodestyle + # "EM", # flake8-errmsg + "ERA", # eradicate + "EXE", # flake8-executable + "F", # Pyflakes + # "FBT", # flake8-boolean-trap + "G", # flake8-logging-format + "I", # isort + "ICN", # flake8-import-conventions + "INP", # flake8-no-pep420 + "INT", # flake8-gettext + "ISC", # flake8-implicit-str-concat + "N", # pep8-naming + "PGH", # pygrep-hooks + "PIE", # flake8-pie + "PL", # Pylint + "PT", # flake8-pytest-style + "PTH", # flake8-use-pathlib + "PYI", # flake8-pyi + # "Q", # flake8-quotes + "RET", # flake8-return + "RSE", # flake8-raise + "RUF", # Ruff-specific rules + "S", # flake8-bandit + # "SLF", # flake8-self + "SIM", # flake8-simplify + "T10", # flake8-debugger + "T20", # flake8-print + "TCH", # flake8-type-checking + "TID", # flake8-tidy-imports + # "TRY", # tryceratops + "UP", # pyupgrade + "W", # pycodestyle + "YTT", # flake8-2020 +] +# Note: use `ruff rule ...` to see explanations of rules +ignore = [ + "D203", # no blank line before class docstring + "D213", # multi-line docstrings should not start at second line +] + +[tool.ruff.lint.mccabe] +max-complexity = 30 + +[tool.ruff.lint.flake8-quotes] +inline-quotes = "double" + +[tool.ruff.lint.pylint] +max-args = 12 +max-branches = 35 +max-statements = 95 + +[tool.ruff.lint.per-file-ignores] +"docs/*" = [ + "INP001", # allow stand-alone scripts + "T201", # allow print statements +] +"tests/*" = [ + "D", # no docstrings necessary here + "PLR2004", # allow magic values + "S101", # allow assert statements +] + +[tool.codespell] +skip = '.git,.tox,.venv,*.de.html,*.de.rst,build,dist,local' +quiet-level = 2 
diff --git a/setup.py b/setup.py deleted file mode 100755 index 7e97f95..0000000 --- a/setup.py +++ /dev/null @@ -1,48 +0,0 @@ -"""Setup Script for DBUtils""" - -import warnings -from distutils.core import setup -from sys import version_info - -py_version = version_info[:2] -if py_version != (2, 7) and not (3, 5) <= py_version < (4, 0): - raise ImportError('Python %d.%d is not supported by DBUtils.' % py_version) - -warnings.filterwarnings('ignore', 'Unknown distribution option') - -__version__ = '1.4' - -readme = open('README.md').read() - -setup( - name='DBUtils', - version=__version__, - description='Database connections for multi-threaded environments.', - long_description=readme, - long_description_content_type='text/markdown', - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Environment :: Web Environment', - 'Intended Audience :: Developers', - 'License :: OSI Approved :: MIT License', - 'Programming Language :: Python', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.5', - 'Programming Language :: Python :: 3.6', - 'Programming Language :: Python :: 3.7', - 'Programming Language :: Python :: 3.8', - 'Programming Language :: Python :: 3.9', - 'Topic :: Database', - 'Topic :: Internet :: WWW/HTTP :: Dynamic Content', - 'Topic :: Software Development :: Libraries :: Python Modules' - ], - author='Christoph Zwerschke', - author_email='cito@online.de', - url='https://webwareforpython.github.io/DBUtils/', - platforms=['any'], - license='MIT License', - packages=['DBUtils', 'DBUtils.Examples', 'DBUtils.Tests'], - package_data={'DBUtils': ['Docs/*']} -) diff --git a/setversion.py b/setversion.py deleted file mode 100755 index 2164a07..0000000 --- a/setversion.py +++ /dev/null @@ -1,123 +0,0 @@ -#!/usr/bin/env python - -"""Set version. 
- -This script sets the DBUtils version number information -consistently in all files of the distribution. - -""" - -from __future__ import print_function - -import os -import sys -import re -from glob import glob - -# Version format is (Major, Minor, Sub, Alpha/Beta/etc) -# The Sub is optional, and if 0 is not returned. -# Examples: (0, 8, 1, 'b1'), (0, 8, 2) or (0, 9, 0, 'rc1') -# releaseDate format should be 'MM/DD/YY'. - -# Update this to change the current version and release date: -# version = ('X', 'Y', 0) -version = (1, 4, 0) -# releaseDate = '@@/@@/@@' -releaseDate = '09/26/20' - -# Verbose output (output unchanged files also): -verbose = False - -path = os.path.dirname(os.path.abspath(sys.argv[0])) -sys.path.append(path) -os.chdir(path) -print("Setversion", path) - - -def versionString(version): - """Create version string. - - For a sequence containing version information such as (2, 0, 0, 'pre'), - this returns a printable string such as '2.0pre'. - The micro version number is only excluded from the string if it is zero. 
- - """ - ver = list(map(str, version)) - numbers, rest = ver[:2 if ver[2] == '0' else 3], ver[3:] - return '.'.join(numbers) + '-'.join(rest) - - -versionString = versionString(version) - -if versionString == 'X.Y': - print("Please set the version.") - sys.exit(1) -if releaseDate == '@@/@@/@@': - print("Please set the release date.") - sys.exit(1) - - -class Replacer: - """Class to handle substitutions in a file.""" - - def __init__(self, *args): - self._subs = list(args) - - def add(self, search, replace): - self._subs.append((re.compile(search, re.M), replace)) - - def replaceInStr(self, data): - for search, replace in self._subs: - data = re.sub(search, replace, data) - return data - - def replaceInFile(self, filename): - data = open(filename).read() - newData = self.replaceInStr(data) - if data == newData: - if verbose: - print("Unchanged", filename) - else: - print("Updating", filename) - open(filename, 'w').write(newData) - - def replaceGlob(self, pattern): - for file in glob(pattern): - if os.path.exists(file): - self.replaceInFile(file) - - -pyReplace = Replacer() -pyReplace.add(r"(__version__\s*=\s*)'.*'", r"\g<1>%s" % repr(versionString)) - -propReplace = Replacer() -propReplace.add(r"(version\s*=\s*).*", r"\g<1>%s" % repr(version)) -propReplace.add(r"(releaseDate\s*=\s*).*", r"\g<1>%s" % repr(releaseDate)) - -htmlReplace = Replacer() -htmlReplace.add( - r"<!--\s*version\s*-->[^<]*<!--\s*/version\s*-->", - r"<!-- version --> %s <!-- /version -->" % versionString) -htmlReplace.add( - r"<!--\s*relDate\s*-->[^<]*<!--\s*/relDate\s*-->", - r"<!-- relDate --> %s <!-- /relDate -->" % releaseDate) - -rstReplace = Replacer() -rstReplace.add( - r"^:(.+)?: (X|\d+)\.(Y|\d+)(\.\d+)?$", r":\1: %s" % versionString) -rstReplace.add( - r"^:(.+)?: (@|\d){2}/(@|\d){2}/(@|\d){2}$", r":\1: %s" % releaseDate) - -# Replace in Python files: -pyReplace.replaceGlob('*.py') -pyReplace.replaceGlob('DBUtils/*.py') -pyReplace.replaceGlob('DBUtils/*/*.py') - -# Replace in Properties 
files: -propReplace.replaceGlob('DBUtils/Properties.py') - -# Replace in existing HTML: -htmlReplace.replaceGlob('DBUtils/Docs/*.html') - -# Replace in reStructuredText files: -rstReplace.replaceGlob('DBUtils/Docs/*.rst') diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 0000000..3c7c5ff --- /dev/null +++ b/tests/__init__.py @@ -0,0 +1,4 @@ +"""The DBUtils tests package.""" + +# make sure the mock pg module is installed +from . import mock_pg as pg # noqa: F401 diff --git a/DBUtils/Tests/mock_db.py b/tests/mock_db.py similarity index 93% rename from DBUtils/Tests/mock_db.py rename to tests/mock_db.py index 42296cb..32ad498 100644 --- a/DBUtils/Tests/mock_db.py +++ b/tests/mock_db.py @@ -1,8 +1,23 @@ """This module serves as a mock object for the DB-API 2 module""" +import sys + +import pytest + +__all__ = ['dbapi'] + + threadsafety = 2 +@pytest.fixture +def dbapi(): + """Get mock DB API 2 module.""" + mock_db = sys.modules[__name__] + mock_db.threadsafety = 2 + return mock_db + + class Error(Exception): pass @@ -15,6 +30,10 @@ class OperationalError(DatabaseError): pass +class InterfaceError(DatabaseError): + pass + + class InternalError(DatabaseError): pass diff --git a/DBUtils/Tests/mock_pg.py b/tests/mock_pg.py similarity index 85% rename from DBUtils/Tests/mock_pg.py rename to tests/mock_pg.py index 48fe998..b341bea 100644 --- a/DBUtils/Tests/mock_pg.py +++ b/tests/mock_pg.py @@ -22,10 +22,10 @@ class ProgrammingError(DatabaseError): def connect(*args, **kwargs): - return pgConnection(*args, **kwargs) + return PgConnection(*args, **kwargs) -class pgConnection: +class PgConnection: """The underlying pg API connection class.""" def __init__(self, dbname=None, user=None): @@ -59,13 +59,14 @@ def query(self, qstr): raise InternalError if qstr in ('begin', 'end', 'commit', 'rollback'): self.session.append(qstr) - elif qstr.startswith('select '): + return None + if qstr.startswith('select '): self.num_queries += 1 return qstr[7:] - elif 
qstr.startswith('set '): + if qstr.startswith('set '): self.session.append(qstr[4:]) - else: - raise ProgrammingError + return None + raise ProgrammingError class DB: @@ -77,17 +78,15 @@ def __init__(self, *args, **kw): self.__args = args, kw def __getattr__(self, name): - if self.db: - return getattr(self.db, name) - else: + if not self.db: raise AttributeError + return getattr(self.db, name) def close(self): - if self.db: - self.db.close() - self.db = None - else: + if not self.db: raise InternalError + self.db.close() + self.db = None def reopen(self): if self.db: diff --git a/tests/test_persistent_db.py b/tests/test_persistent_db.py new file mode 100644 index 0000000..32afd21 --- /dev/null +++ b/tests/test_persistent_db.py @@ -0,0 +1,262 @@ +"""Test the PersistentDB module. + +Note: +We don't test performance here, so the test does not predicate +whether PersistentDB actually will help in improving performance or not. +We also assume that the underlying SteadyDB connections are tested. 
+ +Copyright and credit info: + +* This test was contributed by Christoph Zwerschke +""" + +from queue import Empty, Queue +from threading import Thread + +import pytest + +from dbutils.persistent_db import NotSupportedError, PersistentDB, local + +from .mock_db import dbapi # noqa: F401 + + +def test_version(): + from dbutils import __version__, persistent_db + assert persistent_db.__version__ == __version__ + assert PersistentDB.version == __version__ + + +@pytest.mark.parametrize("threadsafety", [None, 0]) +def test_no_threadsafety(dbapi, threadsafety): # noqa: F811 + dbapi.threadsafety = threadsafety + with pytest.raises(NotSupportedError): + PersistentDB(dbapi) + + +@pytest.mark.parametrize("closeable", [False, True]) +def test_close(dbapi, closeable): # noqa: F811 + persist = PersistentDB(dbapi, closeable=closeable) + db = persist.connection() + assert db._con.valid is True + db.close() + assert closeable ^ db._con.valid + db.close() + assert closeable ^ db._con.valid + db._close() + assert db._con.valid is False + db._close() + assert db._con.valid is False + + +def test_connection(dbapi): # noqa: F811 + persist = PersistentDB(dbapi) + db = persist.connection() + db_con = db._con + assert db_con.database is None + assert db_con.user is None + db2 = persist.connection() + assert db == db2 + db3 = persist.dedicated_connection() + assert db == db3 + db3.close() + db2.close() + db.close() + + +def test_threads(dbapi): # noqa: F811 + num_threads = 3 + persist = PersistentDB(dbapi, closeable=True) + query_queue, result_queue = [], [] + for _i in range(num_threads): + query_queue.append(Queue(1)) + result_queue.append(Queue(1)) + + def run_queries(idx): + this_db = persist.connection() + db = None + while True: + try: + q = query_queue[idx].get(timeout=1) + except Empty: + q = None + if not q: + break + db = persist.connection() + if db != this_db: + res = 'error - not persistent' + elif q == 'ping': + res = 'ok - thread alive' + elif q == 'close': + db.close() + 
res = 'ok - connection closed' + else: + cursor = db.cursor() + cursor.execute(q) + res = cursor.fetchone() + cursor.close() + res = f'{idx}({db._usage}): {res}' + result_queue[idx].put(res, timeout=1) + if db: + db.close() + + threads = [] + for i in range(num_threads): + thread = Thread(target=run_queries, args=(i,)) + threads.append(thread) + thread.start() + for i in range(num_threads): + query_queue[i].put('ping', timeout=1) + for i in range(num_threads): + r = result_queue[i].get(timeout=1) + assert r == f'{i}(0): ok - thread alive' + assert threads[i].is_alive() + for i in range(num_threads): + for j in range(i + 1): + query_queue[i].put(f'select test{j}', timeout=1) + r = result_queue[i].get(timeout=1) + assert r == f'{i}({j + 1}): test{j}' + query_queue[1].put('select test4', timeout=1) + r = result_queue[1].get(timeout=1) + assert r == '1(3): test4' + query_queue[1].put('close', timeout=1) + r = result_queue[1].get(timeout=1) + assert r == '1(3): ok - connection closed' + for j in range(2): + query_queue[1].put(f'select test{j}', timeout=1) + r = result_queue[1].get(timeout=1) + assert r == f'1({j + 1}): test{j}' + for i in range(num_threads): + assert threads[i].is_alive() + query_queue[i].put('ping', timeout=1) + for i in range(num_threads): + r = result_queue[i].get(timeout=1) + assert r == f'{i}({i + 1}): ok - thread alive' + assert threads[i].is_alive() + for i in range(num_threads): + query_queue[i].put(None, timeout=1) + + +def test_maxusage(dbapi): # noqa: F811 + persist = PersistentDB(dbapi, 20) + db = persist.connection() + assert db._maxusage == 20 + for i in range(100): + cursor = db.cursor() + cursor.execute(f'select test{i}') + r = cursor.fetchone() + cursor.close() + assert r == f'test{i}' + assert db._con.valid is True + j = i % 20 + 1 + assert db._usage == j + assert db._con.num_uses == j + assert db._con.num_queries == j + + +def test_setsession(dbapi): # noqa: F811 + persist = PersistentDB(dbapi, 3, ('set datestyle',)) + db = 
persist.connection() + assert db._maxusage == 3 + assert db._setsession_sql == ('set datestyle',) + assert db._con.session == ['datestyle'] + cursor = db.cursor() + cursor.execute('set test') + cursor.fetchone() + cursor.close() + for _i in range(3): + assert db._con.session == ['datestyle', 'test'] + cursor = db.cursor() + cursor.execute('select test') + cursor.fetchone() + cursor.close() + assert db._con.session == ['datestyle'] + + +def test_threadlocal(dbapi): # noqa: F811 + persist = PersistentDB(dbapi) + assert isinstance(persist.thread, local) + + class Threadlocal: + pass + + persist = PersistentDB(dbapi, threadlocal=Threadlocal) + assert isinstance(persist.thread, Threadlocal) + + +def test_ping_check(dbapi): # noqa: F811 + con_cls = dbapi.Connection + con_cls.has_ping = True + con_cls.num_pings = 0 + persist = PersistentDB(dbapi, 0, None, None, 0, True) + db = persist.connection() + assert db._con.valid is True + assert con_cls.num_pings == 0 + db.close() + db = persist.connection() + assert db._con.valid is False + assert con_cls.num_pings == 0 + persist = PersistentDB(dbapi, 0, None, None, 1, True) + db = persist.connection() + assert db._con.valid is True + assert con_cls.num_pings == 1 + db.close() + db = persist.connection() + assert db._con.valid is True + assert con_cls.num_pings == 2 + persist = PersistentDB(dbapi, 0, None, None, 2, True) + db = persist.connection() + assert db._con.valid is True + assert con_cls.num_pings == 2 + db.close() + db = persist.connection() + assert db._con.valid is False + assert con_cls.num_pings == 2 + cursor = db.cursor() + assert db._con.valid is True + assert con_cls.num_pings == 3 + cursor.execute('select test') + assert db._con.valid is True + assert con_cls.num_pings == 3 + persist = PersistentDB(dbapi, 0, None, None, 4, True) + db = persist.connection() + assert db._con.valid is True + assert con_cls.num_pings == 3 + db.close() + db = persist.connection() + assert db._con.valid is False + assert 
con_cls.num_pings == 3 + cursor = db.cursor() + db._con.close() + assert db._con.valid is False + assert con_cls.num_pings == 3 + cursor.execute('select test') + assert db._con.valid is True + assert con_cls.num_pings == 4 + con_cls.has_ping = False + con_cls.num_pings = 0 + + +def test_failed_transaction(dbapi): # noqa: F811 + persist = PersistentDB(dbapi) + db = persist.connection() + cursor = db.cursor() + db._con.close() + cursor.execute('select test') + db.begin() + db._con.close() + with pytest.raises(dbapi.InternalError): + cursor.execute('select test') + cursor.execute('select test') + db.begin() + db.cancel() + db._con.close() + cursor.execute('select test') + + +def test_context_manager(dbapi): # noqa: F811 + persist = PersistentDB(dbapi) + with persist.connection() as db: + with db.cursor() as cursor: + cursor.execute('select test') + r = cursor.fetchone() + assert r == 'test' diff --git a/tests/test_persistent_pg.py b/tests/test_persistent_pg.py new file mode 100644 index 0000000..189ca18 --- /dev/null +++ b/tests/test_persistent_pg.py @@ -0,0 +1,161 @@ +"""Test the PersistentPg module. + +Note: +We don't test performance here, so the test does not predicate +whether PersistentPg actually will help in improving performance or not. +We also assume that the underlying SteadyPg connections are tested. 
+ +Copyright and credit info: + +* This test was contributed by Christoph Zwerschke +""" + +from queue import Empty, Queue +from threading import Thread + +import pg +import pytest + +from dbutils.persistent_pg import PersistentPg + + +def test_version(): + from dbutils import __version__, persistent_pg + assert persistent_pg.__version__ == __version__ + assert PersistentPg.version == __version__ + + +@pytest.mark.parametrize("closeable", [False, True]) +def test_close(closeable): + persist = PersistentPg(closeable=closeable) + db = persist.connection() + assert db._con.db + assert db._con.valid is True + db.close() + assert closeable ^ (db._con.db is not None and db._con.valid) + db.close() + assert closeable ^ (db._con.db is not None and db._con.valid) + db._close() + assert not db._con.db + db._close() + assert not db._con.db + + +def test_threads(): + num_threads = 3 + persist = PersistentPg() + query_queue, result_queue = [], [] + for _i in range(num_threads): + query_queue.append(Queue(1)) + result_queue.append(Queue(1)) + + def run_queries(idx): + this_db = persist.connection().db + db = None + while True: + try: + q = query_queue[idx].get(timeout=1) + except Empty: + q = None + if not q: + break + db = persist.connection() + if db.db != this_db: + res = 'error - not persistent' + elif q == 'ping': + res = 'ok - thread alive' + elif q == 'close': + db.db.close() + res = 'ok - connection closed' + else: + res = db.query(q) + res = f'{idx}({db._usage}): {res}' + result_queue[idx].put(res, timeout=1) + if db: + db.close() + + threads = [] + for i in range(num_threads): + thread = Thread(target=run_queries, args=(i,)) + threads.append(thread) + thread.start() + for i in range(num_threads): + query_queue[i].put('ping', timeout=1) + for i in range(num_threads): + r = result_queue[i].get(timeout=1) + assert r == f'{i}(0): ok - thread alive' + assert threads[i].is_alive() + for i in range(num_threads): + for j in range(i + 1): + query_queue[i].put(f'select test{j}', 
timeout=1) + r = result_queue[i].get(timeout=1) + assert r == f'{i}({j + 1}): test{j}' + query_queue[1].put('select test4', timeout=1) + r = result_queue[1].get(timeout=1) + assert r == '1(3): test4' + query_queue[1].put('close', timeout=1) + r = result_queue[1].get(timeout=1) + assert r == '1(3): ok - connection closed' + for j in range(2): + query_queue[1].put(f'select test{j}', timeout=1) + r = result_queue[1].get(timeout=1) + assert r == f'1({j + 1}): test{j}' + for i in range(num_threads): + assert threads[i].is_alive() + query_queue[i].put('ping', timeout=1) + for i in range(num_threads): + r = result_queue[i].get(timeout=1) + assert r == f'{i}({i + 1}): ok - thread alive' + assert threads[i].is_alive() + for i in range(num_threads): + query_queue[i].put(None, timeout=1) + + +def test_maxusage(): + persist = PersistentPg(20) + db = persist.connection() + assert db._maxusage == 20 + for i in range(100): + r = db.query(f'select test{i}') + assert r == f'test{i}' + assert db.db.status + j = i % 20 + 1 + assert db._usage == j + assert db.num_queries == j + + +def test_setsession(): + persist = PersistentPg(3, ('set datestyle',)) + db = persist.connection() + assert db._maxusage == 3 + assert db._setsession_sql == ('set datestyle',) + assert db.db.session == ['datestyle'] + db.query('set test') + for _i in range(3): + assert db.db.session == ['datestyle', 'test'] + db.query('select test') + assert db.db.session == ['datestyle'] + + +def test_failed_transaction(): + persist = PersistentPg() + db = persist.connection() + db._con.close() + assert db.query('select test') == 'test' + db.begin() + db._con.close() + with pytest.raises(pg.InternalError): + db.query('select test') + assert db.query('select test') == 'test' + db.begin() + assert db.query('select test') == 'test' + db.rollback() + db._con.close() + assert db.query('select test') == 'test' + + +def test_context_manager(): + persist = PersistentPg() + with persist.connection() as db: + db.query('select test') 
+ assert db.num_queries == 1 diff --git a/tests/test_pooled_db.py b/tests/test_pooled_db.py new file mode 100644 index 0000000..b5ef7cf --- /dev/null +++ b/tests/test_pooled_db.py @@ -0,0 +1,1285 @@ +"""Test the PooledDB module. + +Note: +We don't test performance here, so the test does not predicate +whether PooledDB actually will help in improving performance or not. +We also assume that the underlying SteadyDB connections are tested. + +Copyright and credit info: + +* This test was contributed by Christoph Zwerschke +""" + +from queue import Empty, Queue +from threading import Thread + +import pytest + +from dbutils.pooled_db import ( + InvalidConnectionError, + NotSupportedError, + PooledDB, + SharedDBConnection, + TooManyConnectionsError, +) +from dbutils.steady_db import SteadyDBConnection + +from .mock_db import dbapi # noqa: F401 + + +def test_version(): + from dbutils import __version__, pooled_db + assert pooled_db.__version__ == __version__ + assert PooledDB.version == __version__ + + +@pytest.mark.parametrize("threadsafety", [None, 0]) +def test_no_threadsafety(dbapi, threadsafety): # noqa: F811 + dbapi.threadsafety = threadsafety + with pytest.raises(NotSupportedError): + PooledDB(dbapi) + + +@pytest.mark.parametrize("threadsafety", [1, 2, 3]) +def test_threadsafety(dbapi, threadsafety): # noqa: F811 + dbapi.threadsafety = threadsafety + pool = PooledDB(dbapi, 0, 0, 1) + assert hasattr(pool, '_maxshared') + if threadsafety > 1: + assert pool._maxshared == 1 + assert hasattr(pool, '_shared_cache') + else: + assert pool._maxshared == 0 + assert not hasattr(pool, '_shared_cache') + + +@pytest.mark.parametrize("threadsafety", [1, 2]) +def test_create_connection(dbapi, threadsafety): # noqa: F811, PLR0915 + dbapi.threadsafety = threadsafety + shareable = threadsafety > 1 + pool = PooledDB( + dbapi, 1, 1, 1, 0, False, None, None, True, None, None, + 'PooledDBTestDB', user='PooledDBTestUser') + assert hasattr(pool, '_idle_cache') + assert 
len(pool._idle_cache) == 1 + if shareable: + assert hasattr(pool, '_shared_cache') + assert len(pool._shared_cache) == 0 + else: + assert not hasattr(pool, '_shared_cache') + assert hasattr(pool, '_maxusage') + assert pool._maxusage is None + assert hasattr(pool, '_setsession') + assert pool._setsession is None + con = pool._idle_cache[0] + assert isinstance(con, SteadyDBConnection) + assert hasattr(con, '_maxusage') + assert con._maxusage == 0 + assert hasattr(con, '_setsession_sql') + assert con._setsession_sql is None + db = pool.connection() + assert len(pool._idle_cache) == 0 + if shareable: + assert len(pool._shared_cache) == 1 + assert hasattr(db, '_con') + assert db._con == con + assert hasattr(db, 'cursor') + assert hasattr(db, '_usage') + assert db._usage == 0 + assert hasattr(con, '_con') + db_con = con._con + assert hasattr(db_con, 'database') + assert db_con.database == 'PooledDBTestDB' + assert hasattr(db_con, 'user') + assert db_con.user == 'PooledDBTestUser' + assert hasattr(db_con, 'open_cursors') + assert db_con.open_cursors == 0 + assert hasattr(db_con, 'num_uses') + assert db_con.num_uses == 0 + assert hasattr(db_con, 'num_queries') + assert db_con.num_queries == 0 + cursor = db.cursor() + assert db_con.open_cursors == 1 + cursor.execute('select test') + r = cursor.fetchone() + cursor.close() + assert db_con.open_cursors == 0 + assert r == 'test' + assert db_con.num_queries == 1 + assert db._usage == 1 + cursor = db.cursor() + assert db_con.open_cursors == 1 + cursor.execute('set sessiontest') + cursor2 = db.cursor() + assert db_con.open_cursors == 2 + cursor2.close() + assert db_con.open_cursors == 1 + cursor.close() + assert db_con.open_cursors == 0 + assert db_con.num_queries == 1 + assert db._usage == 2 + assert db_con.session == ['rollback', 'sessiontest'] + pool = PooledDB(dbapi, 1, 1, 1) + assert len(pool._idle_cache) == 1 + if shareable: + assert len(pool._shared_cache) == 0 + db = pool.connection() + assert len(pool._idle_cache) == 0 + 
if shareable: + assert len(pool._shared_cache) == 1 + db.close() + assert len(pool._idle_cache) == 1 + if shareable: + assert len(pool._shared_cache) == 0 + db = pool.connection(True) + assert len(pool._idle_cache) == 0 + if shareable: + assert len(pool._shared_cache) == 1 + db.close() + assert len(pool._idle_cache) == 1 + if shareable: + assert len(pool._shared_cache) == 0 + db = pool.connection(False) + assert len(pool._idle_cache) == 0 + if shareable: + assert len(pool._shared_cache) == 0 + assert db._usage == 0 + db_con = db._con._con + assert db_con.database is None + assert db_con.user is None + db.close() + assert len(pool._idle_cache) == 1 + if shareable: + assert len(pool._shared_cache) == 0 + db = pool.dedicated_connection() + assert len(pool._idle_cache) == 0 + if shareable: + assert len(pool._shared_cache) == 0 + assert db._usage == 0 + db_con = db._con._con + assert db_con.database is None + assert db_con.user is None + db.close() + assert len(pool._idle_cache) == 1 + if shareable: + assert len(pool._shared_cache) == 0 + pool = PooledDB(dbapi, 0, 0, 0, 0, False, 3, ('set datestyle',)) + assert pool._maxusage == 3 + assert pool._setsession == ('set datestyle',) + con = pool.connection()._con + assert con._maxusage == 3 + assert con._setsession_sql == ('set datestyle',) + + +@pytest.mark.parametrize("threadsafety", [1, 2]) +def test_close_connection(dbapi, threadsafety): # noqa: F811 + dbapi.threadsafety = threadsafety + shareable = threadsafety > 1 + pool = PooledDB( + dbapi, 0, 1, 1, 0, False, None, None, True, None, None, + 'PooledDBTestDB', user='PooledDBTestUser') + assert hasattr(pool, '_idle_cache') + assert len(pool._idle_cache) == 0 + db = pool.connection() + assert hasattr(db, '_con') + con = db._con + assert len(pool._idle_cache) == 0 + if shareable: + assert len(pool._shared_cache) == 1 + assert hasattr(db, '_shared_con') + shared_con = db._shared_con + assert pool._shared_cache[0] == shared_con + assert hasattr(shared_con, 'shared') + assert 
shared_con.shared == 1 + assert hasattr(shared_con, 'con') + assert shared_con.con == con + assert isinstance(con, SteadyDBConnection) + assert hasattr(con, '_con') + db_con = con._con + assert hasattr(db_con, 'num_queries') + assert db._usage == 0 + assert db_con.num_queries == 0 + db.cursor().execute('select test') + assert db._usage == 1 + assert db_con.num_queries == 1 + db.close() + assert db._con is None + if shareable: + assert db._shared_con is None + assert shared_con.shared == 0 + with pytest.raises(InvalidConnectionError): + assert db._usage + assert not hasattr(db_con, '_num_queries') + assert len(pool._idle_cache) == 1 + assert pool._idle_cache[0]._con == db_con + if shareable: + assert len(pool._shared_cache) == 0 + db.close() + if shareable: + assert shared_con.shared == 0 + db = pool.connection() + assert db._con == con + assert len(pool._idle_cache) == 0 + if shareable: + assert len(pool._shared_cache) == 1 + shared_con = db._shared_con + assert pool._shared_cache[0] == shared_con + assert shared_con.con == con + assert shared_con.shared == 1 + assert db._usage == 1 + assert db_con.num_queries == 1 + assert hasattr(db_con, 'database') + assert db_con.database == 'PooledDBTestDB' + assert hasattr(db_con, 'user') + assert db_con.user == 'PooledDBTestUser' + db.cursor().execute('select test') + assert db_con.num_queries == 2 + db.cursor().execute('select test') + assert db_con.num_queries == 3 + db.close() + assert len(pool._idle_cache) == 1 + assert pool._idle_cache[0]._con == db_con + if shareable: + assert len(pool._shared_cache) == 0 + db = pool.connection(False) + assert db._con == con + assert len(pool._idle_cache) == 0 + if shareable: + assert len(pool._shared_cache) == 0 + db.close() + assert len(pool._idle_cache) == 1 + if shareable: + assert len(pool._shared_cache) == 0 + + +@pytest.mark.parametrize("threadsafety", [1, 2]) +def test_close_all(dbapi, threadsafety): # noqa: F811 + dbapi.threadsafety = threadsafety + shareable = threadsafety > 
1 + pool = PooledDB(dbapi, 10) + assert len(pool._idle_cache) == 10 + pool.close() + assert len(pool._idle_cache) == 0 + pool = PooledDB(dbapi, 10) + closed = ['no'] + + def close(what=closed): + what[0] = 'yes' + + pool._idle_cache[7]._con.close = close + assert closed == ['no'] + del pool + assert closed == ['yes'] + pool = PooledDB(dbapi, 10, 10, 5) + assert len(pool._idle_cache) == 10 + if shareable: + assert len(pool._shared_cache) == 0 + cache = [] + for _i in range(5): + cache.append(pool.connection()) + assert len(pool._idle_cache) == 5 + if shareable: + assert len(pool._shared_cache) == 5 + else: + assert len(pool._idle_cache) == 5 + pool.close() + assert len(pool._idle_cache) == 0 + if shareable: + assert len(pool._shared_cache) == 0 + pool = PooledDB(dbapi, 10, 10, 5) + closed = [] + + def close_idle(what=closed): + what.append('idle') + + def close_shared(what=closed): + what.append('shared') + + if shareable: + cache = [] + for _i in range(5): + cache.append(pool.connection()) + pool._shared_cache[3].con.close = close_shared + else: + pool._idle_cache[7]._con.close = close_shared + pool._idle_cache[3]._con.close = close_idle + assert closed == [] + del pool + if shareable: + del cache + assert closed == ['idle', 'shared'] + + +@pytest.mark.parametrize("threadsafety", [1, 2]) +def test_shareable_connection(dbapi, threadsafety): # noqa: F811 + dbapi.threadsafety = threadsafety + shareable = threadsafety > 1 + pool = PooledDB(dbapi, 0, 1, 2) + assert len(pool._idle_cache) == 0 + if shareable: + assert len(pool._shared_cache) == 0 + db1 = pool.connection() + assert len(pool._idle_cache) == 0 + if shareable: + assert len(pool._shared_cache) == 1 + db2 = pool.connection() + assert db1._con != db2._con + assert len(pool._idle_cache) == 0 + if shareable: + assert len(pool._shared_cache) == 2 + db3 = pool.connection() + assert len(pool._idle_cache) == 0 + if shareable: + assert len(pool._shared_cache) == 2 + assert db3._con == db1._con + assert 
db1._shared_con.shared == 2 + assert db2._shared_con.shared == 1 + else: + assert db3._con != db1._con + assert db3._con != db2._con + db4 = pool.connection() + assert len(pool._idle_cache) == 0 + if shareable: + assert len(pool._shared_cache) == 2 + assert db4._con == db2._con + assert db1._shared_con.shared == 2 + assert db2._shared_con.shared == 2 + else: + assert db4._con != db1._con + assert db4._con != db2._con + assert db4._con != db3._con + db5 = pool.connection(False) + assert db5._con != db1._con + assert db5._con != db2._con + assert db5._con != db3._con + assert db5._con != db4._con + assert len(pool._idle_cache) == 0 + if shareable: + assert len(pool._shared_cache) == 2 + assert db1._shared_con.shared == 2 + assert db2._shared_con.shared == 2 + db5.close() + assert len(pool._idle_cache) == 1 + db5 = pool.connection() + if shareable: + assert len(pool._idle_cache) == 1 + assert len(pool._shared_cache) == 2 + assert db5._shared_con.shared == 3 + else: + assert len(pool._idle_cache) == 0 + pool = PooledDB(dbapi, 0, 0, 1) + assert len(pool._idle_cache) == 0 + db1 = pool.connection(False) + if shareable: + assert len(pool._shared_cache) == 0 + db2 = pool.connection() + if shareable: + assert len(pool._shared_cache) == 1 + db3 = pool.connection() + if shareable: + assert len(pool._shared_cache) == 1 + assert db2._con == db3._con + else: + assert db2._con != db3._con + del db3 + if shareable: + assert len(pool._idle_cache) == 0 + assert len(pool._shared_cache) == 1 + else: + assert len(pool._idle_cache) == 1 + del db2 + if shareable: + assert len(pool._idle_cache) == 1 + assert len(pool._shared_cache) == 0 + else: + assert len(pool._idle_cache) == 2 + del db1 + if shareable: + assert len(pool._idle_cache) == 2 + assert len(pool._shared_cache) == 0 + else: + assert len(pool._idle_cache) == 3 + + +@pytest.mark.parametrize("threadsafety", [1, 2]) +def test_min_max_cached(dbapi, threadsafety): # noqa: F811 + dbapi.threadsafety = threadsafety + shareable = 
threadsafety > 1 + pool = PooledDB(dbapi, 3) + assert len(pool._idle_cache) == 3 + cache = [pool.connection() for _i in range(3)] + assert len(pool._idle_cache) == 0 + assert cache + del cache + assert len(pool._idle_cache) == 3 + cache = [pool.connection() for _i in range(6)] + assert len(pool._idle_cache) == 0 + assert cache + del cache + assert len(pool._idle_cache) == 6 + pool = PooledDB(dbapi, 0, 3) + assert len(pool._idle_cache) == 0 + cache = [pool.connection() for _i in range(3)] + assert len(pool._idle_cache) == 0 + assert cache + del cache + assert len(pool._idle_cache) == 3 + cache = [pool.connection() for _i in range(6)] + assert len(pool._idle_cache) == 0 + assert cache + del cache + assert len(pool._idle_cache) == 3 + pool = PooledDB(dbapi, 3, 3) + assert len(pool._idle_cache) == 3 + cache = [pool.connection() for _i in range(3)] + assert len(pool._idle_cache) == 0 + assert cache + del cache + assert len(pool._idle_cache) == 3 + cache = [pool.connection() for _i in range(6)] + assert len(pool._idle_cache) == 0 + assert cache + del cache + assert len(pool._idle_cache) == 3 + pool = PooledDB(dbapi, 3, 2) + assert len(pool._idle_cache) == 3 + cache = [pool.connection() for _i in range(4)] + assert len(pool._idle_cache) == 0 + assert cache + del cache + assert len(pool._idle_cache) == 3 + pool = PooledDB(dbapi, 2, 5) + assert len(pool._idle_cache) == 2 + cache = [pool.connection() for _i in range(10)] + assert len(pool._idle_cache) == 0 + assert cache + del cache + assert len(pool._idle_cache) == 5 + pool = PooledDB(dbapi, 1, 2, 3) + assert len(pool._idle_cache) == 1 + cache = [pool.connection(False) for _i in range(4)] + assert len(pool._idle_cache) == 0 + if shareable: + assert len(pool._shared_cache) == 0 + assert cache + del cache + assert len(pool._idle_cache) == 2 + cache = [pool.connection() for _i in range(10)] + assert len(pool._idle_cache) == 0 + if shareable: + assert len(pool._shared_cache) == 3 + assert cache + del cache + assert 
len(pool._idle_cache) == 2 + if shareable: + assert len(pool._shared_cache) == 0 + pool = PooledDB(dbapi, 1, 3, 2) + assert len(pool._idle_cache) == 1 + cache = [pool.connection(False) for _i in range(4)] + assert len(pool._idle_cache) == 0 + if shareable: + assert len(pool._shared_cache) == 0 + assert cache + del cache + assert len(pool._idle_cache) == 3 + cache = [pool.connection() for _i in range(10)] + if shareable: + assert len(pool._idle_cache) == 1 + assert len(pool._shared_cache) == 2 + else: + assert len(pool._idle_cache) == 0 + assert cache + del cache + assert len(pool._idle_cache) == 3 + if shareable: + assert len(pool._shared_cache) == 0 + + +@pytest.mark.parametrize("threadsafety", [1, 2]) +def test_max_shared(dbapi, threadsafety): # noqa: F811 + dbapi.threadsafety = threadsafety + shareable = threadsafety > 1 + pool = PooledDB(dbapi) + assert len(pool._idle_cache) == 0 + cache = [pool.connection() for _i in range(10)] + assert len(cache) == 10 + assert len(pool._idle_cache) == 0 + pool = PooledDB(dbapi, 1, 1, 0) + assert len(pool._idle_cache) == 1 + cache = [pool.connection() for _i in range(10)] + assert len(cache) == 10 + assert len(pool._idle_cache) == 0 + pool = PooledDB(dbapi, 0, 0, 1) + cache = [pool.connection() for _i in range(10)] + assert len(cache) == 10 + assert len(pool._idle_cache) == 0 + if shareable: + assert len(pool._shared_cache) == 1 + pool = PooledDB(dbapi, 1, 1, 1) + assert len(pool._idle_cache) == 1 + cache = [pool.connection() for _i in range(10)] + assert len(cache) == 10 + assert len(pool._idle_cache) == 0 + if shareable: + assert len(pool._shared_cache) == 1 + pool = PooledDB(dbapi, 0, 0, 7) + cache = [pool.connection(False) for _i in range(3)] + assert len(cache) == 3 + assert len(pool._idle_cache) == 0 + if shareable: + assert len(pool._shared_cache) == 0 + cache = [pool.connection() for _i in range(10)] + assert len(cache) == 10 + assert len(pool._idle_cache) == 3 + if shareable: + assert len(pool._shared_cache) == 7 + + 
+def test_sort_shared(dbapi): # noqa: F811 + pool = PooledDB(dbapi, 0, 4, 4) + cache = [] + for _i in range(6): + db = pool.connection() + db.cursor().execute('select test') + cache.append(db) + for i, db in enumerate(cache): + assert db._shared_con.shared == (1 if 2 <= i < 4 else 2) + cache[2].begin() + cache[3].begin() + db = pool.connection() + assert db._con is cache[0]._con + db.close() + cache[3].rollback() + db = pool.connection() + assert db._con is cache[3]._con + + +@pytest.mark.parametrize("threadsafety", [1, 2]) +def test_equally_shared(dbapi, threadsafety): # noqa: F811 + dbapi.threadsafety = threadsafety + shareable = threadsafety > 1 + pool = PooledDB(dbapi, 5, 5, 5) + assert len(pool._idle_cache) == 5 + for _i in range(15): + db = pool.connection(False) + db.cursor().execute('select test') + db.close() + assert len(pool._idle_cache) == 5 + for i in range(5): + con = pool._idle_cache[i] + assert con._usage == 3 + assert con._con.num_queries == 3 + cache = [] + for _i in range(35): + db = pool.connection() + db.cursor().execute('select test') + cache.append(db) + del db + assert len(pool._idle_cache) == 0 + if shareable: + assert len(pool._shared_cache) == 5 + for i in range(5): + con = pool._shared_cache[i] + assert con.shared == 7 + con = con.con + assert con._usage == 10 + assert con._con.num_queries == 10 + del cache + assert len(pool._idle_cache) == 5 + if shareable: + assert len(pool._shared_cache) == 0 + + +@pytest.mark.parametrize("threadsafety", [1, 2]) +def test_many_shared(dbapi, threadsafety): # noqa: F811 + dbapi.threadsafety = threadsafety + shareable = threadsafety > 1 + pool = PooledDB(dbapi, 0, 0, 5) + cache = [] + for _i in range(35): + db = pool.connection() + db.cursor().execute('select test1') + db.cursor().execute('select test2') + db.cursor().callproc('test3') + cache.append(db) + del db + assert len(pool._idle_cache) == 0 + if shareable: + assert len(pool._shared_cache) == 5 + for i in range(5): + con = pool._shared_cache[i] + 
assert con.shared == 7 + con = con.con + assert con._usage == 21 + assert con._con.num_queries == 14 + cache[3] = cache[8] = cache[33] = None + cache[12] = cache[17] = cache[34] = None + assert len(pool._shared_cache) == 5 + assert pool._shared_cache[0].shared == 7 + assert pool._shared_cache[1].shared == 7 + assert pool._shared_cache[2].shared == 5 + assert pool._shared_cache[3].shared == 4 + assert pool._shared_cache[4].shared == 6 + for db in cache: + if db: + db.cursor().callproc('test4') + for _i in range(6): + db = pool.connection() + db.cursor().callproc('test4') + cache.append(db) + del db + for i in range(5): + con = pool._shared_cache[i] + assert con.shared == 7 + con = con.con + assert con._usage == 28 + assert con._con.num_queries == 14 + del cache + if shareable: + assert len(pool._idle_cache) == 5 + assert len(pool._shared_cache) == 0 + else: + assert len(pool._idle_cache) == 35 + + +@pytest.mark.parametrize("threadsafety", [1, 2]) +def test_rollback(dbapi, threadsafety): # noqa: F811 + dbapi.threadsafety = threadsafety + pool = PooledDB(dbapi, 0, 1) + assert len(pool._idle_cache) == 0 + db = pool.connection(False) + assert len(pool._idle_cache) == 0 + assert db._con._con.open_cursors == 0 + cursor = db.cursor() + assert db._con._con.open_cursors == 1 + cursor.execute('set doit1') + db.commit() + cursor.execute('set dont1') + cursor.close() + assert db._con._con.open_cursors == 0 + del db + assert len(pool._idle_cache) == 1 + db = pool.connection(False) + assert len(pool._idle_cache) == 0 + assert db._con._con.open_cursors == 0 + cursor = db.cursor() + assert db._con._con.open_cursors == 1 + cursor.execute('set doit2') + cursor.close() + assert db._con._con.open_cursors == 0 + db.commit() + session = db._con._con.session + db.close() + assert session == [ + 'doit1', 'commit', 'dont1', 'rollback', + 'doit2', 'commit', 'rollback'] + + +@pytest.mark.parametrize("threadsafety", [1, 2]) +def test_maxconnections(dbapi, threadsafety): # noqa: F811, PLR0915 + 
dbapi.threadsafety = threadsafety + shareable = threadsafety > 1 + pool = PooledDB(dbapi, 1, 2, 2, 3) + assert hasattr(pool, '_maxconnections') + assert pool._maxconnections == 3 + assert hasattr(pool, '_connections') + assert pool._connections == 0 + assert len(pool._idle_cache) == 1 + cache = [] + for _i in range(3): + cache.append(pool.connection(False)) + assert pool._connections == 3 + assert len(pool._idle_cache) == 0 + if shareable: + assert len(pool._shared_cache) == 0 + with pytest.raises(TooManyConnectionsError): + pool.connection(False) + with pytest.raises(TooManyConnectionsError): + pool.connection() + cache = [] + assert pool._connections == 0 + assert len(pool._idle_cache) == 2 + if shareable: + assert len(pool._shared_cache) == 0 + for _i in range(3): + cache.append(pool.connection()) + assert len(pool._idle_cache) == 0 + if shareable: + assert pool._connections == 2 + assert len(pool._shared_cache) == 2 + cache.append(pool.connection(False)) + assert pool._connections == 3 + assert len(pool._shared_cache) == 2 + else: + assert pool._connections == 3 + with pytest.raises(TooManyConnectionsError): + pool.connection(False) + if shareable: + cache.append(pool.connection(True)) + assert pool._connections == 3 + else: + with pytest.raises(TooManyConnectionsError): + pool.connection() + del cache + assert pool._connections == 0 + assert len(pool._idle_cache) == 2 + pool = PooledDB(dbapi, 0, 1, 1, 1) + assert pool._maxconnections == 1 + assert pool._connections == 0 + assert len(pool._idle_cache) == 0 + db = pool.connection(False) + assert pool._connections == 1 + assert len(pool._idle_cache) == 0 + if shareable: + assert len(pool._shared_cache) == 0 + with pytest.raises(TooManyConnectionsError): + pool.connection(False) + with pytest.raises(TooManyConnectionsError): + pool.connection() + assert db + del db + assert pool._connections == 0 + assert len(pool._idle_cache) == 1 + cache = [pool.connection()] + assert pool._connections == 1 + assert 
len(pool._idle_cache) == 0 + if shareable: + assert len(pool._shared_cache) == 1 + cache.append(pool.connection()) + assert pool._connections == 1 + assert len(pool._shared_cache) == 1 + assert pool._shared_cache[0].shared == 2 + else: + with pytest.raises(TooManyConnectionsError): + pool.connection() + with pytest.raises(TooManyConnectionsError): + pool.connection(False) + if shareable: + cache.append(pool.connection(True)) + assert pool._connections == 1 + assert len(pool._shared_cache) == 1 + assert pool._shared_cache[0].shared == 3 + else: + with pytest.raises(TooManyConnectionsError): + pool.connection(True) + del cache + assert pool._connections == 0 + assert len(pool._idle_cache) == 1 + if shareable: + assert len(pool._shared_cache) == 0 + db = pool.connection(False) + assert pool._connections == 1 + assert len(pool._idle_cache) == 0 + assert db + del db + assert pool._connections == 0 + assert len(pool._idle_cache) == 1 + pool = PooledDB(dbapi, 1, 2, 2, 1) + assert pool._maxconnections == 2 + assert pool._connections == 0 + assert len(pool._idle_cache) == 1 + cache = [pool.connection(False)] + assert pool._connections == 1 + assert len(pool._idle_cache) == 0 + cache.append(pool.connection(False)) + assert pool._connections == 2 + assert len(pool._idle_cache) == 0 + if shareable: + assert len(pool._shared_cache) == 0 + with pytest.raises(TooManyConnectionsError): + pool.connection(False) + with pytest.raises(TooManyConnectionsError): + pool.connection() + pool = PooledDB(dbapi, 4, 3, 2, 1, False) + assert pool._maxconnections == 4 + assert pool._connections == 0 + assert len(pool._idle_cache) == 4 + cache = [] + for _i in range(4): + cache.append(pool.connection(False)) + assert pool._connections == 4 + assert len(pool._idle_cache) == 0 + with pytest.raises(TooManyConnectionsError): + pool.connection(False) + with pytest.raises(TooManyConnectionsError): + pool.connection() + pool = PooledDB(dbapi, 1, 2, 3, 4, False) + assert pool._maxconnections == 4 + 
assert pool._connections == 0 + assert len(pool._idle_cache) == 1 + for _i in range(4): + cache.append(pool.connection()) + assert len(pool._idle_cache) == 0 + if shareable: + assert pool._connections == 3 + assert len(pool._shared_cache) == 3 + cache.append(pool.connection()) + assert pool._connections == 3 + cache.append(pool.connection(False)) + assert pool._connections == 4 + else: + assert pool._connections == 4 + with pytest.raises(TooManyConnectionsError): + pool.connection() + with pytest.raises(TooManyConnectionsError): + pool.connection(False) + pool = PooledDB(dbapi, 0, 0, 3, 3, False) + assert pool._maxconnections == 3 + assert pool._connections == 0 + cache = [] + for _i in range(3): + cache.append(pool.connection(False)) + assert pool._connections == 3 + with pytest.raises(TooManyConnectionsError): + pool.connection(False) + with pytest.raises(TooManyConnectionsError): + pool.connection(True) + cache = [] + assert pool._connections == 0 + for _i in range(3): + cache.append(pool.connection()) + assert pool._connections == 3 + if shareable: + for _i in range(3): + cache.append(pool.connection()) + assert pool._connections == 3 + else: + with pytest.raises(TooManyConnectionsError): + pool.connection() + with pytest.raises(TooManyConnectionsError): + pool.connection(False) + pool = PooledDB(dbapi, 0, 0, 3) + assert pool._maxconnections == 0 + assert pool._connections == 0 + cache = [] + for _i in range(10): + cache.append(pool.connection(False)) + cache.append(pool.connection()) + if shareable: + assert pool._connections == 13 + assert len(pool._shared_cache) == 3 + else: + assert pool._connections == 20 + pool = PooledDB(dbapi, 1, 1, 1, 1, True) + assert pool._maxconnections == 1 + assert pool._connections == 0 + assert len(pool._idle_cache) == 1 + db = pool.connection(False) + assert pool._connections == 1 + assert len(pool._idle_cache) == 0 + + def connection(): + db = pool.connection() + cursor = db.cursor() + cursor.execute('set thread') + 
cursor.close() + db.close() + + thread = Thread(target=connection) + thread.start() + thread.join(0.1) + assert thread.is_alive() + assert pool._connections == 1 + assert len(pool._idle_cache) == 0 + if shareable: + assert len(pool._shared_cache) == 0 + session = db._con._con.session + assert session == ['rollback'] + del db + thread.join(0.1) + assert not thread.is_alive() + assert pool._connections == 0 + assert len(pool._idle_cache) == 1 + if shareable: + assert len(pool._shared_cache) == 0 + db = pool.connection(False) + assert pool._connections == 1 + assert len(pool._idle_cache) == 0 + assert session == ['rollback', 'rollback', 'thread', 'rollback'] + assert db + del db + + +@pytest.mark.parametrize("threadsafety", [1, 2]) +@pytest.mark.parametrize("maxusage", [0, 3, 7]) +def test_maxusage(dbapi, threadsafety, maxusage): # noqa: F811 + dbapi.threadsafety = threadsafety + pool = PooledDB(dbapi, 0, 0, 0, 1, False, maxusage) + assert pool._maxusage == maxusage + assert len(pool._idle_cache) == 0 + db = pool.connection(False) + assert db._con._maxusage == maxusage + assert len(pool._idle_cache) == 0 + assert db._con._con.open_cursors == 0 + assert db._usage == 0 + assert db._con._con.num_uses == 0 + assert db._con._con.num_queries == 0 + for i in range(20): + cursor = db.cursor() + assert db._con._con.open_cursors == 1 + cursor.execute(f'select test{i}') + r = cursor.fetchone() + assert r == f'test{i}' + cursor.close() + assert db._con._con.open_cursors == 0 + j = i % maxusage + 1 if maxusage else i + 1 + assert db._usage == j + assert db._con._con.num_uses == j + assert db._con._con.num_queries == j + db.cursor().callproc('test') + assert db._con._con.open_cursors == 0 + assert db._usage == j + 1 + assert db._con._con.num_uses == j + 1 + assert db._con._con.num_queries == j + + +@pytest.mark.parametrize("threadsafety", [1, 2]) +def test_setsession(dbapi, threadsafety): # noqa: F811 + dbapi.threadsafety = threadsafety + setsession = ('set time zone', 'set 
datestyle') + pool = PooledDB(dbapi, 0, 0, 0, 1, False, None, setsession) + assert pool._setsession == setsession + db = pool.connection(False) + assert db._setsession_sql == setsession + assert db._con._con.session == ['time zone', 'datestyle'] + db.cursor().execute('select test') + db.cursor().execute('set test1') + assert db._usage == 2 + assert db._con._con.num_uses == 4 + assert db._con._con.num_queries == 1 + assert db._con._con.session == ['time zone', 'datestyle', 'test1'] + db.close() + db = pool.connection(False) + assert db._setsession_sql == setsession + assert db._con._con.session == \ + ['time zone', 'datestyle', 'test1', 'rollback'] + db._con._con.close() + db.cursor().execute('select test') + db.cursor().execute('set test2') + assert db._con._con.session == ['time zone', 'datestyle', 'test2'] + + +@pytest.mark.parametrize("threadsafety", [1, 2]) +def test_one_thread_two_connections(dbapi, threadsafety): # noqa: F811 + dbapi.threadsafety = threadsafety + shareable = threadsafety > 1 + pool = PooledDB(dbapi, 2) + db1 = pool.connection() + for _i in range(5): + db1.cursor().execute('select test') + db2 = pool.connection() + assert db1 != db2 + assert db1._con != db2._con + for _i in range(7): + db2.cursor().execute('select test') + assert db1._con._con.num_queries == 5 + assert db2._con._con.num_queries == 7 + del db1 + db1 = pool.connection() + assert db1 != db2 + assert db1._con != db2._con + for _i in range(3): + db1.cursor().execute('select test') + assert db1._con._con.num_queries == 8 + db2.cursor().execute('select test') + assert db2._con._con.num_queries == 8 + pool = PooledDB(dbapi, 0, 0, 2) + db1 = pool.connection() + for _i in range(5): + db1.cursor().execute('select test') + db2 = pool.connection() + assert db1 != db2 + assert db1._con != db2._con + for _i in range(7): + db2.cursor().execute('select test') + assert db1._con._con.num_queries == 5 + assert db2._con._con.num_queries == 7 + del db1 + db1 = pool.connection() + assert db1 != db2 
+ assert db1._con != db2._con + for _i in range(3): + db1.cursor().execute('select test') + assert db1._con._con.num_queries == 8 + db2.cursor().execute('select test') + assert db2._con._con.num_queries == 8 + pool = PooledDB(dbapi, 0, 0, 1) + db1 = pool.connection() + db2 = pool.connection() + assert db1 != db2 + if shareable: + assert db1._con == db2._con + else: + assert db1._con != db2._con + del db1 + db1 = pool.connection(False) + assert db1 != db2 + assert db1._con != db2._con + + +@pytest.mark.parametrize("threadsafety", [1, 2]) +def test_three_threads_two_connections(dbapi, threadsafety): # noqa: F811 + dbapi.threadsafety = threadsafety + pool = PooledDB(dbapi, 2, 2, 0, 2, True) + queue = Queue(3) + + def connection(): + queue.put(pool.connection(), timeout=1) + + for _i in range(3): + Thread(target=connection).start() + db1 = queue.get(timeout=1) + db2 = queue.get(timeout=1) + assert db1 != db2 + db1_con = db1._con + db2_con = db2._con + assert db1_con != db2_con + with pytest.raises(Empty): + queue.get(timeout=0.1) + del db1 + db1 = queue.get(timeout=1) + assert db1 != db2 + assert db1._con != db2._con + assert db1._con == db1_con + pool = PooledDB(dbapi, 2, 2, 1, 2, True) + db1 = pool.connection(False) + db2 = pool.connection(False) + assert db1 != db2 + db1_con = db1._con + db2_con = db2._con + assert db1_con != db2_con + Thread(target=connection).start() + with pytest.raises(Empty): + queue.get(timeout=0.1) + del db1 + db1 = queue.get(timeout=1) + assert db1 != db2 + assert db1._con != db2._con + assert db1._con == db1_con + + +def test_ping_check(dbapi): # noqa: F811 + con_cls = dbapi.Connection + con_cls.has_ping = True + con_cls.num_pings = 0 + pool = PooledDB(dbapi, 1, 1, 0, 0, False, None, None, True, None, 0) + db = pool.connection() + assert db._con._con.valid + assert con_cls.num_pings == 0 + db._con.close() + db.close() + db = pool.connection() + assert not db._con._con.valid + assert con_cls.num_pings == 0 + pool = PooledDB(dbapi, 1, 1, 1, 
0, False, None, None, True, None, 0) + db = pool.connection() + assert db._con._con.valid + assert con_cls.num_pings == 0 + db._con.close() + db = pool.connection() + assert not db._con._con.valid + assert con_cls.num_pings == 0 + pool = PooledDB(dbapi, 1, 1, 0, 0, False, None, None, True, None, 1) + db = pool.connection() + assert db._con._con.valid + assert con_cls.num_pings == 1 + db._con.close() + db.close() + db = pool.connection() + assert db._con._con.valid + assert con_cls.num_pings == 2 + pool = PooledDB(dbapi, 1, 1, 1, 0, False, None, None, True, None, 1) + db = pool.connection() + assert db._con._con.valid + assert con_cls.num_pings == 3 + db._con.close() + db = pool.connection() + assert db._con._con.valid + assert con_cls.num_pings == 4 + pool = PooledDB(dbapi, 1, 1, 1, 0, False, None, None, True, None, 2) + db = pool.connection() + assert db._con._con.valid + assert con_cls.num_pings == 4 + db._con.close() + db = pool.connection() + assert not db._con._con.valid + assert con_cls.num_pings == 4 + db.cursor() + assert db._con._con.valid + assert con_cls.num_pings == 5 + pool = PooledDB(dbapi, 1, 1, 1, 0, False, None, None, True, None, 4) + db = pool.connection() + assert db._con._con.valid + assert con_cls.num_pings == 5 + db._con.close() + db = pool.connection() + assert not db._con._con.valid + assert con_cls.num_pings == 5 + cursor = db.cursor() + db._con.close() + assert not db._con._con.valid + assert con_cls.num_pings == 5 + cursor.execute('select test') + assert db._con._con.valid + assert con_cls.num_pings == 6 + con_cls.has_ping = False + con_cls.num_pings = 0 + + +def test_failed_transaction(dbapi): # noqa: F811 + pool = PooledDB(dbapi, 0, 1, 1) + db = pool.connection() + cursor = db.cursor() + db._con._con.close() + cursor.execute('select test') + db.begin() + db._con._con.close() + with pytest.raises(dbapi.InternalError): + cursor.execute('select test') + cursor.execute('select test') + db.begin() + db.cancel() + db._con._con.close() + 
cursor.execute('select test') + pool = PooledDB(dbapi, 1, 1, 0) + db = pool.connection() + cursor = db.cursor() + db._con._con.close() + cursor.execute('select test') + db.begin() + db._con._con.close() + with pytest.raises(dbapi.InternalError): + cursor.execute('select test') + cursor.execute('select test') + db.begin() + db.cancel() + db._con._con.close() + cursor.execute('select test') + + +def test_shared_in_transaction(dbapi): # noqa: F811 + pool = PooledDB(dbapi, 0, 1, 1) + db = pool.connection() + db.begin() + pool.connection(False) + with pytest.raises(TooManyConnectionsError): + pool.connection() + pool = PooledDB(dbapi, 0, 2, 2) + db1 = pool.connection() + db2 = pool.connection() + assert db2._con is not db1._con + db2.close() + db2 = pool.connection() + assert db2._con is not db1._con + db = pool.connection() + assert db._con is db1._con + db.close() + db1.begin() + db = pool.connection() + assert db._con is db2._con + db.close() + db2.begin() + pool.connection(False) + with pytest.raises(TooManyConnectionsError): + pool.connection() + db1.rollback() + db = pool.connection() + assert db._con is db1._con + + +def test_reset_transaction(dbapi): # noqa: F811 + pool = PooledDB(dbapi, 1, 1, 0) + db = pool.connection() + db.begin() + con = db._con + assert con._transaction + assert con._con.session == ['rollback'] + db.close() + assert pool.connection()._con is con + assert not con._transaction + assert con._con.session == ['rollback'] * 3 + pool = PooledDB(dbapi, 1, 1, 0, reset=False) + db = pool.connection() + db.begin() + con = db._con + assert con._transaction + assert con._con.session == [] + db.close() + assert pool.connection()._con is con + assert not con._transaction + assert con._con.session == ['rollback'] + + +def test_context_manager(dbapi): # noqa: F811 + pool = PooledDB(dbapi, 1, 1, 1) + con = pool._idle_cache[0]._con + with pool.connection() as db: + assert hasattr(db, '_shared_con') + assert not pool._idle_cache + assert con.valid + with 
db.cursor() as cursor: + assert con.open_cursors == 1 + cursor.execute('select test') + r = cursor.fetchone() + assert con.open_cursors == 0 + assert r == 'test' + assert con.num_queries == 1 + assert pool._idle_cache + with pool.dedicated_connection() as db: + assert not hasattr(db, '_shared_con') + assert not pool._idle_cache + with db.cursor() as cursor: + assert con.open_cursors == 1 + cursor.execute('select test') + r = cursor.fetchone() + assert con.open_cursors == 0 + assert r == 'test' + assert con.num_queries == 2 + assert pool._idle_cache + + +def test_shared_db_connection_create(dbapi): # noqa: F811 + db_con = dbapi.connect() + con = SharedDBConnection(db_con) + assert con.con == db_con + assert con.shared == 1 + + +def test_shared_db_connection_share_and_unshare(dbapi): # noqa: F811 + con = SharedDBConnection(dbapi.connect()) + assert con.shared == 1 + con.share() + assert con.shared == 2 + con.share() + assert con.shared == 3 + con.unshare() + assert con.shared == 2 + con.unshare() + assert con.shared == 1 + + +def test_shared_db_connection_compare(dbapi): # noqa: F811 + con1 = SharedDBConnection(dbapi.connect()) + con1.con._transaction = False + con2 = SharedDBConnection(dbapi.connect()) + con2.con._transaction = False + assert con1 == con2 + assert con1 <= con2 + assert con1 >= con2 + assert not con1 != con2 # noqa: SIM202 + assert not con1 < con2 + assert not con1 > con2 + con2.share() + assert not con1 == con2 # noqa: SIM201 + assert con1 <= con2 + assert not con1 >= con2 + assert con1 != con2 + assert con1 < con2 + assert not con1 > con2 + con1.con._transaction = True + assert not con1 == con2 # noqa: SIM201 + assert not con1 <= con2 + assert con1 >= con2 + assert con1 != con2 + assert not con1 < con2 + assert con1 > con2 diff --git a/tests/test_pooled_pg.py b/tests/test_pooled_pg.py new file mode 100644 index 0000000..0eed92b --- /dev/null +++ b/tests/test_pooled_pg.py @@ -0,0 +1,321 @@ +"""Test the PooledPg module. 
+ +Note: +We don't test performance here, so the test does not predicate +whether PooledPg actually will help in improving performance or not. +We also assume that the underlying SteadyPg connections are tested. + +Copyright and credit info: + +* This test was contributed by Christoph Zwerschke +""" + +from queue import Empty, Queue +from threading import Thread + +import pg # noqa: F401 +import pytest + +from dbutils.pooled_pg import ( + InvalidConnectionError, + PooledPg, + TooManyConnectionsError, +) +from dbutils.steady_pg import SteadyPgConnection + + +def test_version(): + from dbutils import __version__, pooled_pg + assert pooled_pg.__version__ == __version__ + assert PooledPg.version == __version__ + + +def test_create_connection(): + pool = PooledPg( + 1, 1, 0, False, None, None, False, + 'PooledPgTestDB', user='PooledPgTestUser') + assert hasattr(pool, '_cache') + assert pool._cache.qsize() == 1 + assert hasattr(pool, '_maxusage') + assert pool._maxusage is None + assert hasattr(pool, '_setsession') + assert pool._setsession is None + assert hasattr(pool, '_reset') + assert not pool._reset + db_con = pool._cache.get(0) + pool._cache.put(db_con, 0) + assert isinstance(db_con, SteadyPgConnection) + db = pool.connection() + assert pool._cache.qsize() == 0 + assert hasattr(db, '_con') + assert db._con == db_con + assert hasattr(db, 'query') + assert hasattr(db, 'num_queries') + assert db.num_queries == 0 + assert hasattr(db, '_maxusage') + assert db._maxusage == 0 + assert hasattr(db, '_setsession_sql') + assert db._setsession_sql is None + assert hasattr(db, 'dbname') + assert db.dbname == 'PooledPgTestDB' + assert hasattr(db, 'user') + assert db.user == 'PooledPgTestUser' + db.query('select test') + assert db.num_queries == 1 + pool = PooledPg(1) + db = pool.connection() + assert hasattr(db, 'dbname') + assert db.dbname is None + assert hasattr(db, 'user') + assert db.user is None + assert hasattr(db, 'num_queries') + assert db.num_queries == 0 + pool = 
PooledPg(0, 0, 0, False, 3, ('set datestyle',)) + assert pool._maxusage == 3 + assert pool._setsession == ('set datestyle',) + db = pool.connection() + assert db._maxusage == 3 + assert db._setsession_sql == ('set datestyle',) + + +def test_close_connection(): + pool = PooledPg( + 0, 1, 0, False, None, None, False, + 'PooledPgTestDB', user='PooledPgTestUser') + db = pool.connection() + assert hasattr(db, '_con') + db_con = db._con + assert isinstance(db_con, SteadyPgConnection) + assert hasattr(pool, '_cache') + assert pool._cache.qsize() == 0 + assert db.num_queries == 0 + db.query('select test') + assert db.num_queries == 1 + db.close() + with pytest.raises(InvalidConnectionError): + assert db.num_queries + db = pool.connection() + assert hasattr(db, 'dbname') + assert db.dbname == 'PooledPgTestDB' + assert hasattr(db, 'user') + assert db.user == 'PooledPgTestUser' + assert db.num_queries == 1 + db.query('select test') + assert db.num_queries == 2 + db = pool.connection() + assert pool._cache.qsize() == 1 + assert pool._cache.get(0) == db_con + assert db + del db + + +def test_min_max_cached(): + pool = PooledPg(3) + assert hasattr(pool, '_cache') + assert pool._cache.qsize() == 3 + cache = [pool.connection() for _i in range(3)] + assert pool._cache.qsize() == 0 + for _i in range(3): + cache.pop().close() + assert pool._cache.qsize() == 3 + for _i in range(6): + cache.append(pool.connection()) + assert pool._cache.qsize() == 0 + for _i in range(6): + cache.pop().close() + assert pool._cache.qsize() == 6 + pool = PooledPg(3, 4) + assert hasattr(pool, '_cache') + assert pool._cache.qsize() == 3 + cache = [pool.connection() for _i in range(3)] + assert pool._cache.qsize() == 0 + for _i in range(3): + cache.pop().close() + assert pool._cache.qsize() == 3 + for _i in range(6): + cache.append(pool.connection()) + assert pool._cache.qsize() == 0 + for _i in range(6): + cache.pop().close() + assert pool._cache.qsize() == 4 + pool = PooledPg(3, 2) + assert hasattr(pool, 
'_cache') + assert pool._cache.qsize() == 3 + cache = [pool.connection() for _i in range(4)] + assert pool._cache.qsize() == 0 + for _i in range(4): + cache.pop().close() + assert pool._cache.qsize() == 3 + pool = PooledPg(2, 5) + assert hasattr(pool, '_cache') + assert pool._cache.qsize() == 2 + cache = [pool.connection() for _i in range(10)] + assert pool._cache.qsize() == 0 + for _i in range(10): + cache.pop().close() + assert pool._cache.qsize() == 5 + + +def test_max_connections(): + pool = PooledPg(1, 2, 3) + assert pool._cache.qsize() == 1 + cache = [pool.connection() for _i in range(3)] + assert pool._cache.qsize() == 0 + with pytest.raises(TooManyConnectionsError): + pool.connection() + pool = PooledPg(0, 1, 1, False) + assert pool._blocking == 0 + assert pool._cache.qsize() == 0 + db = pool.connection() + assert pool._cache.qsize() == 0 + with pytest.raises(TooManyConnectionsError): + pool.connection() + assert db + del db + assert cache + del cache + pool = PooledPg(1, 2, 1) + assert pool._cache.qsize() == 1 + cache = [pool.connection()] + assert pool._cache.qsize() == 0 + cache.append(pool.connection()) + assert pool._cache.qsize() == 0 + with pytest.raises(TooManyConnectionsError): + pool.connection() + pool = PooledPg(3, 2, 1, False) + assert pool._cache.qsize() == 3 + cache = [pool.connection() for _i in range(3)] + assert len(cache) == 3 + assert pool._cache.qsize() == 0 + with pytest.raises(TooManyConnectionsError): + pool.connection() + pool = PooledPg(1, 1, 1, True) + assert pool._blocking == 1 + assert pool._cache.qsize() == 1 + db = pool.connection() + assert pool._cache.qsize() == 0 + + def connection(): + pool.connection().query('set thread') + + thread = Thread(target=connection) + thread.start() + thread.join(0.1) + assert thread.is_alive() + assert pool._cache.qsize() == 0 + session = db._con.session + assert session == [] + del db + thread.join(0.1) + assert not thread.is_alive() + assert pool._cache.qsize() == 1 + db = pool.connection() 
+ assert pool._cache.qsize() == 0 + assert session == ['thread'] + assert db + del db + + +def test_one_thread_two_connections(): + pool = PooledPg(2) + db1 = pool.connection() + for _i in range(5): + db1.query('select test') + db2 = pool.connection() + assert db1 != db2 + assert db1._con != db2._con + for _i in range(7): + db2.query('select test') + assert db1.num_queries == 5 + assert db2.num_queries == 7 + del db1 + db1 = pool.connection() + assert db1 != db2 + assert db1._con != db2._con + assert hasattr(db1, 'query') + for _i in range(3): + db1.query('select test') + assert db1.num_queries == 8 + db2.query('select test') + assert db2.num_queries == 8 + + +def test_three_threads_two_connections(): + pool = PooledPg(2, 2, 2, True) + queue = Queue(3) + + def connection(): + queue.put(pool.connection(), timeout=1) + + for _i in range(3): + Thread(target=connection).start() + db1 = queue.get(timeout=1) + db2 = queue.get(timeout=1) + db1_con = db1._con + db2_con = db2._con + assert db1 != db2 + assert db1_con != db2_con + with pytest.raises(Empty): + queue.get(timeout=0.1) + del db1 + db1 = queue.get(timeout=1) + assert db1 != db2 + assert db1._con != db2._con + assert db1._con == db1_con + + +def test_reset_transaction(): + pool = PooledPg(1) + db = pool.connection() + db.begin() + con = db._con + assert con._transaction + db.query('select test') + assert con.num_queries == 1 + db.close() + assert pool.connection()._con is con + assert not con._transaction + assert con.session == ['begin', 'rollback'] + assert con.num_queries == 1 + pool = PooledPg(1, reset=1) + db = pool.connection() + db.begin() + con = db._con + assert con._transaction + assert con.session == ['rollback', 'begin'] + db.query('select test') + assert con.num_queries == 1 + db.close() + assert pool.connection()._con is con + assert not con._transaction + assert con.session == ['rollback', 'begin', 'rollback', 'rollback'] + assert con.num_queries == 1 + pool = PooledPg(1, reset=2) + db = 
pool.connection() + db.begin() + con = db._con + assert con._transaction + assert con.session == ['begin'] + db.query('select test') + assert con.num_queries == 1 + db.close() + assert pool.connection()._con is con + assert not con._transaction + assert con.session == [] + assert con.num_queries == 0 + + +def test_context_manager(): + pool = PooledPg(1, 1, 1) + with pool.connection() as db: + db_con = db._con._con + db.query('select test') + assert db_con.num_queries == 1 + with pytest.raises(TooManyConnectionsError): + pool.connection() + with pool.connection() as db: + db_con = db._con._con + db.query('select test') + assert db_con.num_queries == 2 + with pytest.raises(TooManyConnectionsError): + pool.connection() diff --git a/tests/test_simple_pooled_db.py b/tests/test_simple_pooled_db.py new file mode 100644 index 0000000..bc2cf50 --- /dev/null +++ b/tests/test_simple_pooled_db.py @@ -0,0 +1,141 @@ +"""Test the SimplePooledDB module. + +Note: +We don't test performance here, so the test does not predicate +whether SimplePooledDB actually will help in improving performance or not. +We also do not test any real world DB-API 2 module, we just +mock the basic connection functionality of an arbitrary module. + +Copyright and credit info: + +* This test was contributed by Christoph Zwerschke +""" + +from queue import Empty, Queue +from threading import Thread + +import pytest + +from dbutils import simple_pooled_db + +from . 
import mock_db as dbapi + + +def my_db_pool(threadsafety, max_connections): + """Get simple PooledDB connection.""" + dbapi_threadsafety = dbapi.threadsafety + dbapi.threadsafety = threadsafety + try: + return simple_pooled_db.PooledDB( + dbapi, max_connections, + 'SimplePooledDBTestDB', 'SimplePooledDBTestUser') + finally: + dbapi.threadsafety = dbapi_threadsafety + + +def test_version(): + from dbutils import __version__ + assert simple_pooled_db.__version__ == __version__ + assert simple_pooled_db.PooledDB.version == __version__ + + +@pytest.mark.parametrize("threadsafety", [None, -1, 0, 4]) +def test_no_threadsafety(threadsafety): + with pytest.raises(simple_pooled_db.NotSupportedError): + my_db_pool(threadsafety, 1) + + +@pytest.mark.parametrize("threadsafety", [1, 2, 3]) +def test_create_connection(threadsafety): + dbpool = my_db_pool(threadsafety, 1) + db = dbpool.connection() + assert hasattr(db, 'cursor') + assert hasattr(db, 'open_cursors') + assert db.open_cursors == 0 + assert hasattr(db, 'database') + assert db.database == 'SimplePooledDBTestDB' + assert hasattr(db, 'user') + assert db.user == 'SimplePooledDBTestUser' + cursor = db.cursor() + assert cursor is not None + assert db.open_cursors == 1 + del cursor + + +@pytest.mark.parametrize("threadsafety", [1, 2, 3]) +def test_close_connection(threadsafety): + db_pool = my_db_pool(threadsafety, 1) + db = db_pool.connection() + assert db.open_cursors == 0 + cursor1 = db.cursor() + assert cursor1 is not None + assert db.open_cursors == 1 + db.close() + assert not hasattr(db, 'open_cursors') + db = db_pool.connection() + assert hasattr(db, 'database') + assert db.database == 'SimplePooledDBTestDB' + assert hasattr(db, 'user') + assert db.user == 'SimplePooledDBTestUser' + assert db.open_cursors == 1 + cursor2 = db.cursor() + assert cursor2 is not None + assert db.open_cursors == 2 + del cursor2 + del cursor1 + + +@pytest.mark.parametrize("threadsafety", [1, 2, 3]) +def test_two_connections(threadsafety): + 
db_pool = my_db_pool(threadsafety, 2) + db1 = db_pool.connection() + cursors1 = [db1.cursor() for _i_ in range(5)] + db2 = db_pool.connection() + assert db1 != db2 + cursors2 = [db2.cursor() for _i in range(7)] + assert db1.open_cursors == 5 + assert db2.open_cursors == 7 + db1.close() + db1 = db_pool.connection() + assert db1 != db2 + assert hasattr(db1, 'cursor') + for _i in range(3): + cursors1.append(db1.cursor()) + assert db1.open_cursors == 8 + cursors2.append(db2.cursor()) + assert db2.open_cursors == 8 + del cursors2 + del cursors1 + + +def test_threadsafety_1(): + db_pool = my_db_pool(1, 2) + queue = Queue(3) + + def connection(): + queue.put(db_pool.connection()) + + threads = [Thread(target=connection).start() for _i in range(3)] + assert len(threads) == 3 + db1 = queue.get(timeout=1) + db2 = queue.get(timeout=1) + assert db1 != db2 + assert db1._con != db2._con + with pytest.raises(Empty): + queue.get(timeout=0.1) + db2.close() + db3 = queue.get(timeout=1) + assert db1 != db3 + assert db1._con != db3._con + + +@pytest.mark.parametrize("threadsafety", [2, 3]) +def test_threadsafety_2(threadsafety): + dbpool = my_db_pool(threadsafety, 2) + db1 = dbpool.connection() + db2 = dbpool.connection() + cursors = [dbpool.connection().cursor() for _i in range(100)] + assert db1.open_cursors == 50 + assert db2.open_cursors == 50 + assert cursors + del cursors diff --git a/tests/test_simple_pooled_pg.py b/tests/test_simple_pooled_pg.py new file mode 100644 index 0000000..dd2b988 --- /dev/null +++ b/tests/test_simple_pooled_pg.py @@ -0,0 +1,108 @@ +"""Test the SimplePooledPg module. + +Note: +We don't test performance here, so the test does not predicate +whether SimplePooledPg actually will help in improving performance or not. 
+ + +Copyright and credit info: + +* This test was contributed by Christoph Zwerschke +""" + +from queue import Empty, Queue +from threading import Thread + +import pg # noqa: F401 +import pytest + +from dbutils import simple_pooled_pg + + +def my_db_pool(max_connections): + """Get simple PooledPg connection.""" + return simple_pooled_pg.PooledPg( + max_connections, 'SimplePooledPgTestDB', 'SimplePooledPgTestUser') + + +def test_version(): + from dbutils import __version__ + assert simple_pooled_pg.__version__ == __version__ + assert simple_pooled_pg.PooledPg.version == __version__ + + +def test_create_connection(): + db_pool = my_db_pool(1) + db = db_pool.connection() + assert hasattr(db, 'query') + assert hasattr(db, 'num_queries') + assert db.num_queries == 0 + assert hasattr(db, 'dbname') + assert db.dbname == 'SimplePooledPgTestDB' + assert hasattr(db, 'user') + assert db.user == 'SimplePooledPgTestUser' + db.query('select 1') + assert db.num_queries == 1 + + +def test_close_connection(): + db_pool = my_db_pool(1) + db = db_pool.connection() + assert db.num_queries == 0 + db.query('select 1') + assert db.num_queries == 1 + db.close() + assert not hasattr(db, 'num_queries') + db = db_pool.connection() + assert hasattr(db, 'dbname') + assert db.dbname == 'SimplePooledPgTestDB' + assert hasattr(db, 'user') + assert db.user == 'SimplePooledPgTestUser' + assert db.num_queries == 1 + db.query('select 1') + assert db.num_queries == 2 + + +def test_two_connections(): + db_pool = my_db_pool(2) + db1 = db_pool.connection() + for _i in range(5): + db1.query('select 1') + db2 = db_pool.connection() + assert db1 != db2 + assert db1._con != db2._con + for _i in range(7): + db2.query('select 1') + assert db1.num_queries == 5 + assert db2.num_queries == 7 + db1.close() + db1 = db_pool.connection() + assert db1 != db2 + assert db1._con != db2._con + assert hasattr(db1, 'query') + for _i in range(3): + db1.query('select 1') + assert db1.num_queries == 8 + db2.query('select 1') 
+ assert db2.num_queries == 8 + + +def test_threads(): + db_pool = my_db_pool(2) + queue = Queue(3) + + def connection(): + queue.put(db_pool.connection()) + + threads = [Thread(target=connection).start() for _i in range(3)] + assert len(threads) == 3 + db1 = queue.get(timeout=1) + db2 = queue.get(timeout=1) + assert db1 != db2 + assert db1._con != db2._con + with pytest.raises(Empty): + queue.get(timeout=0.1) + db2.close() + db3 = queue.get(timeout=1) + assert db1 != db3 + assert db1._con != db3._con diff --git a/tests/test_steady_db.py b/tests/test_steady_db.py new file mode 100644 index 0000000..e5ce07a --- /dev/null +++ b/tests/test_steady_db.py @@ -0,0 +1,726 @@ +"""Test the SteadyDB module. + +Note: +We do not test any real DB-API 2 module, but we just +mock the basic DB-API 2 connection functionality. + +Copyright and credit info: + +* This test was contributed by Christoph Zwerschke +""" + +import pytest + +from dbutils.steady_db import SteadyDBConnection, SteadyDBCursor +from dbutils.steady_db import connect as steady_db_connect + +from . 
import mock_db as dbapi + + +def test_version(): + from dbutils import __version__, steady_db + assert steady_db.__version__ == __version__ + assert steady_db.SteadyDBConnection.version == __version__ + + +def test_mocked_connection(): + db = dbapi.connect( + 'SteadyDBTestDB', user='SteadyDBTestUser') + db.__class__.has_ping = False + db.__class__.num_pings = 0 + assert hasattr(db, 'database') + assert db.database == 'SteadyDBTestDB' + assert hasattr(db, 'user') + assert db.user == 'SteadyDBTestUser' + assert hasattr(db, 'cursor') + assert hasattr(db, 'close') + assert hasattr(db, 'open_cursors') + assert hasattr(db, 'num_uses') + assert hasattr(db, 'num_queries') + assert hasattr(db, 'session') + assert hasattr(db, 'valid') + assert db.valid + assert db.open_cursors == 0 + for _i in range(3): + cursor = db.cursor() + assert db.open_cursors == 1 + cursor.close() + assert db.open_cursors == 0 + cursor = [] + for i in range(3): + cursor.append(db.cursor()) + assert db.open_cursors == i + 1 + del cursor + assert db.open_cursors == 0 + cursor = db.cursor() + assert hasattr(cursor, 'execute') + assert hasattr(cursor, 'fetchone') + assert hasattr(cursor, 'callproc') + assert hasattr(cursor, 'close') + assert hasattr(cursor, 'valid') + assert cursor.valid + assert db.open_cursors == 1 + for i in range(3): + assert db.num_uses == i + assert db.num_queries == i + cursor.execute(f'select test{i}') + assert cursor.fetchone() == f'test{i}' + assert cursor.valid + assert db.open_cursors == 1 + for _i in range(4): + cursor.callproc('test') + cursor.close() + assert not cursor.valid + assert db.open_cursors == 0 + assert db.num_uses == 7 + assert db.num_queries == 3 + with pytest.raises(dbapi.InternalError): + cursor.close() + with pytest.raises(dbapi.InternalError): + cursor.execute('select test') + assert db.valid + assert not db.__class__.has_ping + assert db.__class__.num_pings == 0 + with pytest.raises(AttributeError): + db.ping() + assert db.__class__.num_pings == 1 + 
db.__class__.has_ping = True + assert db.ping() is None + assert db.__class__.num_pings == 2 + db.close() + assert not db.valid + assert db.num_uses == 0 + assert db.num_queries == 0 + with pytest.raises(dbapi.InternalError): + db.close() + with pytest.raises(dbapi.InternalError): + db.cursor() + with pytest.raises(dbapi.OperationalError): + db.ping() + assert db.__class__.num_pings == 3 + db.__class__.has_ping = False + db.__class__.num_pings = 0 + + +def test_broken_connection(): + with pytest.raises(TypeError): + SteadyDBConnection(None) + with pytest.raises(TypeError): + SteadyDBCursor(None) + db = steady_db_connect(dbapi, database='ok') + for _i in range(3): + db.close() + del db + with pytest.raises(dbapi.OperationalError): + steady_db_connect(dbapi, database='error') + db = steady_db_connect(dbapi, database='ok') + cursor = db.cursor() + for _i in range(3): + cursor.close() + cursor = db.cursor('ok') + for _i in range(3): + cursor.close() + with pytest.raises(dbapi.OperationalError): + db.cursor('error') + + +@pytest.mark.parametrize("closeable", [False, True]) +def test_close(closeable): + db = steady_db_connect(dbapi, closeable=closeable) + assert db._con.valid + db.close() + assert closeable ^ db._con.valid + db.close() + assert closeable ^ db._con.valid + db._close() + assert not db._con.valid + db._close() + assert not db._con.valid + + +def test_connection(): # noqa: PLR0915 + db = steady_db_connect( + dbapi, 0, None, None, None, True, + 'SteadyDBTestDB', user='SteadyDBTestUser') + assert isinstance(db, SteadyDBConnection) + assert hasattr(db, '_con') + assert hasattr(db, '_usage') + assert db._usage == 0 + assert hasattr(db._con, 'valid') + assert db._con.valid + assert hasattr(db._con, 'cursor') + assert hasattr(db._con, 'close') + assert hasattr(db._con, 'open_cursors') + assert hasattr(db._con, 'num_uses') + assert hasattr(db._con, 'num_queries') + assert hasattr(db._con, 'session') + assert hasattr(db._con, 'database') + assert db._con.database == 
'SteadyDBTestDB' + assert hasattr(db._con, 'user') + assert db._con.user == 'SteadyDBTestUser' + assert hasattr(db, 'cursor') + assert hasattr(db, 'close') + assert db._con.open_cursors == 0 + for _i in range(3): + cursor = db.cursor() + assert db._con.open_cursors == 1 + cursor.close() + assert db._con.open_cursors == 0 + cursor = [] + for i in range(3): + cursor.append(db.cursor()) + assert db._con.open_cursors == i + 1 + del cursor + assert db._con.open_cursors == 0 + cursor = db.cursor() + assert hasattr(cursor, 'execute') + assert hasattr(cursor, 'fetchone') + assert hasattr(cursor, 'callproc') + assert hasattr(cursor, 'close') + assert hasattr(cursor, 'valid') + assert cursor.valid + assert db._con.open_cursors == 1 + for i in range(3): + assert db._usage == i + assert db._con.num_uses == i + assert db._con.num_queries == i + cursor.execute(f'select test{i}') + assert cursor.fetchone() == f'test{i}' + assert cursor.valid + assert db._con.open_cursors == 1 + for _i in range(4): + cursor.callproc('test') + cursor.close() + assert not cursor.valid + assert db._con.open_cursors == 0 + assert db._usage == 7 + assert db._con.num_uses == 7 + assert db._con.num_queries == 3 + cursor.close() + cursor.execute('select test8') + assert cursor.valid + assert db._con.open_cursors == 1 + assert cursor.fetchone() == 'test8' + assert db._usage == 8 + assert db._con.num_uses == 8 + assert db._con.num_queries == 4 + assert db._con.valid + db.close() + assert not db._con.valid + assert db._con.open_cursors == 0 + assert db._usage == 8 + assert db._con.num_uses == 0 + assert db._con.num_queries == 0 + with pytest.raises(dbapi.InternalError): + db._con.close() + db.close() + with pytest.raises(dbapi.InternalError): + db._con.cursor() + cursor = db.cursor() + assert db._con.valid + cursor.execute('select test11') + assert cursor.fetchone() == 'test11' + cursor.execute('select test12') + assert cursor.fetchone() == 'test12' + cursor.callproc('test') + assert db._usage == 3 + assert 
db._con.num_uses == 3 + assert db._con.num_queries == 2 + cursor2 = db.cursor() + assert db._con.open_cursors == 2 + cursor2.execute('select test13') + assert cursor2.fetchone() == 'test13' + assert db._con.num_queries == 3 + db.close() + assert db._con.open_cursors == 0 + assert db._con.num_queries == 0 + cursor = db.cursor() + assert cursor.valid + cursor.callproc('test') + cursor._cursor.valid = False + assert not cursor.valid + with pytest.raises(dbapi.InternalError): + cursor._cursor.callproc('test') + cursor.callproc('test') + assert cursor.valid + cursor._cursor.callproc('test') + assert db._usage == 2 + assert db._con.num_uses == 3 + db._con.valid = cursor._cursor.valid = False + cursor.callproc('test') + assert cursor.valid + assert db._usage == 1 + assert db._con.num_uses == 1 + cursor.execute('set this') + db.commit() + cursor.execute('set that') + db.rollback() + assert db._con.session == ['this', 'commit', 'that', 'rollback'] + + +def test_connection_context_handler(): + db = steady_db_connect( + dbapi, 0, None, None, None, True, + 'SteadyDBTestDB', user='SteadyDBTestUser') + assert db._con.session == [] + with db as con: + con.cursor().execute('select test') + assert db._con.session == ['commit'] + try: + with db as con: + con.cursor().execute('error') + except dbapi.ProgrammingError: + error = True + else: + error = False + assert error + assert db._con.session == ['commit', 'rollback'] + + +def test_cursor_context_handler(): + db = steady_db_connect( + dbapi, 0, None, None, None, True, + 'SteadyDBTestDB', user='SteadyDBTestUser') + assert db._con.open_cursors == 0 + with db.cursor() as cursor: + assert db._con.open_cursors == 1 + cursor.execute('select test') + assert cursor.fetchone() == 'test' + assert db._con.open_cursors == 0 + + +def test_cursor_as_iterator_provided(): + db = steady_db_connect( + dbapi, 0, None, None, None, True, + 'SteadyDBTestDB', user='SteadyDBTestUser') + assert db._con.open_cursors == 0 + cursor = db.cursor() + assert 
db._con.open_cursors == 1 + cursor.execute('select test') + _cursor = cursor._cursor + try: + assert not hasattr(_cursor, 'iter') + _cursor.__iter__ = lambda: ['test-iter'] + assert list(iter(cursor)) == ['test'] + finally: + del _cursor.__iter__ + cursor.close() + assert db._con.open_cursors == 0 + + +def test_cursor_as_iterator_created(): + db = steady_db_connect( + dbapi, 0, None, None, None, True, + 'SteadyDBTestDB', user='SteadyDBTestUser') + assert db._con.open_cursors == 0 + cursor = db.cursor() + assert db._con.open_cursors == 1 + cursor.execute('select test') + assert list(iter(cursor)) == ['test'] + cursor.close() + assert db._con.open_cursors == 0 + + +def test_connection_creator_function(): + db1 = steady_db_connect( + dbapi, 0, None, None, None, True, + 'SteadyDBTestDB', user='SteadyDBTestUser') + db2 = steady_db_connect( + dbapi.connect, 0, None, None, None, True, + 'SteadyDBTestDB', user='SteadyDBTestUser') + assert db1.dbapi() == db2.dbapi() + assert db1.threadsafety() == db2.threadsafety() + assert db1._creator == db2._creator + assert db1._args == db2._args + assert db1._kwargs == db2._kwargs + db2.close() + db1.close() + + +def test_connection_maxusage(): + db = steady_db_connect(dbapi, 10) + cursor = db.cursor() + for i in range(100): + cursor.execute(f'select test{i}') + r = cursor.fetchone() + assert r == f'test{i}' + assert db._con.valid + j = i % 10 + 1 + assert db._usage == j + assert db._con.num_uses == j + assert db._con.num_queries == j + assert db._con.open_cursors == 1 + db.begin() + for i in range(100): + cursor.callproc('test') + assert db._con.valid + if i == 49: + db.commit() + j = i % 10 + 1 if i > 49 else i + 11 + assert db._usage == j + assert db._con.num_uses == j + j = 0 if i > 49 else 10 + assert db._con.num_queries == j + for i in range(10): + if i == 7: + db._con.valid = cursor._cursor.valid = False + cursor.execute(f'select test{i}') + r = cursor.fetchone() + assert r == f'test{i}' + j = i % 7 + 1 + assert db._usage == j + 
assert db._con.num_uses == j + assert db._con.num_queries == j + for i in range(10): + if i == 5: + db._con.valid = cursor._cursor.valid = False + cursor.callproc('test') + j = (i + (3 if i < 5 else -5)) % 10 + 1 + assert db._usage == j + assert db._con.num_uses == j + j = 3 if i < 5 else 0 + assert db._con.num_queries == j + db.close() + cursor.execute('select test1') + assert cursor.fetchone() == 'test1' + assert db._usage == 1 + assert db._con.num_uses == 1 + assert db._con.num_queries == 1 + + +def test_connection_setsession(): + db = steady_db_connect(dbapi, 3, ('set time zone', 'set datestyle')) + assert hasattr(db, '_usage') + assert db._usage == 0 + assert hasattr(db._con, 'open_cursors') + assert db._con.open_cursors == 0 + assert hasattr(db._con, 'num_uses') + assert db._con.num_uses == 2 + assert hasattr(db._con, 'num_queries') + assert db._con.num_queries == 0 + assert hasattr(db._con, 'session') + assert tuple(db._con.session) == ('time zone', 'datestyle') + for _i in range(11): + db.cursor().execute('select test') + assert db._con.open_cursors == 0 + assert db._usage == 2 + assert db._con.num_uses == 4 + assert db._con.num_queries == 2 + assert db._con.session == ['time zone', 'datestyle'] + db.cursor().execute('set test') + assert db._con.open_cursors == 0 + assert db._usage == 3 + assert db._con.num_uses == 5 + assert db._con.num_queries == 2 + assert db._con.session == ['time zone', 'datestyle', 'test'] + db.cursor().execute('select test') + assert db._con.open_cursors == 0 + assert db._usage == 1 + assert db._con.num_uses == 3 + assert db._con.num_queries == 1 + assert db._con.session == ['time zone', 'datestyle'] + db.cursor().execute('set test') + assert db._con.open_cursors == 0 + assert db._usage == 2 + assert db._con.num_uses == 4 + assert db._con.num_queries == 1 + assert db._con.session == ['time zone', 'datestyle', 'test'] + db.cursor().execute('select test') + assert db._con.open_cursors == 0 + assert db._usage == 3 + assert 
db._con.num_uses == 5 + assert db._con.num_queries == 2 + assert db._con.session == ['time zone', 'datestyle', 'test'] + db.close() + db.cursor().execute('set test') + assert db._con.open_cursors == 0 + assert db._usage == 1 + assert db._con.num_uses == 3 + assert db._con.num_queries == 0 + assert db._con.session == ['time zone', 'datestyle', 'test'] + db.close() + db.cursor().execute('select test') + assert db._con.open_cursors == 0 + assert db._usage == 1 + assert db._con.num_uses == 3 + assert db._con.num_queries == 1 + assert db._con.session == ['time zone', 'datestyle'] + + +def test_connection_failures(): + db = steady_db_connect(dbapi) + db.close() + db.cursor() + db = steady_db_connect(dbapi, failures=dbapi.InternalError) + db.close() + db.cursor() + db = steady_db_connect(dbapi, failures=dbapi.OperationalError) + db.close() + with pytest.raises(dbapi.InternalError): + db.cursor() + db = steady_db_connect(dbapi, failures=( + dbapi.OperationalError, dbapi.InterfaceError)) + db.close() + with pytest.raises(dbapi.InternalError): + db.cursor() + db = steady_db_connect(dbapi, failures=( + dbapi.OperationalError, dbapi.InterfaceError, dbapi.InternalError)) + db.close() + db.cursor() + + +def test_connection_failure_error(): + db = steady_db_connect(dbapi) + cursor = db.cursor() + db.close() + cursor.execute('select test') + cursor = db.cursor() + db.close() + with pytest.raises(dbapi.ProgrammingError): + cursor.execute('error') + + +def test_connection_set_sizes(): + db = steady_db_connect(dbapi) + cursor = db.cursor() + cursor.execute('get sizes') + result = cursor.fetchone() + assert result == ([], {}) + cursor.setinputsizes([7, 42, 6]) + cursor.setoutputsize(9) + cursor.setoutputsize(15, 3) + cursor.setoutputsize(42, 7) + cursor.execute('get sizes') + result = cursor.fetchone() + assert result == ([7, 42, 6], {None: 9, 3: 15, 7: 42}) + cursor.execute('get sizes') + result = cursor.fetchone() + assert result == ([], {}) + cursor.setinputsizes([6, 42, 7]) + 
cursor.setoutputsize(7) + cursor.setoutputsize(15, 3) + cursor.setoutputsize(42, 9) + db.close() + cursor.execute('get sizes') + result = cursor.fetchone() + assert result == ([6, 42, 7], {None: 7, 3: 15, 9: 42}) + + +def test_connection_ping_check(): + con_cls = dbapi.Connection + con_cls.has_ping = False + con_cls.num_pings = 0 + db = steady_db_connect(dbapi) + db.cursor().execute('select test') + assert con_cls.num_pings == 0 + db.close() + db.cursor().execute('select test') + assert con_cls.num_pings == 0 + assert db._ping_check() is None + assert con_cls.num_pings == 1 + db = steady_db_connect(dbapi, ping=7) + db.cursor().execute('select test') + assert con_cls.num_pings == 2 + db.close() + db.cursor().execute('select test') + assert con_cls.num_pings == 2 + assert db._ping_check() is None + assert con_cls.num_pings == 2 + con_cls.has_ping = True + db = steady_db_connect(dbapi) + db.cursor().execute('select test') + assert con_cls.num_pings == 2 + db.close() + db.cursor().execute('select test') + assert con_cls.num_pings == 2 + assert db._ping_check() + assert con_cls.num_pings == 3 + db = steady_db_connect(dbapi, ping=1) + db.cursor().execute('select test') + assert con_cls.num_pings == 3 + db.close() + db.cursor().execute('select test') + assert con_cls.num_pings == 3 + assert db._ping_check() + assert con_cls.num_pings == 4 + db.close() + assert db._ping_check() + assert con_cls.num_pings == 5 + db = steady_db_connect(dbapi, ping=7) + db.cursor().execute('select test') + assert con_cls.num_pings == 7 + db.close() + db.cursor().execute('select test') + assert con_cls.num_pings == 9 + db = steady_db_connect(dbapi, ping=3) + assert con_cls.num_pings == 9 + db.cursor() + assert con_cls.num_pings == 10 + db.close() + cursor = db.cursor() + assert con_cls.num_pings == 11 + cursor.execute('select test') + assert con_cls.num_pings == 11 + db = steady_db_connect(dbapi, ping=5) + assert con_cls.num_pings == 11 + db.cursor() + assert con_cls.num_pings == 11 + 
db.close() + cursor = db.cursor() + assert con_cls.num_pings == 11 + cursor.execute('select test') + assert con_cls.num_pings == 12 + db.close() + cursor = db.cursor() + assert con_cls.num_pings == 12 + cursor.execute('select test') + assert con_cls.num_pings == 13 + db = steady_db_connect(dbapi, ping=7) + assert con_cls.num_pings == 13 + db.cursor() + assert con_cls.num_pings == 14 + db.close() + cursor = db.cursor() + assert con_cls.num_pings == 15 + cursor.execute('select test') + assert con_cls.num_pings == 16 + db.close() + cursor = db.cursor() + assert con_cls.num_pings == 17 + cursor.execute('select test') + assert con_cls.num_pings == 18 + db.close() + cursor.execute('select test') + assert con_cls.num_pings == 20 + con_cls.has_ping = False + con_cls.num_pings = 0 + + +def test_begin_transaction(): + db = steady_db_connect(dbapi, database='ok') + cursor = db.cursor() + cursor.close() + cursor.execute('select test12') + assert cursor.fetchone() == 'test12' + db.begin() + cursor = db.cursor() + cursor.close() + with pytest.raises(dbapi.InternalError): + cursor.execute('select test12') + cursor.execute('select test12') + assert cursor.fetchone() == 'test12' + db.close() + db.begin() + with pytest.raises(dbapi.InternalError): + cursor.execute('select test12') + cursor.execute('select test12') + assert cursor.fetchone() == 'test12' + db.begin() + with pytest.raises(dbapi.ProgrammingError): + cursor.execute('error') + cursor.close() + cursor.execute('select test12') + assert cursor.fetchone() == 'test12' + + +def test_with_begin_extension(): + db = steady_db_connect(dbapi, database='ok') + db._con._begin_called_with = None + + def begin(a, b=None, c=7): + db._con._begin_called_with = (a, b, c) + + db._con.begin = begin + db.begin(42, 6) + cursor = db.cursor() + cursor.execute('select test13') + assert cursor.fetchone() == 'test13' + assert db._con._begin_called_with == (42, 6, 7) + + +def test_cancel_transaction(): + db = steady_db_connect(dbapi, database='ok') + 
cursor = db.cursor() + db.begin() + cursor.execute('select test14') + assert cursor.fetchone() == 'test14' + db.cancel() + cursor.execute('select test14') + assert cursor.fetchone() == 'test14' + + +def test_with_cancel_extension(): + db = steady_db_connect(dbapi, database='ok') + db._con._cancel_called = None + + def cancel(): + db._con._cancel_called = 'yes' + + db._con.cancel = cancel + db.begin() + cursor = db.cursor() + cursor.execute('select test15') + assert cursor.fetchone() == 'test15' + db.cancel() + assert db._con._cancel_called == 'yes' + + +def test_reset_transaction(): + db = steady_db_connect(dbapi, database='ok') + db.begin() + assert not db._con.session + db.close() + assert not db._con.session + db = steady_db_connect(dbapi, database='ok', closeable=False) + db.begin() + assert not db._con.session + db.close() + assert db._con.session == ['rollback'] + + +def test_commit_error(): + db = steady_db_connect(dbapi, database='ok') + db.begin() + assert not db._con.session + assert db._con.valid + db.commit() + assert db._con.session == ['commit'] + assert db._con.valid + db.begin() + db._con.valid = False + con = db._con + with pytest.raises(dbapi.InternalError): + db.commit() + assert not db._con.session + assert db._con.valid + assert con is not db._con + db.begin() + assert not db._con.session + assert db._con.valid + db.commit() + assert db._con.session == ['commit'] + assert db._con.valid + + +def test_rollback_error(): + db = steady_db_connect(dbapi, database='ok') + db.begin() + assert not db._con.session + assert db._con.valid + db.rollback() + assert db._con.session == ['rollback'] + assert db._con.valid + db.begin() + db._con.valid = False + con = db._con + with pytest.raises(dbapi.InternalError): + db.rollback() + assert not db._con.session + assert db._con.valid + assert con is not db._con + db.begin() + assert not db._con.session + assert db._con.valid + db.rollback() + assert db._con.session == ['rollback'] + assert db._con.valid diff 
--git a/tests/test_steady_pg.py b/tests/test_steady_pg.py new file mode 100644 index 0000000..830c07c --- /dev/null +++ b/tests/test_steady_pg.py @@ -0,0 +1,329 @@ +"""Test the SteadyPg module. + +Note: +We do not test the real PyGreSQL module, but we just +mock the basic connection functionality of that module. +We assume that the PyGreSQL module will detect lost +connections correctly and set the status flag accordingly. + +Copyright and credit info: + +* This test was contributed by Christoph Zwerschke +""" + +import sys + +import pg +import pytest + +from dbutils.steady_pg import SteadyPgConnection + + +def test_version(): + from dbutils import __version__, steady_pg + assert steady_pg.__version__ == __version__ + assert steady_pg.SteadyPgConnection.version == __version__ + + +def test_mocked_connection(): + db_cls = pg.DB + db = db_cls( + 'SteadyPgTestDB', user='SteadyPgTestUser') + assert hasattr(db, 'db') + assert hasattr(db.db, 'status') + assert db.db.status + assert hasattr(db.db, 'query') + assert hasattr(db.db, 'close') + assert not hasattr(db.db, 'reopen') + assert hasattr(db, 'reset') + assert hasattr(db.db, 'num_queries') + assert hasattr(db.db, 'session') + assert not hasattr(db.db, 'get_tables') + assert hasattr(db.db, 'db') + assert db.db.db == 'SteadyPgTestDB' + assert hasattr(db.db, 'user') + assert db.db.user == 'SteadyPgTestUser' + assert hasattr(db, 'query') + assert hasattr(db, 'close') + assert hasattr(db, 'reopen') + assert hasattr(db, 'reset') + assert hasattr(db, 'num_queries') + assert hasattr(db, 'session') + assert hasattr(db, 'get_tables') + assert hasattr(db, 'dbname') + assert db.dbname == 'SteadyPgTestDB' + assert hasattr(db, 'user') + assert db.user == 'SteadyPgTestUser' + for i in range(3): + assert db.num_queries == i + assert db.query(f'select test{i}') == f'test{i}' + assert db.db.status + db.reopen() + assert db.db.status + assert db.num_queries == 0 + assert db.query('select test4') == 'test4' + assert db.get_tables() == 
'test' + db.close() + try: + status = db.db.status + except AttributeError: + status = False + assert not status + with pytest.raises(pg.InternalError): + db.close() + with pytest.raises(pg.InternalError): + db.query('select test') + with pytest.raises(pg.InternalError): + db.get_tables() + + +def test_broken_connection(): + with pytest.raises(TypeError): + SteadyPgConnection('wrong') + db = SteadyPgConnection(dbname='ok') + internal_error_cls = sys.modules[db._con.__module__].InternalError + for _i in range(3): + db.close() + del db + with pytest.raises(internal_error_cls): + SteadyPgConnection(dbname='error') + + +@pytest.mark.parametrize("closeable", [False, True]) +def test_close(closeable): + db = SteadyPgConnection(closeable=closeable) + assert db._con.db + assert db._con.valid is True + db.close() + assert closeable ^ (db._con.db is not None and db._con.valid) + db.close() + assert closeable ^ (db._con.db is not None and db._con.valid) + db._close() + assert not db._con.db + db._close() + assert not db._con.db + + +def test_connection(): + db = SteadyPgConnection( + 0, None, 1, 'SteadyPgTestDB', user='SteadyPgTestUser') + assert hasattr(db, 'db') + assert hasattr(db, '_con') + assert db.db == db._con.db + assert hasattr(db, '_usage') + assert db._usage == 0 + assert hasattr(db.db, 'status') + assert db.db.status + assert hasattr(db.db, 'query') + assert hasattr(db.db, 'close') + assert not hasattr(db.db, 'reopen') + assert hasattr(db.db, 'reset') + assert hasattr(db.db, 'num_queries') + assert hasattr(db.db, 'session') + assert hasattr(db.db, 'db') + assert db.db.db == 'SteadyPgTestDB' + assert hasattr(db.db, 'user') + assert db.db.user == 'SteadyPgTestUser' + assert not hasattr(db.db, 'get_tables') + assert hasattr(db, 'query') + assert hasattr(db, 'close') + assert hasattr(db, 'reopen') + assert hasattr(db, 'reset') + assert hasattr(db, 'num_queries') + assert hasattr(db, 'session') + assert hasattr(db, 'dbname') + assert db.dbname == 'SteadyPgTestDB' + 
assert hasattr(db, 'user') + assert db.user == 'SteadyPgTestUser' + assert hasattr(db, 'get_tables') + for i in range(3): + assert db._usage == i + assert db.num_queries == i + assert db.query(f'select test{i}') == f'test{i}' + assert db.db.status + assert db.get_tables() == 'test' + assert db.db.status + assert db._usage == 4 + assert db.num_queries == 3 + db.reopen() + assert db.db.status + assert db._usage == 0 + assert db.num_queries == 0 + assert db.query('select test') == 'test' + assert db.db.status + assert hasattr(db._con, 'status') + assert db._con.status + assert hasattr(db._con, 'close') + assert hasattr(db._con, 'query') + db.close() + try: + status = db.db.status + except AttributeError: + status = False + assert not status + assert hasattr(db._con, 'close') + assert hasattr(db._con, 'query') + internal_error_cls = sys.modules[db._con.__module__].InternalError + with pytest.raises(internal_error_cls): + db._con.close() + with pytest.raises(internal_error_cls): + db._con.query('select test') + assert db.query('select test') == 'test' + assert db.db.status + assert db._usage == 1 + assert db.num_queries == 1 + db.db.status = False + assert not db.db.status + assert db.query('select test') == 'test' + assert db.db.status + assert db._usage == 1 + assert db.num_queries == 1 + db.db.status = False + assert not db.db.status + assert db.get_tables() == 'test' + assert db.db.status + assert db._usage == 1 + assert db.num_queries == 0 + + +def test_connection_context_handler(): + db = SteadyPgConnection( + 0, None, 1, 'SteadyPgTestDB', user='SteadyPgTestUser') + assert db.session == [] + with db: + db.query('select test') + assert db.session == ['begin', 'commit'] + try: + with db: + db.query('error') + except pg.ProgrammingError: + error = True + else: + error = False + assert error + assert db._con.session == ['begin', 'commit', 'begin', 'rollback'] + + +def test_connection_maxusage(): + db = SteadyPgConnection(10) + for i in range(100): + r = 
db.query(f'select test{i}') + assert r == f'test{i}' + assert db.db.status + j = i % 10 + 1 + assert db._usage == j + assert db.num_queries == j + db.begin() + for i in range(100): + r = db.get_tables() + assert r == 'test' + assert db.db.status + if i == 49: + db.commit() + j = i % 10 + 1 if i > 49 else i + 11 + assert db._usage == j + j = 0 if i > 49 else 10 + assert db.num_queries == j + for i in range(10): + if i == 7: + db.db.status = False + r = db.query(f'select test{i}') + assert r == f'test{i}' + j = i % 7 + 1 + assert db._usage == j + assert db.num_queries == j + for i in range(10): + if i == 5: + db.db.status = False + r = db.get_tables() + assert r == 'test' + j = (i + (3 if i < 5 else -5)) % 10 + 1 + assert db._usage == j + j = 3 if i < 5 else 0 + assert db.num_queries == j + db.close() + assert db.query('select test1') == 'test1' + assert db._usage == 1 + assert db.num_queries == 1 + db.reopen() + assert db._usage == 0 + assert db.num_queries == 0 + assert db.query('select test2') == 'test2' + assert db._usage == 1 + assert db.num_queries == 1 + + +def test_connection_setsession(): + db = SteadyPgConnection(3, ('set time zone', 'set datestyle')) + assert hasattr(db, 'num_queries') + assert db.num_queries == 0 + assert hasattr(db, 'session') + assert tuple(db.session) == ('time zone', 'datestyle') + for _i in range(11): + db.query('select test') + assert db.num_queries == 2 + assert db.session == ['time zone', 'datestyle'] + db.query('set test') + assert db.num_queries == 2 + assert db.session == ['time zone', 'datestyle', 'test'] + db.query('select test') + assert db.num_queries == 1 + assert db.session == ['time zone', 'datestyle'] + db.close() + db.query('set test') + assert db.num_queries == 0 + assert db.session == ['time zone', 'datestyle', 'test'] + + +@pytest.mark.parametrize("closeable", [False, True]) +def test_begin(closeable): + db = SteadyPgConnection(closeable=closeable) + db.begin() + assert db.session == ['begin'] + db.query('select 
test') + assert db.num_queries == 1 + db.close() + db.query('select test') + assert db.num_queries == 1 + db.begin() + assert db.session == ['begin'] + db.db.close() + with pytest.raises(pg.InternalError): + db.query('select test') + assert db.num_queries == 0 + db.query('select test') + assert db.num_queries == 1 + assert db.begin('select sql:begin') == 'sql:begin' + assert db.num_queries == 2 + + +@pytest.mark.parametrize("closeable", [False, True]) +def test_end(closeable): + db = SteadyPgConnection(closeable=closeable) + db.begin() + db.query('select test') + db.end() + assert db.session == ['begin', 'end'] + db.db.close() + db.query('select test') + assert db.num_queries == 1 + assert db.begin('select sql:end') == 'sql:end' + assert db.num_queries == 2 + db.begin() + db.query('select test') + db.commit() + assert db.session == ['begin', 'commit'] + db.db.close() + db.query('select test') + assert db.num_queries == 1 + assert db.begin('select sql:commit') == 'sql:commit' + assert db.num_queries == 2 + db.begin() + db.query('select test') + db.rollback() + assert db.session == ['begin', 'rollback'] + db.db.close() + db.query('select test') + assert db.num_queries == 1 + assert db.begin('select sql:rollback') == 'sql:rollback' + assert db.num_queries == 2 diff --git a/tests/test_threading_local.py b/tests/test_threading_local.py new file mode 100644 index 0000000..3871e6c --- /dev/null +++ b/tests/test_threading_local.py @@ -0,0 +1,82 @@ +"""Test the ThreadingLocal module.""" + +from threading import Thread + +from dbutils.persistent_db import local + + +def test_getattr(): + my_data = local() + my_data.number = 42 + assert my_data.number == 42 + + +def test_dict(): + my_data = local() + my_data.number = 42 + assert my_data.__dict__ == {'number': 42} + my_data.__dict__.setdefault('widgets', []) + assert my_data.widgets == [] + + +def test_threadlocal(): + def f(): + items = sorted(my_data.__dict__.items()) + log.append(items) + my_data.number = 11 + 
log.append(my_data.number) + my_data = local() + my_data.number = 42 + log = [] + thread = Thread(target=f) + thread.start() + thread.join() + assert log == [[], 11] + assert my_data.number == 42 + + +def test_subclass(): + + class MyLocal(local): + number = 2 + initialized = 0 + + def __init__(self, **kw): + if self.initialized: + raise SystemError + self.initialized = 1 + self.__dict__.update(kw) + + def squared(self): + return self.number ** 2 + + my_data = MyLocal(color='red') + assert my_data.number == 2 + assert my_data.color == 'red' + del my_data.color + assert my_data.squared() == 4 + + def f(): + items = sorted(my_data.__dict__.items()) + log.append(items) + my_data.number = 7 + log.append(my_data.number) + + log = [] + thread = Thread(target=f) + thread.start() + thread.join() + assert log == [[('color', 'red'), ('initialized', 1)], 7] + assert my_data.number == 2 + assert not hasattr(my_data, 'color') + + class MyLocal(local): + __slots__ = ('number',) + + my_data = MyLocal() + my_data.number = 42 + my_data.color = 'red' + thread = Thread(target=f) + thread.start() + thread.join() + assert my_data.number == 7 diff --git a/tox.ini b/tox.ini index cfc48a4..62aab9d 100644 --- a/tox.ini +++ b/tox.ini @@ -1,21 +1,34 @@ [tox] -envlist = py{27,35,36,37,38,39}, flake8 - -[pytest] -python_files=Test*.py +envlist = py3{7,8,9,10,11,12}, ruff, manifest, docs, spell [testenv] setenv = PYTHONPATH = {toxinidir} -deps = - pytest +extras = tests +commands = + pytest {posargs} + +[testenv:spell] +basepython = python3.11 +deps = codespell +commands = + codespell . + +[testenv:ruff] +basepython = python3.11 +deps = ruff +commands = + ruff check . + +[testenv:manifest] +basepython = python3.11 +deps = check-manifest commands = - pytest + check-manifest -v -[testenv:flake8] -basepython = - python -deps = - flake8 +[testenv:docs] +basepython = python3.11 +extras = docs +changedir = docs commands = - flake8 DBUtils + python make.py